diff --git a/.eslintrc.js b/.eslintrc.js
new file mode 100644
--- /dev/null
+++ b/.eslintrc.js
@@ -0,0 +1,21 @@
+module.exports = {
+ "env": {
+ "browser": true,
+ "es6": true,
+ "jquery": true
+ },
+ "extends": "eslint:recommended",
+ "globals": {
+ "Atomics": "readonly",
+ "SharedArrayBuffer": "readonly"
+ },
+ "parserOptions": {
+ "ecmaVersion": 2018,
+ "sourceType": "module"
+ },
+ "plugins": [
+ "html"
+ ],
+ "rules": {
+ }
+};
diff --git a/Jenkinsfile b/Jenkinsfile
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -9,10 +9,10 @@ node {
daysToKeepStr: '',
numToKeepStr: '']]]);
if (isUnix()) {
- createvirtualenv = 'rm -r $JENKINS_HOME/venv/$JOB_NAME || true && virtualenv $JENKINS_HOME/venv/$JOB_NAME'
+ createvirtualenv = 'rm -r $JENKINS_HOME/venv/$JOB_NAME || true && python3 -m venv $JENKINS_HOME/venv/$JOB_NAME'
activatevirtualenv = '. $JENKINS_HOME/venv/$JOB_NAME/bin/activate'
} else {
- createvirtualenv = 'rmdir /s /q %JENKINS_HOME%\\venv\\%JOB_NAME% || true && virtualenv %JENKINS_HOME%\\venv\\%JOB_NAME%'
+ createvirtualenv = 'rmdir /s /q %JENKINS_HOME%\\venv\\%JOB_NAME% || true && python3 -m venv %JENKINS_HOME%\\venv\\%JOB_NAME%'
activatevirtualenv = 'call %JENKINS_HOME%\\venv\\%JOB_NAME%\\Scripts\\activate.bat'
}
diff --git a/README.rst b/README.rst
--- a/README.rst
+++ b/README.rst
@@ -24,8 +24,8 @@ Kallithea was forked from RhodeCode in J
Installation
------------
-Kallithea requires Python_ 2.7 and it is recommended to install it in a
-virtualenv_. Official releases of Kallithea can be installed with::
+Kallithea requires Python_ 3 and it is recommended to install it in a
+virtualenv. Official releases of Kallithea can be installed with::
pip install kallithea
@@ -173,7 +173,6 @@ database from RhodeCode to Kallithea, be
of Kallithea.
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
.. _Python: http://www.python.org/
.. _Sphinx: http://sphinx.pocoo.org/
.. _Mercurial: http://mercurial.selenic.com/
diff --git a/conftest.py b/conftest.py
--- a/conftest.py
+++ b/conftest.py
@@ -2,10 +2,19 @@ import os
import mock
import pytest
+import tg
here = os.path.dirname(__file__)
+# HACK:
+def pytest_configure():
+ # Register global dummy tg.context to avoid "TypeError: No object (name: context) has been registered for this thread"
+ tg.request_local.context._push_object(tg.util.bunch.Bunch())
+ # could be removed again after use with
+ # tg.request_local.context._pop_object ... but we keep it around forever as
+ # a reasonable sentinel
+
def pytest_ignore_collect(path):
# ignore all files outside the 'kallithea' directory
if not str(path).startswith(os.path.join(here, 'kallithea')):
@@ -36,3 +45,10 @@ def doctest_mock_ugettext(request):
m = __import__(request.module.__name__, globals(), locals(), [None], 0)
with mock.patch.object(m, '_', lambda s: s):
yield
+
+if getattr(pytest, 'register_assert_rewrite', None):
+ # make sure that all asserts under kallithea/tests benefit from advanced
+ # assert reporting with pytest-3.0.0+, including api/api_base.py,
+ # models/common.py etc.
+ # See also: https://docs.pytest.org/en/latest/assert.html#advanced-assertion-introspection
+ pytest.register_assert_rewrite('kallithea.tests')
diff --git a/dev_requirements.txt b/dev_requirements.txt
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -1,8 +1,9 @@
-pytest >= 4.6.6, < 4.7
+pytest >= 4.6.6, < 5.4
pytest-sugar >= 0.9.2, < 0.10
pytest-benchmark >= 3.2.2, < 3.3
pytest-localserver >= 0.5.0, < 0.6
-mock >= 3.0.0, < 3.1
-Sphinx >= 1.8.0, < 1.9
-WebTest >= 2.0.3, < 2.1
+mock >= 3.0.0, < 4.1
+Sphinx >= 1.8.0, < 2.4
+WebTest >= 2.0.6, < 2.1
isort == 4.3.21
+pyflakes == 2.1.1
diff --git a/development.ini b/development.ini
--- a/development.ini
+++ b/development.ini
@@ -1,10 +1,10 @@
-################################################################################
-################################################################################
-# Kallithea - config file generated with kallithea-config #
-# #
-# The %(here)s variable will be replaced with the parent directory of this file#
-################################################################################
-################################################################################
+###################################################################################
+###################################################################################
+## Kallithea config file generated with kallithea-config ##
+## ##
+## The %(here)s variable will be replaced with the parent directory of this file ##
+###################################################################################
+###################################################################################
[DEFAULT]
@@ -126,7 +126,7 @@ commit_parse_limit = 25
## used, which is correct in many cases but for example not when using uwsgi.
## If you change this setting, you should reinstall the Git hooks via
## Admin > Settings > Remap and Rescan.
-# git_hook_interpreter = /srv/kallithea/venv/bin/python2
+#git_hook_interpreter = /srv/kallithea/venv/bin/python3
## path to git executable
git_path = git
@@ -198,7 +198,7 @@ issue_sub =
## issue_pat, issue_server_link and issue_sub can have suffixes to specify
## multiple patterns, to other issues server, wiki or others
## below an example how to create a wiki pattern
-# wiki-some-id -> https://wiki.example.com/some-id
+## wiki-some-id -> https://wiki.example.com/some-id
#issue_pat_wiki = wiki-(\S+)
#issue_server_link_wiki = https://wiki.example.com/\1
@@ -216,12 +216,12 @@ allow_repo_location_change = True
allow_custom_hooks_settings = True
## extra extensions for indexing, space separated and without the leading '.'.
-# index.extensions =
+#index.extensions =
# gemfile
# lock
## extra filenames for indexing, space separated
-# index.filenames =
+#index.filenames =
# .dockerignore
# .editorconfig
# INSTALL
@@ -250,25 +250,23 @@ ssh_enabled = false
### CELERY CONFIG ####
####################################
+## Note: Celery doesn't support Windows.
use_celery = false
-## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq:
-broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost
+## Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'.
-celery.imports = kallithea.lib.celerylib.tasks
-celery.accept.content = pickle
-celery.result.backend = amqp
-celery.result.dburi = amqp://
-celery.result.serializer = json
+## Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':
+celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
-#celery.send.task.error.emails = true
+celery.result.backend = db+sqlite:///celery-results.db
+
#celery.amqp.task.result.expires = 18000
-celeryd.concurrency = 2
-celeryd.max.tasks.per.child = 1
+celery.worker_concurrency = 2
+celery.worker_max_tasks_per_child = 1
## If true, tasks will never be sent to the queue, but executed locally instead.
-celery.always.eager = false
+celery.task_always_eager = false
####################################
### BEAKER CACHE ####
@@ -277,19 +275,15 @@ celery.always.eager = false
beaker.cache.data_dir = %(here)s/data/cache/data
beaker.cache.lock_dir = %(here)s/data/cache/lock
-beaker.cache.regions = short_term,long_term,sql_cache_short
-
-beaker.cache.short_term.type = memory
-beaker.cache.short_term.expire = 60
-beaker.cache.short_term.key_length = 256
+beaker.cache.regions = long_term,long_term_file
beaker.cache.long_term.type = memory
beaker.cache.long_term.expire = 36000
beaker.cache.long_term.key_length = 256
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
+beaker.cache.long_term_file.type = file
+beaker.cache.long_term_file.expire = 604800
+beaker.cache.long_term_file.key_length = 256
####################################
### BEAKER SESSION ####
@@ -324,12 +318,25 @@ session.secret = development-not-secret
#session.sa.url = postgresql://postgres:qwe@localhost/kallithea
#session.table_name = db_session
-############################
-## ERROR HANDLING SYSTEMS ##
-############################
+####################################
+### ERROR HANDLING ####
+####################################
+
+## Show a nice error page for application HTTP errors and exceptions (default true)
+#errorpage.enabled = true
-# Propagate email settings to ErrorReporter of TurboGears2
-# You do not normally need to change these lines
+## Enable Backlash client-side interactive debugger (default false)
+## WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!*
+## This debug mode will allow all visitors to execute malicious code.
+#debug = false
+debug = true
+
+## Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true)
+#trace_errors.enable = true
+## Errors will be reported by mail if trace_errors.error_email is set.
+
+## Propagate email settings to ErrorReporter of TurboGears2
+## You do not normally need to change these lines
get trace_errors.smtp_server = smtp_server
get trace_errors.smtp_port = smtp_port
get trace_errors.from_address = error_email_from
@@ -338,13 +345,6 @@ get trace_errors.smtp_username = smtp_us
get trace_errors.smtp_password = smtp_password
get trace_errors.smtp_use_tls = smtp_use_tls
-################################################################################
-## WARNING: *DEBUG MODE MUST BE OFF IN A PRODUCTION ENVIRONMENT* ##
-## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
-## execute malicious code after an exception is raised. ##
-################################################################################
-#debug = false
-debug = true
##################################
### LOGVIEW CONFIG ###
@@ -358,10 +358,10 @@ logview.pylons.util = #eee
### DB CONFIGS - EACH DB WILL HAVE IT'S OWN CONFIG ###
#########################################################
-# SQLITE [default]
+## SQLITE [default]
sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
-# see sqlalchemy docs for others
+## see sqlalchemy docs for other backends
sqlalchemy.pool_recycle = 3600
@@ -392,9 +392,8 @@ keys = generic, color_formatter, color_f
[logger_root]
level = NOTSET
#handlers = console
+## For coloring based on log level:
handlers = console_color
-# For coloring based on log level:
-# handlers = console_color
[logger_routes]
#level = WARN
@@ -437,10 +436,10 @@ qualname = gearbox
level = WARN
handlers =
qualname = sqlalchemy.engine
-# For coloring based on log level and pretty printing of SQL:
-# level = INFO
-# handlers = console_color_sql
-# propagate = 0
+## For coloring based on log level and pretty printing of SQL:
+#level = INFO
+#handlers = console_color_sql
+#propagate = 0
[logger_whoosh_indexer]
#level = WARN
@@ -468,13 +467,13 @@ args = (sys.stderr,)
formatter = generic
[handler_console_color]
-# ANSI color coding based on log level
+## ANSI color coding based on log level
class = StreamHandler
args = (sys.stderr,)
formatter = color_formatter
[handler_console_color_sql]
-# ANSI color coding and pretty printing of SQL statements
+## ANSI color coding and pretty printing of SQL statements
class = StreamHandler
args = (sys.stderr,)
formatter = color_formatter_sql
@@ -505,16 +504,16 @@ datefmt = %Y-%m-%d %H:%M:%S
## SSH LOGGING ##
#################
-# The default loggers use 'handler_console' that uses StreamHandler with
-# destination 'sys.stderr'. In the context of the SSH server process, these log
-# messages would be sent to the client, which is normally not what you want.
-# By default, when running ssh-serve, just use NullHandler and disable logging
-# completely. For other logging options, see:
-# https://docs.python.org/2/library/logging.handlers.html
+## The default loggers use 'handler_console' that uses StreamHandler with
+## destination 'sys.stderr'. In the context of the SSH server process, these log
+## messages would be sent to the client, which is normally not what you want.
+## By default, when running ssh-serve, just use NullHandler and disable logging
+## completely. For other logging options, see:
+## https://docs.python.org/3/library/logging.handlers.html
[ssh_serve:logger_root]
level = CRITICAL
handlers = null
-# Note: If logging is configured with other handlers, they might need similar
-# muting for ssh-serve too.
+## Note: If logging is configured with other handlers, they might need similar
+## muting for ssh-serve too.
diff --git a/docs/administrator_guide/auth.rst b/docs/administrator_guide/auth.rst
--- a/docs/administrator_guide/auth.rst
+++ b/docs/administrator_guide/auth.rst
@@ -135,10 +135,10 @@ Certificate Checks : optional
.. _Custom CA Certificates:
Custom CA Certificates : optional
- Directory used by OpenSSL to find CAs for validating the LDAP server certificate.
- Python 2.7.10 and later default to using the system certificate store, and
- this should thus not be necessary when using certificates signed by a CA
- trusted by the system.
+ Directory used by OpenSSL to find CAs for validating the LDAP server
+ certificate. It defaults to using the system certificate store, and it
+ should thus not be necessary to specify *Custom CA Certificates* when using
+ certificates signed by a CA trusted by the system.
It can be set to something like `/etc/openldap/cacerts` on older systems or
if using self-signed certificates.
diff --git a/docs/api/models.rst b/docs/api/models.rst
--- a/docs/api/models.rst
+++ b/docs/api/models.rst
@@ -13,9 +13,6 @@ The :mod:`models` module
.. automodule:: kallithea.model.permission
:members:
-.. automodule:: kallithea.model.repo_permission
- :members:
-
.. automodule:: kallithea.model.repo
:members:
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -46,8 +46,8 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-project = u'Kallithea'
-copyright = u'2010-2020 by various authors, licensed as GPLv3.'
+project = 'Kallithea'
+copyright = '2010-2020 by various authors, licensed as GPLv3.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -187,8 +187,8 @@ htmlhelp_basename = 'Kallithea-docs'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'Kallithea.tex', u'Kallithea Documentation',
- u'Kallithea Developers', 'manual'),
+ ('index', 'Kallithea.tex', 'Kallithea Documentation',
+ 'Kallithea Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -220,8 +220,8 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- ('index', 'kallithea', u'Kallithea Documentation',
- [u'Kallithea Developers'], 1)
+ ('index', 'kallithea', 'Kallithea Documentation',
+ ['Kallithea Developers'], 1)
]
diff --git a/docs/contributing.rst b/docs/contributing.rst
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -32,7 +32,7 @@ To get started with Kallithea developmen
hg clone https://kallithea-scm.org/repos/kallithea
cd kallithea
- virtualenv ../kallithea-venv
+ python3 -m venv ../kallithea-venv
source ../kallithea-venv/bin/activate
pip install --upgrade pip setuptools
pip install --upgrade -e . -r dev_requirements.txt python-ldap python-pam
@@ -92,8 +92,7 @@ Note that on unix systems, the temporary
and the test suite creates repositories in the temporary directory. Linux
systems with /tmp mounted noexec will thus fail.
-You can also use ``tox`` to run the tests with all supported Python versions
-(currently only Python 2.7).
+You can also use ``tox`` to run the tests with all supported Python versions.
When running tests, Kallithea generates a `test.ini` based on template values
in `kallithea/tests/conftest.py` and populates the SQLite database specified
@@ -199,8 +198,7 @@ of Mercurial's (https://www.mercurial-sc
consistency with existing code. Run ``scripts/run-all-cleanup`` before
committing to ensure some basic code formatting consistency.
-We currently only support Python 2.7.x and nothing else. For now we don't care
-about Python 3 compatibility.
+We support Python 3.6 and later.
We try to support the most common modern web browsers. IE9 is still supported
to the extent it is feasible, IE8 is not.
@@ -238,8 +236,8 @@ Each HTTP request runs inside an indepen
as in an independent database transaction). ``Session`` is the session manager
and factory. ``Session()`` will create a new session on-demand or return the
current session for the active thread. Many database operations are methods on
-such session instances - only ``Session.remove()`` should be called directly on
-the manager.
+such session instances. The session will generally be removed by
+TurboGears automatically.
Database model objects
(almost) always belong to a particular SQLAlchemy session, which means
@@ -268,6 +266,20 @@ code needs the database to assign an "au
a freshly created model object (before flushing, the ID attribute will
be ``None``).
+Debugging
+^^^^^^^^^
+
+A good way to trace what Kallithea is doing is to keep an eye on the output of
+stdout/stderr from the server process. Perhaps change ``my.ini`` to log at
+``DEBUG`` or ``INFO`` level, especially ``[logger_kallithea]``, but perhaps
+also other loggers. It is often easier to add additional ``log`` or ``print``
+statements than to use a Python debugger.
+
+Sometimes it is simpler to disable ``errorpage.enabled`` and perhaps also
+``trace_errors.enable`` to expose raw errors instead of adding extra
+processing. Enabling ``debug`` can be helpful for showing and exploring
+tracebacks in the browser, but is also insecure and will add extra processing.
+
TurboGears2 DebugBar
^^^^^^^^^^^^^^^^^^^^
diff --git a/docs/index.rst b/docs/index.rst
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -78,7 +78,6 @@ Developer guide
dev/dbmigrations
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
.. _python: http://www.python.org/
.. _django: http://www.djangoproject.com/
.. _mercurial: https://www.mercurial-scm.org/
diff --git a/docs/installation.rst b/docs/installation.rst
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -35,12 +35,12 @@ Git and development packages for the dat
For Debian and Ubuntu, the following command will ensure that a reasonable
set of dependencies is installed::
- sudo apt-get install build-essential git python-pip python-virtualenv libffi-dev python-dev
+ sudo apt-get install build-essential git libffi-dev python3-dev
For Fedora and RHEL-derivatives, the following command will ensure that a
reasonable set of dependencies is installed::
- sudo yum install gcc git python-pip python-virtualenv libffi-devel python-devel
+ sudo yum install gcc git libffi-devel python3-devel
.. _installation-source:
@@ -48,16 +48,16 @@ reasonable set of dependencies is instal
Installation from repository source
-----------------------------------
-To install Kallithea in a virtualenv_ using the stable branch of the development
+To install Kallithea in a virtualenv using the stable branch of the development
repository, follow the instructions below::
hg clone https://kallithea-scm.org/repos/kallithea -u stable
cd kallithea
- virtualenv ../kallithea-venv
+ python3 -m venv ../kallithea-venv
. ../kallithea-venv/bin/activate
pip install --upgrade pip setuptools
pip install --upgrade -e .
- python2 setup.py compile_catalog # for translation of the UI
+ python3 setup.py compile_catalog # for translation of the UI
You can now proceed to :ref:`setup`.
@@ -67,18 +67,18 @@ You can now proceed to :ref:`setup`.
Installing a released version in a virtualenv
---------------------------------------------
-It is highly recommended to use a separate virtualenv_ for installing Kallithea.
+It is highly recommended to use a separate virtualenv for installing Kallithea.
This way, all libraries required by Kallithea will be installed separately from your
main Python installation and other applications and things will be less
problematic when upgrading the system or Kallithea.
-An additional benefit of virtualenv_ is that it doesn't require root privileges.
+An additional benefit of virtualenv is that it doesn't require root privileges.
-- Assuming you have installed virtualenv_, create a new virtual environment
- for example, in `/srv/kallithea/venv`, using the virtualenv command::
+- Assuming you have installed virtualenv, create a new virtual environment
+ for example, in `/srv/kallithea/venv`, using the venv module::
- virtualenv /srv/kallithea/venv
+ python3 -m venv /srv/kallithea/venv
-- Activate the virtualenv_ in your current shell session and make sure the
+- Activate the virtualenv in your current shell session and make sure the
basic requirements are up-to-date by running::
. /srv/kallithea/venv/bin/activate
@@ -133,6 +133,3 @@ To install as a regular user in ``~/.loc
pip install --user kallithea
You can now proceed to :ref:`setup`.
-
-
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
diff --git a/docs/installation_iis.rst b/docs/installation_iis.rst
--- a/docs/installation_iis.rst
+++ b/docs/installation_iis.rst
@@ -1,5 +1,7 @@
.. _installation_iis:
+.. warning:: This section is outdated and needs updating for Python 3.
+
=====================================================================
Installing Kallithea on Microsoft Internet Information Services (IIS)
=====================================================================
@@ -66,7 +68,7 @@ the necessary components to finalize an
has been generated, it is necessary to run the following command due to the way
that ISAPI-WSGI is made::
- python2 dispatch.py install
+ python3 dispatch.py install
This accomplishes two things: generating an ISAPI compliant DLL file,
``_dispatch.dll``, and installing a script map handler into IIS for the
@@ -119,7 +121,7 @@ ISAPI-WSGI wrapper above uses ``win32tra
In order to dump output from WSGI using ``win32traceutil`` it is sufficient to
type the following in a console window::
- python2 -m win32traceutil
+ python3 -m win32traceutil
and any exceptions occurring in the WSGI layer and below (i.e. in the Kallithea
application itself) that are uncaught, will be printed here complete with stack
diff --git a/docs/installation_win.rst b/docs/installation_win.rst
--- a/docs/installation_win.rst
+++ b/docs/installation_win.rst
@@ -1,5 +1,7 @@
.. _installation_win:
+.. warning:: This section is outdated and needs updating for Python 3.
+
====================================================
Installation on Windows (7/Server 2008 R2 and newer)
====================================================
@@ -17,18 +19,16 @@ To install on an older version of Window
Step 1 -- Install Python
^^^^^^^^^^^^^^^^^^^^^^^^
-Install Python 2.7.x. Latest version is recommended. If you need another version, they can run side by side.
+Install Python 3. Latest version is recommended. If you need another version, they can run side by side.
-.. warning:: Python 3.x is not supported.
-
-- Download Python 2.7.x from http://www.python.org/download/
+- Download Python 3 from http://www.python.org/download/
- Choose and click on the version
- Click on "Windows X86-64 Installer" for x64 or "Windows x86 MSI installer" for Win32.
- Disable UAC or run the installer with admin privileges. If you chose to disable UAC, do not forget to reboot afterwards.
-While writing this guide, the latest version was v2.7.9.
+While writing this guide, the latest version was v3.8.1.
Remember the specific major and minor versions installed, because they will
-be needed in the next step. In this case, it is "2.7".
+be needed in the next step. In this case, it is "3.8".
Step 2 -- Python BIN
^^^^^^^^^^^^^^^^^^^^
@@ -42,7 +42,7 @@ Open a CMD and type::
SETX PATH "%PATH%;[your-python-path]" /M
Please substitute [your-python-path] with your Python installation
-path. Typically this is ``C:\\Python27``.
+path. Typically this is ``C:\\Python38``.
Step 3 -- Install pywin32 extensions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -52,38 +52,14 @@ http://sourceforge.net/projects/pywin32/
- Click on "pywin32" folder
- Click on the first folder (in this case, Build 219, maybe newer when you try)
-- Choose the file ending with ".amd64-py2.x.exe" (".win32-py2.x.exe"
+- Choose the file ending with ".amd64-py3.x.exe" (".win32-py3.x.exe"
for Win32) where x is the minor version of Python you installed.
When writing this guide, the file was:
- http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win-amd64-py2.7.exe/download
+ http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win-amd64-py3.8.exe/download
(x64)
- http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win32-py2.7.exe/download
+ http://sourceforge.net/projects/pywin32/files/pywin32/Build%20219/pywin32-219.win32-py3.8.exe/download
(Win32)
-Step 4 -- Install pip
-^^^^^^^^^^^^^^^^^^^^^
-
-pip is a package management system for Python. You will need it to install Kallithea and its dependencies.
-
-If you installed Python 2.7.9+, you already have it (as long as you ran the installer with admin privileges or disabled UAC).
-
-If it was not installed or if you are using Python < 2.7.9:
-
-- Go to https://bootstrap.pypa.io
-- Right-click on get-pip.py and choose Saves as...
-- Run "python2 get-pip.py" in the folder where you downloaded get-pip.py (may require admin access).
-
-.. note::
-
- See http://stackoverflow.com/questions/4750806/how-to-install-pip-on-windows
- for details and alternative methods.
-
-Note that pip.exe will be placed inside your Python installation's
-Scripts folder, which is likely not on your path. To correct this,
-open a CMD and type::
-
- SETX PATH "%PATH%;[your-python-path]\Scripts" /M
-
Step 5 -- Kallithea folder structure
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -108,24 +84,18 @@ Step 6 -- Install virtualenv
A python virtual environment will allow for isolation between the Python packages of your system and those used for Kallithea.
It is strongly recommended to use it to ensure that Kallithea does not change a dependency that other software uses or vice versa.
-In a command prompt type::
-
- pip install virtualenv
-
-Virtualenv will now be inside your Python Scripts path (C:\\Python27\\Scripts or similar).
-
To create a virtual environment, run::
- virtualenv C:\Kallithea\Env
+ python3 -m venv C:\Kallithea\Env
Step 7 -- Install Kallithea
^^^^^^^^^^^^^^^^^^^^^^^^^^^
In order to install Kallithea, you need to be able to run "pip install kallithea". It will use pip to install the Kallithea Python package and its dependencies.
Some Python packages use managed code and need to be compiled.
-This can be done on Linux without any special steps. On Windows, you will need to install Microsoft Visual C++ compiler for Python 2.7.
+This can be done on Linux without any special steps. On Windows, you will need to install a Microsoft Visual C++ compiler matching your Python 3 version.
-Download and install "Microsoft Visual C++ Compiler for Python 2.7" from http://aka.ms/vcpython27
+Download and install "Microsoft C++ Build Tools" from https://visualstudio.microsoft.com/visual-cpp-build-tools/
.. note::
You can also install the dependencies using already compiled Windows binaries packages. A good source of compiled Python packages is http://www.lfd.uci.edu/~gohlke/pythonlibs/. However, not all of the necessary packages for Kallithea are on this site and some are hard to find, so we will stick with using the compiler.
diff --git a/docs/installation_win_old.rst b/docs/installation_win_old.rst
--- a/docs/installation_win_old.rst
+++ b/docs/installation_win_old.rst
@@ -1,5 +1,7 @@
.. _installation_win_old:
+.. warning:: This section is outdated and needs updating for Python 3.
+
==========================================================
Installation on Windows (XP/Vista/Server 2003/Server 2008)
==========================================================
@@ -60,14 +62,11 @@ choose "Visual C++ 2008 Express" when in
Step 2 -- Install Python
^^^^^^^^^^^^^^^^^^^^^^^^
-Install Python 2.7.x x86 version (32-bit). DO NOT USE A 3.x version.
-Download Python 2.7.x from:
+Install Python 3.8.x from:
http://www.python.org/download/
-Choose "Windows Installer" (32-bit version) not "Windows X86-64
-Installer". While writing this guide, the latest version was v2.7.3.
Remember the specific major and minor version installed, because it will
-be needed in the next step. In this case, it is "2.7".
+be needed in the next step. In this case, it is "3.8".
.. note::
@@ -80,17 +79,17 @@ Download pywin32 from:
http://sourceforge.net/projects/pywin32/files/
- Click on "pywin32" folder
-- Click on the first folder (in this case, Build 217, maybe newer when you try)
-- Choose the file ending with ".win32-py2.x.exe" -> x being the minor
+- Click on the first folder (in this case, Build 218, maybe newer when you try)
+- Choose the file ending with ".win32-py3.x.exe" -> x being the minor
version of Python you installed (in this case, 7)
When writing this guide, the file was:
- http://sourceforge.net/projects/pywin32/files/pywin32/Build%20217/pywin32-217.win32-py2.7.exe/download
+ http://sourceforge.net/projects/pywin32/files/pywin32/Build%20218/pywin32-218.win32-py3.8.exe/download
.. note::
64-bit: Download and install the 64-bit version.
At the time of writing you can find this at:
- http://sourceforge.net/projects/pywin32/files/pywin32/Build%20218/pywin32-218.win-amd64-py2.7.exe/download
+ http://sourceforge.net/projects/pywin32/files/pywin32/Build%20218/pywin32-218.win-amd64-py3.8.exe/download
Step 4 -- Python BIN
^^^^^^^^^^^^^^^^^^^^
@@ -117,7 +116,7 @@ that came preinstalled in Vista/7 and ca
SETX PATH "%PATH%;[your-python-path]" /M
Please substitute [your-python-path] with your Python installation path.
- Typically: C:\\Python27
+ Typically: C:\\Python38
Step 5 -- Kallithea folder structure
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -139,22 +138,10 @@ Create the following folder structure::
Step 6 -- Install virtualenv
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Install Virtual Env for Python
-
-Navigate to: http://www.virtualenv.org/en/latest/index.html#installation
-Right click on "virtualenv.py" file and choose "Save link as...".
-Download to C:\\Kallithea (or whatever you want)
-(the file is located at
-https://raw.github.com/pypa/virtualenv/master/virtualenv.py)
+Create a virtual Python environment in C:\\Kallithea\\Env (or similar). To
+do so, open a CMD (Python Path should be included in Step3), and write::
-Create a virtual Python environment in C:\\Kallithea\\Env (or similar). To
-do so, open a CMD (Python Path should be included in Step3), navigate
-where you downloaded "virtualenv.py", and write::
-
- python2 virtualenv.py C:\Kallithea\Env
-
-(--no-site-packages is now the default behaviour of virtualenv, no need
-to include it)
+ python3 -m venv C:\Kallithea\Env
Step 7 -- Install Kallithea
^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/docs/overview.rst b/docs/overview.rst
--- a/docs/overview.rst
+++ b/docs/overview.rst
@@ -12,7 +12,7 @@ Python environment
------------------
**Kallithea** is written entirely in Python_ and requires Python version
-2.7 or higher. Python 3.x is currently not supported.
+3.6 or higher.
Given a Python installation, there are different ways of providing the
environment for running Python applications. Each of them pretty much
@@ -30,7 +30,7 @@ environment used for running Kallithea.
- Packages could also be installed in ``~/.local`` ... but that is probably
only a good idea if using a dedicated user per application or instance.
-- Finally, it can be installed in a virtualenv_. That is a very lightweight
+- Finally, it can be installed in a virtualenv. That is a very lightweight
"container" where each Kallithea instance can get its own dedicated and
self-contained virtual environment.
@@ -98,7 +98,7 @@ installed.
installed with all dependencies using ``pip install kallithea``.
With this method, Kallithea is installed in the Python environment as any
- other package, usually as a ``.../site-packages/Kallithea-X-py2.7.egg/``
+ other package, usually as a ``.../site-packages/Kallithea-X-py3.8.egg/``
directory with Python files and everything else that is needed.
(``pip install kallithea`` from a source tree will do pretty much the same
@@ -165,7 +165,6 @@ continuous hammering from the internet.
.. _Python: http://www.python.org/
.. _Gunicorn: http://gunicorn.org/
.. _Waitress: http://waitress.readthedocs.org/en/latest/
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
.. _Gearbox: http://turbogears.readthedocs.io/en/latest/turbogears/gearbox.html
.. _PyPI: https://pypi.python.org/pypi
.. _Apache httpd: http://httpd.apache.org/
diff --git a/docs/setup.rst b/docs/setup.rst
--- a/docs/setup.rst
+++ b/docs/setup.rst
@@ -332,11 +332,11 @@ To enable it, simply set::
use_celery = true
-and add or change the ``celery.*`` and ``broker.*`` configuration variables.
+and add or change the ``celery.*`` configuration variables.
-Remember that the ini files use the format with '.' and not with '_' like
-Celery. So for example setting `BROKER_HOST` in Celery means setting
-`broker.host` in the configuration file.
+Configuration settings are prefixed with 'celery.', so for example setting
+`broker_url` in Celery means setting `celery.broker_url` in the configuration
+file.
To start the Celery process, run::
@@ -557,7 +557,7 @@ that, you'll need to:
os.chdir('/srv/kallithea/')
import site
- site.addsitedir("/srv/kallithea/venv/lib/python2.7/site-packages")
+ site.addsitedir("/srv/kallithea/venv/lib/python3.7/site-packages")
ini = '/srv/kallithea/my.ini'
from logging.config import fileConfig
@@ -624,7 +624,6 @@ the ``init.d`` directory of the Kallithe
.. __: https://kallithea-scm.org/repos/kallithea/files/tip/init.d/ .
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
.. _python: http://www.python.org/
.. _Python regular expression documentation: https://docs.python.org/2/library/re.html
.. _Mercurial: https://www.mercurial-scm.org/
diff --git a/docs/upgrade.rst b/docs/upgrade.rst
--- a/docs/upgrade.rst
+++ b/docs/upgrade.rst
@@ -241,6 +241,3 @@ To update the hooks of your Git reposito
.. note::
Kallithea does not use hooks on Mercurial repositories. This step is thus
not necessary if you only have Mercurial repositories.
-
-
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
diff --git a/docs/usage/troubleshooting.rst b/docs/usage/troubleshooting.rst
--- a/docs/usage/troubleshooting.rst
+++ b/docs/usage/troubleshooting.rst
@@ -8,7 +8,7 @@ Troubleshooting
:A: Make sure either to set the ``static_files = true`` in the .ini file or
double check the root path for your http setup. It should point to
for example:
- ``/home/my-virtual-python/lib/python2.7/site-packages/kallithea/public``
+ ``/home/my-virtual-python/lib/python3.7/site-packages/kallithea/public``
|
@@ -67,7 +67,6 @@ Troubleshooting
you have installed the latest Windows patches (especially KB2789397).
-.. _virtualenv: http://pypi.python.org/pypi/virtualenv
.. _python: http://www.python.org/
.. _mercurial: https://www.mercurial-scm.org/
.. _celery: http://celeryproject.org/
diff --git a/kallithea/__init__.py b/kallithea/__init__.py
--- a/kallithea/__init__.py
+++ b/kallithea/__init__.py
@@ -31,13 +31,16 @@ import platform
import sys
-VERSION = (0, 5, 2)
+if sys.version_info < (3, 6):
+ raise Exception('Kallithea requires python 3.6 or later')
+
+VERSION = (0, 5, 99)
BACKENDS = {
'hg': 'Mercurial repository',
'git': 'Git repository',
}
-CELERY_ON = False
+CELERY_APP = None # set to Celery app instance if using Celery
CELERY_EAGER = False
CONFIG = {}
diff --git a/kallithea/alembic/versions/a0a1bf09c143_db_add_ui_composite_index_and_drop_.py b/kallithea/alembic/versions/a0a1bf09c143_db_add_ui_composite_index_and_drop_.py
new file mode 100644
--- /dev/null
+++ b/kallithea/alembic/versions/a0a1bf09c143_db_add_ui_composite_index_and_drop_.py
@@ -0,0 +1,48 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""db: add Ui composite index and drop UniqueConstraint on Ui.ui_key
+
+Revision ID: a0a1bf09c143
+Revises: d7ec25b66e47
+Create Date: 2020-03-12 22:41:14.421837
+
+"""
+
+# The following opaque hexadecimal identifiers ("revisions") are used
+# by Alembic to track this migration script and its relations to others.
+revision = 'a0a1bf09c143'
+down_revision = 'd7ec25b66e47'
+branch_labels = None
+depends_on = None
+
+import sqlalchemy as sa
+from alembic import op
+
+
+def upgrade():
+ meta = sa.MetaData()
+ meta.reflect(bind=op.get_bind())
+
+ with op.batch_alter_table('ui', schema=None) as batch_op:
+ batch_op.create_index('ui_ui_section_ui_key_idx', ['ui_section', 'ui_key'], unique=False)
+ if any(i.name == 'uq_ui_ui_key' for i in meta.tables['ui'].constraints):
+ batch_op.drop_constraint('uq_ui_ui_key', type_='unique')
+ elif any(i.name == 'ui_ui_key_key' for i in meta.tables['ui'].constraints): # table was created with old naming before 1a080d4e926e
+ batch_op.drop_constraint('ui_ui_key_key', type_='unique')
+
+
+def downgrade():
+ with op.batch_alter_table('ui', schema=None) as batch_op:
+ batch_op.create_unique_constraint('uq_ui_ui_key', ['ui_key'])
+ batch_op.drop_index('ui_ui_section_ui_key_idx')
diff --git a/kallithea/bin/base.py b/kallithea/bin/base.py
--- a/kallithea/bin/base.py
+++ b/kallithea/bin/base.py
@@ -29,9 +29,10 @@ import os
import pprint
import random
import sys
-import urllib2
+import urllib.request
-from kallithea.lib.compat import json
+from kallithea.lib import ext_json
+from kallithea.lib.utils2 import ascii_bytes
CONFIG_NAME = '.config/kallithea'
@@ -67,12 +68,12 @@ def api_call(apikey, apihost, method=Non
raise Exception('please specify method name !')
apihost = apihost.rstrip('/')
id_ = random.randrange(1, 9999)
- req = urllib2.Request('%s/_admin/api' % apihost,
- data=json.dumps(_build_data(id_)),
+ req = urllib.request.Request('%s/_admin/api' % apihost,
+ data=ascii_bytes(ext_json.dumps(_build_data(id_))),
headers={'content-type': 'text/plain'})
- ret = urllib2.urlopen(req)
+ ret = urllib.request.urlopen(req)
raw_json = ret.read()
- json_data = json.loads(raw_json)
+ json_data = ext_json.loads(raw_json)
id_ret = json_data['id']
if id_ret == id_:
return json_data
@@ -107,7 +108,7 @@ class RcConf(object):
def __getitem__(self, key):
return self._conf[key]
- def __nonzero__(self):
+ def __bool__(self):
if self._conf:
return True
return False
@@ -128,7 +129,7 @@ class RcConf(object):
if os.path.exists(self._conf_name):
update = True
with open(self._conf_name, 'wb') as f:
- json.dump(config, f, indent=4)
+ ext_json.dump(config, f, indent=4)
f.write('\n')
if update:
@@ -146,7 +147,7 @@ class RcConf(object):
config = {}
try:
with open(self._conf_name, 'rb') as conf:
- config = json.load(conf)
+ config = ext_json.load(conf)
except IOError as e:
sys.stderr.write(str(e) + '\n')
@@ -159,7 +160,7 @@ class RcConf(object):
"""
try:
with open(self._conf_name, 'rb') as conf:
- return json.load(conf)
+ return ext_json.load(conf)
except IOError as e:
#sys.stderr.write(str(e) + '\n')
pass
diff --git a/kallithea/bin/kallithea_api.py b/kallithea/bin/kallithea_api.py
--- a/kallithea/bin/kallithea_api.py
+++ b/kallithea/bin/kallithea_api.py
@@ -25,12 +25,11 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-from __future__ import print_function
-
import argparse
+import json
import sys
-from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call, json
+from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call
def argparser(argv):
@@ -60,7 +59,7 @@ def argparser(argv):
'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
default=FORMAT_PRETTY
)
- args, other = parser.parse_known_args()
+ args, other = parser.parse_known_args(args=argv[1:])
return parser, args, other
@@ -101,7 +100,7 @@ def main(argv=None):
parser.error('Please specify method name')
try:
- margs = dict(map(lambda s: s.split(':', 1), other))
+ margs = dict(s.split(':', 1) for s in other)
except ValueError:
sys.stderr.write('Error parsing arguments \n')
sys.exit()
diff --git a/kallithea/bin/kallithea_cli.py b/kallithea/bin/kallithea_cli.py
--- a/kallithea/bin/kallithea_cli.py
+++ b/kallithea/bin/kallithea_cli.py
@@ -25,3 +25,8 @@ import kallithea.bin.kallithea_cli_repo
import kallithea.bin.kallithea_cli_ssh
# 'cli' is the main entry point for 'kallithea-cli', specified in setup.py as entry_points console_scripts
from kallithea.bin.kallithea_cli_base import cli
+
+
+# mute pyflakes "imported but unused"
+assert kallithea.bin.kallithea_cli_ssh
+assert cli
diff --git a/kallithea/bin/kallithea_cli_base.py b/kallithea/bin/kallithea_cli_base.py
--- a/kallithea/bin/kallithea_cli_base.py
+++ b/kallithea/bin/kallithea_cli_base.py
@@ -12,7 +12,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import cStringIO
+import configparser
import functools
import logging.config
import os
@@ -23,6 +23,7 @@ import click
import paste.deploy
import kallithea
+import kallithea.config.middleware
# kallithea_cli is usually invoked through the 'kallithea-cli' wrapper script
@@ -71,12 +72,12 @@ def register_command(config_file=False,
def runtime_wrapper(config_file, *args, **kwargs):
path_to_ini_file = os.path.realpath(config_file)
kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
- config_bytes = read_config(path_to_ini_file, strip_section_prefix=annotated.__name__)
- logging.config.fileConfig(cStringIO.StringIO(config_bytes),
+ cp = configparser.ConfigParser(strict=False)
+ cp.read_string(read_config(path_to_ini_file, strip_section_prefix=annotated.__name__))
+ logging.config.fileConfig(cp,
{'__file__': path_to_ini_file, 'here': os.path.dirname(path_to_ini_file)})
if config_file_initialize_app:
- kallithea.config.middleware.make_app_without_logging(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
- kallithea.lib.utils.setup_cache_regions(kallithea.CONFIG)
+ kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
return annotated(*args, **kwargs)
return cli_command(runtime_wrapper)
return annotator
diff --git a/kallithea/bin/kallithea_cli_celery.py b/kallithea/bin/kallithea_cli_celery.py
--- a/kallithea/bin/kallithea_cli_celery.py
+++ b/kallithea/bin/kallithea_cli_celery.py
@@ -12,6 +12,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import celery.bin.worker
import click
import kallithea
@@ -31,10 +32,9 @@ def celery_run(celery_args):
by this CLI command.
"""
- if not kallithea.CELERY_ON:
+ if not kallithea.CELERY_APP:
raise Exception('Please set use_celery = true in .ini config '
'file before running this command')
- from kallithea.lib import celerypylons
- cmd = celerypylons.worker.worker(celerypylons.app)
+ cmd = celery.bin.worker.worker(kallithea.CELERY_APP)
return cmd.run_from_argv(None, command='celery-run -c CONFIG_FILE --', argv=list(celery_args))
diff --git a/kallithea/bin/kallithea_cli_config.py b/kallithea/bin/kallithea_cli_config.py
--- a/kallithea/bin/kallithea_cli_config.py
+++ b/kallithea/bin/kallithea_cli_config.py
@@ -89,6 +89,16 @@ def config_create(config_file, key_value
mako_variable_values.update({
'uuid': lambda: uuid.uuid4().hex,
})
+
+ click.echo('Creating config file using:')
+ for key, value in inifile.default_variables.items():
+ if isinstance(value, str):
+ options = inifile.variable_options.get(key)
+ if options:
+ click.echo(' %s=%s (options: %s)' % (key, mako_variable_values.get(key, value), ', '.join(options)))
+ else:
+ click.echo(' %s=%s' % (key, mako_variable_values.get(key, value)))
+
try:
config_file_abs = os.path.abspath(config_file)
inifile.create(config_file_abs, mako_variable_values, ini_settings)
diff --git a/kallithea/bin/kallithea_cli_db.py b/kallithea/bin/kallithea_cli_db.py
--- a/kallithea/bin/kallithea_cli_db.py
+++ b/kallithea/bin/kallithea_cli_db.py
@@ -67,7 +67,7 @@ def db_create(user, password, email, rep
Session().commit()
# initial repository scan
- kallithea.config.middleware.make_app_without_logging(
+ kallithea.config.middleware.make_app(
kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
added, _ = kallithea.lib.utils.repo2db_mapper(kallithea.model.scm.ScmModel().repo_scan())
if added:
diff --git a/kallithea/bin/kallithea_cli_iis.py b/kallithea/bin/kallithea_cli_iis.py
--- a/kallithea/bin/kallithea_cli_iis.py
+++ b/kallithea/bin/kallithea_cli_iis.py
@@ -67,6 +67,7 @@ def iis_install(virtualdir):
try:
import isapi_wsgi
+ assert isapi_wsgi
except ImportError:
sys.stderr.write('missing requirement: isapi-wsgi not installed\n')
sys.exit(1)
diff --git a/kallithea/bin/kallithea_cli_index.py b/kallithea/bin/kallithea_cli_index.py
--- a/kallithea/bin/kallithea_cli_index.py
+++ b/kallithea/bin/kallithea_cli_index.py
@@ -36,7 +36,7 @@ from kallithea.model.repo import RepoMod
@click.option('--repo-location', help='Base path of repositories to index. Default: all')
@click.option('--index-only', help='Comma-separated list of repositories to build index on. Default: all')
@click.option('--update-only', help='Comma-separated list of repositories to re-build index on. Default: all')
-@click.option('-f', '--full', 'full_index', help='Recreate the index from scratch')
+@click.option('-f', '--full/--no-full', 'full_index', help='Recreate the index from scratch')
def index_create(repo_location, index_only, update_only, full_index):
"""Create or update full text search index"""
diff --git a/kallithea/bin/kallithea_cli_ishell.py b/kallithea/bin/kallithea_cli_ishell.py
--- a/kallithea/bin/kallithea_cli_ishell.py
+++ b/kallithea/bin/kallithea_cli_ishell.py
@@ -20,12 +20,10 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-from __future__ import print_function
-
import sys
import kallithea.bin.kallithea_cli_base as cli_base
-from kallithea.model.db import *
+from kallithea.model.db import * # these names will be directly available in the IPython shell
@cli_base.register_command(config_file_initialize_app=True)
diff --git a/kallithea/bin/kallithea_cli_repo.py b/kallithea/bin/kallithea_cli_repo.py
--- a/kallithea/bin/kallithea_cli_repo.py
+++ b/kallithea/bin/kallithea_cli_repo.py
@@ -26,10 +26,11 @@ import shutil
import click
+import kallithea
import kallithea.bin.kallithea_cli_base as cli_base
from kallithea.lib.utils import REMOVED_REPO_PAT, repo2db_mapper
-from kallithea.lib.utils2 import ask_ok, safe_str, safe_unicode
-from kallithea.model.db import Repository, Ui
+from kallithea.lib.utils2 import ask_ok
+from kallithea.model.db import Repository
from kallithea.model.meta import Session
from kallithea.model.scm import ScmModel
@@ -74,7 +75,7 @@ def repo_update_metadata(repositories):
if not repositories:
repo_list = Repository.query().all()
else:
- repo_names = [safe_unicode(n.strip()) for n in repositories]
+ repo_names = [n.strip() for n in repositories]
repo_list = list(Repository.query()
.filter(Repository.repo_name.in_(repo_names)))
@@ -110,7 +111,7 @@ def repo_purge_deleted(ask, older_than):
return
parts = parts.groupdict()
time_params = {}
- for (name, param) in parts.iteritems():
+ for name, param in parts.items():
if param:
time_params[name] = int(param)
return datetime.timedelta(**time_params)
@@ -125,9 +126,9 @@ def repo_purge_deleted(ask, older_than):
date_part = name[4:19] # 4:19 since we don't parse milliseconds
return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
- repos_location = Ui.get_repos_location()
+ repos_location = kallithea.CONFIG['base_path']
to_remove = []
- for dn_, dirs, f in os.walk(safe_str(repos_location)):
+ for dn_, dirs, f in os.walk(repos_location):
alldirs = list(dirs)
del dirs[:]
if ('.hg' in alldirs or
@@ -175,9 +176,8 @@ def repo_purge_deleted(ask, older_than):
remove = True
else:
remove = ask_ok('The following repositories will be removed completely:\n%s\n'
- 'Do you want to proceed? [y/n] '
- % '\n'.join(['%s deleted on %s' % (safe_str(x[0]), safe_str(x[1]))
- for x in to_remove]))
+ 'Do you want to proceed? [y/n] ' %
+ '\n'.join('%s deleted on %s' % (path, date_) for path, date_ in to_remove))
if remove:
for path, date_ in to_remove:
diff --git a/kallithea/bin/kallithea_cli_ssh.py b/kallithea/bin/kallithea_cli_ssh.py
--- a/kallithea/bin/kallithea_cli_ssh.py
+++ b/kallithea/bin/kallithea_cli_ssh.py
@@ -14,7 +14,6 @@
import logging
import os
-import re
import shlex
import sys
diff --git a/kallithea/bin/kallithea_gist.py b/kallithea/bin/kallithea_gist.py
--- a/kallithea/bin/kallithea_gist.py
+++ b/kallithea/bin/kallithea_gist.py
@@ -25,15 +25,14 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-from __future__ import print_function
-
import argparse
import fileinput
+import json
import os
import stat
import sys
-from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call, json
+from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call
def argparser(argv):
@@ -69,7 +68,7 @@ def argparser(argv):
'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
default=FORMAT_PRETTY
)
- args, other = parser.parse_known_args()
+ args, other = parser.parse_known_args(args=argv[1:])
return parser, args, other
diff --git a/kallithea/bin/ldap_sync.py b/kallithea/bin/ldap_sync.py
--- a/kallithea/bin/ldap_sync.py
+++ b/kallithea/bin/ldap_sync.py
@@ -25,15 +25,14 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-from __future__ import print_function
-
-import urllib2
+import urllib.request
import uuid
-from ConfigParser import ConfigParser
+from configparser import ConfigParser
import ldap
-from kallithea.lib.compat import json
+from kallithea.lib import ext_json
+from kallithea.lib.utils2 import ascii_bytes
config = ConfigParser()
@@ -80,12 +79,12 @@ class API(object):
uid = str(uuid.uuid1())
data = self.get_api_data(uid, method, args)
- data = json.dumps(data)
+ data = ascii_bytes(ext_json.dumps(data))
headers = {'content-type': 'text/plain'}
- req = urllib2.Request(self.url, data, headers)
+ req = urllib.request.Request(self.url, data, headers)
- response = urllib2.urlopen(req)
- response = json.load(response)
+ response = urllib.request.urlopen(req)
+ response = ext_json.load(response)
if uid != response["id"]:
raise InvalidResponseIDError("UUID does not match.")
diff --git a/kallithea/config/app_cfg.py b/kallithea/config/app_cfg.py
--- a/kallithea/config/app_cfg.py
+++ b/kallithea/config/app_cfg.py
@@ -28,20 +28,21 @@ import tg
from alembic.migration import MigrationContext
from alembic.script.base import ScriptDirectory
from sqlalchemy import create_engine
-from tg import hooks
from tg.configuration import AppConfig
from tg.support.converters import asbool
import kallithea.lib.locale
import kallithea.model.base
-from kallithea.lib.auth import set_available_permissions
+import kallithea.model.meta
+from kallithea.lib import celerypylons
from kallithea.lib.middleware.https_fixup import HttpsFixup
from kallithea.lib.middleware.permanent_repo_url import PermanentRepoUrl
from kallithea.lib.middleware.simplegit import SimpleGit
from kallithea.lib.middleware.simplehg import SimpleHg
from kallithea.lib.middleware.wrapper import RequestWrapper
-from kallithea.lib.utils import check_git_version, load_rcextensions, make_ui, set_app_settings, set_indexer_config, set_vcs_config
+from kallithea.lib.utils import check_git_version, load_rcextensions, set_app_settings, set_indexer_config, set_vcs_config
from kallithea.lib.utils2 import str2bool
+from kallithea.model import db
log = logging.getLogger(__name__)
@@ -98,17 +99,12 @@ class KallitheaAppConfig(AppConfig):
# Disable transaction manager -- currently Kallithea takes care of transactions itself
self['tm.enabled'] = False
- # Set the i18n source language so TG doesn't search beyond 'en' in Accept-Language.
- # Don't force the default here if configuration force something else.
- if not self.get('i18n.lang'):
- self['i18n.lang'] = 'en'
+ # Set the default i18n source language so TG doesn't search beyond 'en' in Accept-Language.
+ self['i18n.lang'] = 'en'
base_config = KallitheaAppConfig()
-# TODO still needed as long as we use pylonslib
-sys.modules['pylons'] = tg
-
# DebugBar, a debug toolbar for TurboGears2.
# (https://github.com/TurboGears/tgext.debugbar)
# To enable it, install 'tgext.debugbar' and 'kajiki', and run Kallithea with
@@ -117,6 +113,7 @@ sys.modules['pylons'] = tg
try:
from tgext.debugbar import enable_debugbar
import kajiki # only to check its existence
+ assert kajiki
except ImportError:
pass
else:
@@ -161,15 +158,14 @@ def setup_configuration(app):
sys.exit(1)
# store some globals into kallithea
- kallithea.CELERY_ON = str2bool(config.get('use_celery'))
- kallithea.CELERY_EAGER = str2bool(config.get('celery.always.eager'))
+ kallithea.DEFAULT_USER_ID = db.User.get_default_user().user_id
+
+ if str2bool(config.get('use_celery')):
+ kallithea.CELERY_APP = celerypylons.make_app()
kallithea.CONFIG = config
load_rcextensions(root_path=config['here'])
- set_available_permissions(config)
- repos_path = make_ui().configitems('paths')[0][1]
- config['base_path'] = repos_path
set_app_settings(config)
instance_id = kallithea.CONFIG.get('instance_id', '*')
@@ -188,8 +184,10 @@ def setup_configuration(app):
check_git_version()
+ kallithea.model.meta.Session.remove()
-hooks.register('configure_new_app', setup_configuration)
+
+tg.hooks.register('configure_new_app', setup_configuration)
def setup_application(app):
@@ -213,4 +211,4 @@ def setup_application(app):
return app
-hooks.register('before_config', setup_application)
+tg.hooks.register('before_config', setup_application)
diff --git a/kallithea/config/conf.py b/kallithea/config/conf.py
--- a/kallithea/config/conf.py
+++ b/kallithea/config/conf.py
@@ -35,7 +35,7 @@ LANGUAGES_EXTENSIONS_MAP = pygmentsutils
# Whoosh index targets
# Extensions we want to index content of using whoosh
-INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()
+INDEX_EXTENSIONS = list(LANGUAGES_EXTENSIONS_MAP)
# Filenames we want to index content of using whoosh
INDEX_FILENAMES = pygmentsutils.get_index_filenames()
diff --git a/kallithea/config/middleware.py b/kallithea/config/middleware.py
--- a/kallithea/config/middleware.py
+++ b/kallithea/config/middleware.py
@@ -24,11 +24,6 @@ __all__ = ['make_app']
make_base_app = base_config.setup_tg_wsgi_app(load_environment)
-def make_app_without_logging(global_conf, full_stack=True, **app_conf):
- """The core of make_app for use from gearbox commands (other than 'serve')"""
- return make_base_app(global_conf, full_stack=full_stack, **app_conf)
-
-
def make_app(global_conf, full_stack=True, **app_conf):
"""
Set up Kallithea with the settings found in the PasteDeploy configuration
@@ -47,4 +42,6 @@ def make_app(global_conf, full_stack=Tru
``app_conf`` contains all the application-specific settings (those defined
under ``[app:main]``.
"""
- return make_app_without_logging(global_conf, full_stack=full_stack, **app_conf)
+ assert app_conf.get('sqlalchemy.url') # must be called with a Kallithea .ini file, which for example must have this config option
+ assert global_conf.get('here') and global_conf.get('__file__') # app config should be initialized the paste way ...
+ return make_base_app(global_conf, full_stack=full_stack, **app_conf)
diff --git a/kallithea/config/routing.py b/kallithea/config/routing.py
--- a/kallithea/config/routing.py
+++ b/kallithea/config/routing.py
@@ -19,14 +19,34 @@ may take precedent over the more generic
refer to the routes manual at http://routes.groovie.org/docs/
"""
-from routes import Mapper
+import routes
from tg import request
+from kallithea.lib.utils2 import safe_str
+
# prefix for non repository related links needs to be prefixed with `/`
ADMIN_PREFIX = '/_admin'
+class Mapper(routes.Mapper):
+ """
+ Subclassed Mapper with routematch patched to decode "unicode" str url to
+ *real* unicode str before applying matches and invoking controller methods.
+ """
+
+ def routematch(self, url=None, environ=None):
+ """
+ routematch that also decode url from "fake bytes" to real unicode
+ string before matching and invoking controllers.
+ """
+ # Process url like get_path_info does ... but PATH_INFO has already
+ # been retrieved from environ and is passed, so - let's just use that
+ # instead.
+ url = safe_str(url.encode('latin1'))
+ return super().routematch(url=url, environ=environ)
+
+
def make_map(config):
"""Create, configure and return the routes Mapper"""
rmap = Mapper(directory=config['paths']['controllers'],
@@ -86,7 +106,7 @@ def make_map(config):
#==========================================================================
# MAIN PAGE
- rmap.connect('home', '/', controller='home', action='index')
+ rmap.connect('home', '/', controller='home')
rmap.connect('about', '/about', controller='home', action='about')
rmap.redirect('/favicon.ico', '/images/favicon.ico')
rmap.connect('repo_switcher_data', '/_repos', controller='home',
@@ -106,7 +126,7 @@ def make_map(config):
m.connect("repos", "/repos",
action="create", conditions=dict(method=["POST"]))
m.connect("repos", "/repos",
- action="index", conditions=dict(method=["GET"]))
+ conditions=dict(method=["GET"]))
m.connect("new_repo", "/create_repository",
action="create_repository", conditions=dict(method=["GET"]))
m.connect("update_repo", "/repos/{repo_name:.*?}",
@@ -121,7 +141,7 @@ def make_map(config):
m.connect("repos_groups", "/repo_groups",
action="create", conditions=dict(method=["POST"]))
m.connect("repos_groups", "/repo_groups",
- action="index", conditions=dict(method=["GET"]))
+ conditions=dict(method=["GET"]))
m.connect("new_repos_group", "/repo_groups/new",
action="new", conditions=dict(method=["GET"]))
m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
@@ -161,9 +181,9 @@ def make_map(config):
m.connect("new_user", "/users/new",
action="create", conditions=dict(method=["POST"]))
m.connect("users", "/users",
- action="index", conditions=dict(method=["GET"]))
+ conditions=dict(method=["GET"]))
m.connect("formatted_users", "/users.{format}",
- action="index", conditions=dict(method=["GET"]))
+ conditions=dict(method=["GET"]))
m.connect("new_user", "/users/new",
action="new", conditions=dict(method=["GET"]))
m.connect("update_user", "/users/{id}",
@@ -216,7 +236,7 @@ def make_map(config):
m.connect("users_groups", "/user_groups",
action="create", conditions=dict(method=["POST"]))
m.connect("users_groups", "/user_groups",
- action="index", conditions=dict(method=["GET"]))
+ conditions=dict(method=["GET"]))
m.connect("new_users_group", "/user_groups/new",
action="new", conditions=dict(method=["GET"]))
m.connect("update_users_group", "/user_groups/{id}",
@@ -263,8 +283,7 @@ def make_map(config):
# ADMIN DEFAULTS ROUTES
with rmap.submapper(path_prefix=ADMIN_PREFIX,
controller='admin/defaults') as m:
- m.connect('defaults', '/defaults',
- action="index")
+ m.connect('defaults', '/defaults')
m.connect('defaults_update', 'defaults/{id}/update',
action="update", conditions=dict(method=["POST"]))
@@ -370,7 +389,7 @@ def make_map(config):
m.connect("gists", "/gists",
action="create", conditions=dict(method=["POST"]))
m.connect("gists", "/gists",
- action="index", conditions=dict(method=["GET"]))
+ conditions=dict(method=["GET"]))
m.connect("new_gist", "/gists/new",
action="new", conditions=dict(method=["GET"]))
@@ -396,7 +415,7 @@ def make_map(config):
# ADMIN MAIN PAGES
with rmap.submapper(path_prefix=ADMIN_PREFIX,
controller='admin/admin') as m:
- m.connect('admin_home', '', action='index')
+ m.connect('admin_home', '')
m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9. _-]*}',
action='add_repo')
#==========================================================================
@@ -408,7 +427,7 @@ def make_map(config):
# USER JOURNAL
rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
- controller='journal', action='index')
+ controller='journal')
rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
controller='journal', action='journal_rss')
rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
@@ -475,7 +494,7 @@ def make_map(config):
#==========================================================================
rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
controller='admin/repos', action='repo_creating')
- rmap.connect('repo_check_home', '/{repo_name:.*?}/crepo_check',
+ rmap.connect('repo_check_home', '/{repo_name:.*?}/repo_check_creating',
controller='admin/repos', action='repo_check')
rmap.connect('summary_home', '/{repo_name:.*?}',
@@ -544,13 +563,6 @@ def make_map(config):
controller='admin/repos', action="edit_advanced_fork",
conditions=dict(method=["POST"], function=check_repo))
- rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
- controller='admin/repos', action="edit_caches",
- conditions=dict(method=["GET"], function=check_repo))
- rmap.connect("update_repo_caches", "/{repo_name:.*?}/settings/caches",
- controller='admin/repos', action="edit_caches",
- conditions=dict(method=["POST"], function=check_repo))
-
rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
controller='admin/repos', action="edit_remote",
conditions=dict(method=["GET"], function=check_repo))
@@ -602,7 +614,7 @@ def make_map(config):
rmap.connect('compare_home',
'/{repo_name:.*?}/compare',
- controller='compare', action='index',
+ controller='compare',
conditions=dict(function=check_repo))
rmap.connect('compare_url',
@@ -616,7 +628,7 @@ def make_map(config):
rmap.connect('pullrequest_home',
'/{repo_name:.*?}/pull-request/new', controller='pullrequests',
- action='index', conditions=dict(function=check_repo,
+ conditions=dict(function=check_repo,
method=["GET"]))
rmap.connect('pullrequest_repo_info',
@@ -674,7 +686,7 @@ def make_map(config):
controller='changelog', conditions=dict(function=check_repo))
rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}',
- controller='changelog', f_path=None,
+ controller='changelog',
conditions=dict(function=check_repo))
rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
@@ -719,8 +731,8 @@ def make_map(config):
rmap.connect('files_annotate_home',
'/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
- controller='files', action='index', revision='tip',
- f_path='', annotate=True, conditions=dict(function=check_repo))
+ controller='files', revision='tip',
+ f_path='', annotate='1', conditions=dict(function=check_repo))
rmap.connect('files_edit_home',
'/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
diff --git a/kallithea/controllers/admin/admin.py b/kallithea/controllers/admin/admin.py
--- a/kallithea/controllers/admin/admin.py
+++ b/kallithea/controllers/admin/admin.py
@@ -36,7 +36,6 @@ from whoosh import query
from whoosh.qparser.dateparse import DateParserPlugin
from whoosh.qparser.default import QueryParser
-from kallithea.config.routing import url
from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
from kallithea.lib.base import BaseController, render
from kallithea.lib.indexers import JOURNAL_SCHEMA
@@ -61,7 +60,7 @@ def _journal_filter(user_log, search_ter
if search_term:
qp = QueryParser('repository', schema=JOURNAL_SCHEMA)
qp.add_plugin(DateParserPlugin())
- qry = qp.parse(unicode(search_term))
+ qry = qp.parse(search_term)
log.debug('Filtering using parsed query %r', qry)
def wildcard_handler(col, wc_term):
@@ -139,10 +138,8 @@ class AdminController(BaseController):
p = safe_int(request.GET.get('page'), 1)
- def url_generator(**kw):
- return url.current(filter=c.search_term, **kw)
-
- c.users_log = Page(users_log, page=p, items_per_page=10, url=url_generator)
+ c.users_log = Page(users_log, page=p, items_per_page=10,
+ filter=c.search_term)
if request.environ.get('HTTP_X_PARTIAL_XHR'):
return render('admin/admin_log.html')
diff --git a/kallithea/controllers/admin/auth_settings.py b/kallithea/controllers/admin/auth_settings.py
--- a/kallithea/controllers/admin/auth_settings.py
+++ b/kallithea/controllers/admin/auth_settings.py
@@ -37,7 +37,6 @@ from kallithea.lib import auth_modules
from kallithea.lib import helpers as h
from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
from kallithea.lib.base import BaseController, render
-from kallithea.lib.compat import formatted_json
from kallithea.model.db import Setting
from kallithea.model.forms import AuthSettingsForm
from kallithea.model.meta import Session
@@ -87,7 +86,7 @@ class AuthSettingsController(BaseControl
# we want to show , separated list of enabled plugins
c.defaults['auth_plugins'] = ','.join(c.enabled_plugin_names)
- log.debug(formatted_json(defaults))
+ log.debug('defaults: %s', defaults)
return formencode.htmlfill.render(
render('admin/auth/auth_settings.html'),
defaults=c.defaults,
@@ -103,7 +102,7 @@ class AuthSettingsController(BaseControl
def auth_settings(self):
"""POST create and store auth settings"""
self.__load_defaults()
- log.debug("POST Result: %s", formatted_json(dict(request.POST)))
+ log.debug("POST Result: %s", dict(request.POST))
# First, parse only the plugin list (not the plugin settings).
_auth_plugins_validator = AuthSettingsForm([]).fields['auth_plugins']
diff --git a/kallithea/controllers/admin/defaults.py b/kallithea/controllers/admin/defaults.py
--- a/kallithea/controllers/admin/defaults.py
+++ b/kallithea/controllers/admin/defaults.py
@@ -31,7 +31,6 @@ import traceback
import formencode
from formencode import htmlfill
from tg import request
-from tg import tmpl_context as c
from tg.i18n import ugettext as _
from webob.exc import HTTPFound
@@ -69,7 +68,7 @@ class DefaultsController(BaseController)
try:
form_result = _form.to_python(dict(request.POST))
- for k, v in form_result.iteritems():
+ for k, v in form_result.items():
setting = Setting.create_or_update(k, v)
Session().commit()
h.flash(_('Default settings updated successfully'),
diff --git a/kallithea/controllers/admin/gists.py b/kallithea/controllers/admin/gists.py
--- a/kallithea/controllers/admin/gists.py
+++ b/kallithea/controllers/admin/gists.py
@@ -40,7 +40,7 @@ from kallithea.lib import helpers as h
from kallithea.lib.auth import LoginRequired
from kallithea.lib.base import BaseController, jsonify, render
from kallithea.lib.page import Page
-from kallithea.lib.utils2 import safe_int, safe_unicode, time_to_datetime
+from kallithea.lib.utils2 import safe_int, safe_str, time_to_datetime
from kallithea.lib.vcs.exceptions import NodeNotChangedError, VCSError
from kallithea.model.db import Gist
from kallithea.model.forms import GistForm
@@ -71,6 +71,11 @@ class GistsController(BaseController):
not_default_user = not request.authuser.is_default_user
c.show_private = request.GET.get('private') and not_default_user
c.show_public = request.GET.get('public') and not_default_user
+ url_params = {}
+ if c.show_public:
+ url_params['public'] = 1
+ elif c.show_private:
+ url_params['private'] = 1
gists = Gist().query() \
.filter_by(is_expired=False) \
@@ -97,7 +102,8 @@ class GistsController(BaseController):
c.gists = gists
p = safe_int(request.GET.get('page'), 1)
- c.gists_pager = Page(c.gists, page=p, items_per_page=10)
+ c.gists_pager = Page(c.gists, page=p, items_per_page=10,
+ **url_params)
return render('admin/gists/index.html')
@LoginRequired()
@@ -176,7 +182,10 @@ class GistsController(BaseController):
log.error(traceback.format_exc())
raise HTTPNotFound()
if format == 'raw':
- content = '\n\n'.join([f.content for f in c.files if (f_path is None or safe_unicode(f.path) == f_path)])
+ content = '\n\n'.join(
+ safe_str(f.content)
+ for f in c.files if (f_path is None or f.path == f_path)
+ )
response.content_type = 'text/plain'
return content
return render('admin/gists/show.html')
diff --git a/kallithea/controllers/admin/my_account.py b/kallithea/controllers/admin/my_account.py
--- a/kallithea/controllers/admin/my_account.py
+++ b/kallithea/controllers/admin/my_account.py
@@ -279,8 +279,8 @@ class MyAccountController(BaseController
Session().commit()
SshKeyModel().write_authorized_keys()
h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
- except SshKeyModelException as errors:
- h.flash(errors.message, category='error')
+ except SshKeyModelException as e:
+ h.flash(e.args[0], category='error')
raise HTTPFound(location=url('my_account_ssh_keys'))
@IfSshEnabled
@@ -291,6 +291,6 @@ class MyAccountController(BaseController
Session().commit()
SshKeyModel().write_authorized_keys()
h.flash(_("SSH key successfully deleted"), category='success')
- except SshKeyModelException as errors:
- h.flash(errors.message, category='error')
+ except SshKeyModelException as e:
+ h.flash(e.args[0], category='error')
raise HTTPFound(location=url('my_account_ssh_keys'))
diff --git a/kallithea/controllers/admin/repo_groups.py b/kallithea/controllers/admin/repo_groups.py
--- a/kallithea/controllers/admin/repo_groups.py
+++ b/kallithea/controllers/admin/repo_groups.py
@@ -25,7 +25,6 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-import itertools
import logging
import traceback
@@ -37,7 +36,6 @@ from tg.i18n import ugettext as _
from tg.i18n import ungettext
from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
-import kallithea
from kallithea.config.routing import url
from kallithea.lib import helpers as h
from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired
@@ -93,10 +91,8 @@ class RepoGroupsController(BaseControlle
return data
def _revoke_perms_on_yourself(self, form_result):
- _up = filter(lambda u: request.authuser.username == u[0],
- form_result['perms_updates'])
- _new = filter(lambda u: request.authuser.username == u[0],
- form_result['perms_new'])
+ _up = [u for u in form_result['perms_updates'] if request.authuser.username == u[0]]
+ _new = [u for u in form_result['perms_new'] if request.authuser.username == u[0]]
if _new and _new[0][1] != 'group.admin' or _up and _up[0][1] != 'group.admin':
return True
return False
@@ -105,24 +101,20 @@ class RepoGroupsController(BaseControlle
_list = RepoGroup.query(sorted=True).all()
group_iter = RepoGroupList(_list, perm_level='admin')
repo_groups_data = []
- total_records = len(group_iter)
_tmpl_lookup = app_globals.mako_lookup
template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
- repo_group_name = lambda repo_group_name, children_groups: (
- template.get_def("repo_group_name")
- .render(repo_group_name, children_groups, _=_, h=h, c=c)
- )
- repo_group_actions = lambda repo_group_id, repo_group_name, gr_count: (
- template.get_def("repo_group_actions")
- .render(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
- ungettext=ungettext)
- )
+ def repo_group_name(repo_group_name, children_groups):
+ return template.get_def("repo_group_name") \
+ .render_unicode(repo_group_name, children_groups, _=_, h=h, c=c)
+
+ def repo_group_actions(repo_group_id, repo_group_name, gr_count):
+ return template.get_def("repo_group_actions") \
+ .render_unicode(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
+ ungettext=ungettext)
for repo_gr in group_iter:
- children_groups = map(h.safe_unicode,
- itertools.chain((g.name for g in repo_gr.parents),
- (x.name for x in [repo_gr])))
+ children_groups = [g.name for g in repo_gr.parents] + [repo_gr.name]
repo_count = repo_gr.repositories.count()
repo_groups_data.append({
"raw_name": repo_gr.group_name,
diff --git a/kallithea/controllers/admin/repos.py b/kallithea/controllers/admin/repos.py
--- a/kallithea/controllers/admin/repos.py
+++ b/kallithea/controllers/admin/repos.py
@@ -28,6 +28,7 @@ Original author and date, and relevant c
import logging
import traceback
+import celery.result
import formencode
from formencode import htmlfill
from tg import request
@@ -35,6 +36,7 @@ from tg import tmpl_context as c
from tg.i18n import ugettext as _
from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
+import kallithea
from kallithea.config.routing import url
from kallithea.lib import helpers as h
from kallithea.lib.auth import HasPermissionAny, HasRepoPermissionLevelDecorator, LoginRequired, NotAnonymous
@@ -43,7 +45,7 @@ from kallithea.lib.exceptions import Att
from kallithea.lib.utils import action_logger
from kallithea.lib.utils2 import safe_int
from kallithea.lib.vcs import RepositoryError
-from kallithea.model.db import RepoGroup, Repository, RepositoryField, Setting, User, UserFollowing
+from kallithea.model.db import RepoGroup, Repository, RepositoryField, Setting, UserFollowing
from kallithea.model.forms import RepoFieldForm, RepoForm, RepoPermsForm
from kallithea.model.meta import Session
from kallithea.model.repo import RepoModel
@@ -110,17 +112,11 @@ class ReposController(BaseRepoController
@NotAnonymous()
def create(self):
self.__load_defaults()
- form_result = {}
try:
# CanWriteGroup validators checks permissions of this POST
form_result = RepoForm(repo_groups=c.repo_groups,
landing_revs=c.landing_revs_choices)() \
.to_python(dict(request.POST))
-
- # create is done sometimes async on celery, db transaction
- # management is handled there.
- task = RepoModel().create(form_result, request.authuser.user_id)
- task_id = task.task_id
except formencode.Invalid as errors:
log.info(errors)
return htmlfill.render(
@@ -131,6 +127,11 @@ class ReposController(BaseRepoController
force_defaults=False,
encoding="UTF-8")
+ try:
+ # create is done sometimes async on celery, db transaction
+ # management is handled there.
+ task = RepoModel().create(form_result, request.authuser.user_id)
+ task_id = task.task_id
except Exception:
log.error(traceback.format_exc())
msg = (_('Error creating repository %s')
@@ -181,12 +182,10 @@ class ReposController(BaseRepoController
task_id = request.GET.get('task_id')
if task_id and task_id not in ['None']:
- from kallithea import CELERY_ON
- from kallithea.lib import celerypylons
- if CELERY_ON:
- task = celerypylons.result.AsyncResult(task_id)
- if task.failed():
- raise HTTPInternalServerError(task.traceback)
+ if kallithea.CELERY_APP:
+ task_result = celery.result.AsyncResult(task_id, app=kallithea.CELERY_APP)
+ if task_result.failed():
+ raise HTTPInternalServerError(task_result.traceback)
repo = Repository.get_by_repo_name(repo_name)
if repo and repo.repo_state == Repository.STATE_CREATED:
@@ -406,7 +405,7 @@ class ReposController(BaseRepoController
@HasRepoPermissionLevelDecorator('admin')
def edit_advanced(self, repo_name):
c.repo_info = self._load_repo()
- c.default_user_id = User.get_default_user().user_id
+ c.default_user_id = kallithea.DEFAULT_USER_ID
c.in_public_journal = UserFollowing.query() \
.filter(UserFollowing.user_id == c.default_user_id) \
.filter(UserFollowing.follows_repository == c.repo_info).scalar()
@@ -443,7 +442,7 @@ class ReposController(BaseRepoController
try:
repo_id = Repository.get_by_repo_name(repo_name).repo_id
- user_id = User.get_default_user().user_id
+ user_id = kallithea.DEFAULT_USER_ID
self.scm_model.toggle_following_repo(repo_id, user_id)
h.flash(_('Updated repository visibility in public journal'),
category='success')
@@ -471,7 +470,7 @@ class ReposController(BaseRepoController
category='success')
except RepositoryError as e:
log.error(traceback.format_exc())
- h.flash(str(e), category='error')
+ h.flash(e, category='error')
except Exception as e:
log.error(traceback.format_exc())
h.flash(_('An error occurred during this operation'),
@@ -480,24 +479,6 @@ class ReposController(BaseRepoController
raise HTTPFound(location=url('edit_repo_advanced', repo_name=repo_name))
@HasRepoPermissionLevelDecorator('admin')
- def edit_caches(self, repo_name):
- c.repo_info = self._load_repo()
- c.active = 'caches'
- if request.POST:
- try:
- ScmModel().mark_for_invalidation(repo_name)
- Session().commit()
- h.flash(_('Cache invalidation successful'),
- category='success')
- except Exception as e:
- log.error(traceback.format_exc())
- h.flash(_('An error occurred during cache invalidation'),
- category='error')
-
- raise HTTPFound(location=url('edit_repo_caches', repo_name=c.repo_name))
- return render('admin/repos/repo_edit.html')
-
- @HasRepoPermissionLevelDecorator('admin')
def edit_remote(self, repo_name):
c.repo_info = self._load_repo()
c.active = 'remote'
diff --git a/kallithea/controllers/admin/settings.py b/kallithea/controllers/admin/settings.py
--- a/kallithea/controllers/admin/settings.py
+++ b/kallithea/controllers/admin/settings.py
@@ -42,7 +42,7 @@ from kallithea.lib.base import BaseContr
from kallithea.lib.celerylib import tasks
from kallithea.lib.exceptions import HgsubversionImportError
from kallithea.lib.utils import repo2db_mapper, set_app_settings
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import safe_str
from kallithea.lib.vcs import VCSError
from kallithea.model.db import Repository, Setting, Ui
from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm
@@ -120,6 +120,7 @@ class SettingsController(BaseController)
if sett.ui_active:
try:
import hgsubversion # pragma: no cover
+ assert hgsubversion
except ImportError:
raise HgsubversionImportError
@@ -168,10 +169,10 @@ class SettingsController(BaseController)
user=request.authuser.username,
overwrite_git_hooks=overwrite_git_hooks)
added_msg = h.HTML(', ').join(
- h.link_to(safe_unicode(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added
+ h.link_to(safe_str(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added
) or '-'
removed_msg = h.HTML(', ').join(
- safe_unicode(repo_name) for repo_name in removed
+ safe_str(repo_name) for repo_name in removed
) or '-'
h.flash(h.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) %
(added_msg, removed_msg), category='success')
@@ -423,7 +424,7 @@ class SettingsController(BaseController)
import kallithea
c.ini = kallithea.CONFIG
server_info = Setting.get_server_info()
- for key, val in server_info.iteritems():
+ for key, val in server_info.items():
setattr(c, key, val)
return htmlfill.render(
diff --git a/kallithea/controllers/admin/user_groups.py b/kallithea/controllers/admin/user_groups.py
--- a/kallithea/controllers/admin/user_groups.py
+++ b/kallithea/controllers/admin/user_groups.py
@@ -32,19 +32,18 @@ import formencode
from formencode import htmlfill
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.expression import func
-from tg import app_globals, config, request
+from tg import app_globals, request
from tg import tmpl_context as c
from tg.i18n import ugettext as _
from webob.exc import HTTPFound, HTTPInternalServerError
-import kallithea
from kallithea.config.routing import url
from kallithea.lib import helpers as h
from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseController, render
from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import safe_int, safe_unicode
+from kallithea.lib.utils2 import safe_int, safe_str
from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm
from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm
from kallithea.model.meta import Session
@@ -61,7 +60,6 @@ class UserGroupsController(BaseControlle
@LoginRequired(allow_default_user=True)
def _before(self, *args, **kwargs):
super(UserGroupsController, self)._before(*args, **kwargs)
- c.available_permissions = config['available_permissions']
def __load_data(self, user_group_id):
c.group_members_obj = sorted((x.user for x in c.user_group.members),
@@ -88,20 +86,18 @@ class UserGroupsController(BaseControlle
.all()
group_iter = UserGroupList(_list, perm_level='admin')
user_groups_data = []
- total_records = len(group_iter)
_tmpl_lookup = app_globals.mako_lookup
template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
- user_group_name = lambda user_group_id, user_group_name: (
- template.get_def("user_group_name")
- .render(user_group_id, user_group_name, _=_, h=h, c=c)
- )
- user_group_actions = lambda user_group_id, user_group_name: (
- template.get_def("user_group_actions")
- .render(user_group_id, user_group_name, _=_, h=h, c=c)
- )
+ def user_group_name(user_group_id, user_group_name):
+ return template.get_def("user_group_name") \
+ .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
+
+ def user_group_actions(user_group_id, user_group_name):
+ return template.get_def("user_group_actions") \
+ .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
+
for user_gr in group_iter:
-
user_groups_data.append({
"raw_name": user_gr.users_group_name,
"group_name": user_group_name(user_gr.users_group_id,
@@ -163,7 +159,7 @@ class UserGroupsController(BaseControlle
c.active = 'settings'
self.__load_data(id)
- available_members = [safe_unicode(x[0]) for x in c.available_members]
+ available_members = [safe_str(x[0]) for x in c.available_members]
users_group_form = UserGroupForm(edit=True,
old_data=c.user_group.get_dict(),
diff --git a/kallithea/controllers/admin/users.py b/kallithea/controllers/admin/users.py
--- a/kallithea/controllers/admin/users.py
+++ b/kallithea/controllers/admin/users.py
@@ -31,7 +31,7 @@ import traceback
import formencode
from formencode import htmlfill
from sqlalchemy.sql.expression import func
-from tg import app_globals, config, request
+from tg import app_globals, request
from tg import tmpl_context as c
from tg.i18n import ugettext as _
from webob.exc import HTTPFound, HTTPNotFound
@@ -63,7 +63,6 @@ class UsersController(BaseController):
@HasPermissionAnyDecorator('hg.admin')
def _before(self, *args, **kwargs):
super(UsersController, self)._before(*args, **kwargs)
- c.available_permissions = config['available_permissions']
def index(self, format='html'):
c.users_list = User.query().order_by(User.username) \
@@ -72,19 +71,18 @@ class UsersController(BaseController):
.all()
users_data = []
- total_records = len(c.users_list)
_tmpl_lookup = app_globals.mako_lookup
template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
grav_tmpl = '
%s
'
- username = lambda user_id, username: (
- template.get_def("user_name")
- .render(user_id, username, _=_, h=h, c=c))
+ def username(user_id, username):
+ return template.get_def("user_name") \
+ .render_unicode(user_id, username, _=_, h=h, c=c)
- user_actions = lambda user_id, username: (
- template.get_def("user_actions")
- .render(user_id, username, _=_, h=h, c=c))
+ def user_actions(user_id, username):
+ return template.get_def("user_actions") \
+ .render_unicode(user_id, username, _=_, h=h, c=c)
for user in c.users_list:
users_data.append({
@@ -390,7 +388,7 @@ class UsersController(BaseController):
.filter(UserIpMap.user == c.user).all()
c.default_user_ip_map = UserIpMap.query() \
- .filter(UserIpMap.user == User.get_default_user()).all()
+ .filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all()
defaults = c.user.get_dict()
return htmlfill.render(
@@ -454,8 +452,8 @@ class UsersController(BaseController):
Session().commit()
SshKeyModel().write_authorized_keys()
h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
- except SshKeyModelException as errors:
- h.flash(errors.message, category='error')
+ except SshKeyModelException as e:
+ h.flash(e.args[0], category='error')
raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
@IfSshEnabled
@@ -468,6 +466,6 @@ class UsersController(BaseController):
Session().commit()
SshKeyModel().write_authorized_keys()
h.flash(_("SSH key successfully deleted"), category='success')
- except SshKeyModelException as errors:
- h.flash(errors.message, category='error')
+ except SshKeyModelException as e:
+ h.flash(e.args[0], category='error')
raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
diff --git a/kallithea/controllers/api/__init__.py b/kallithea/controllers/api/__init__.py
--- a/kallithea/controllers/api/__init__.py
+++ b/kallithea/controllers/api/__init__.py
@@ -35,11 +35,11 @@ import types
from tg import Response, TGController, request, response
from webob.exc import HTTPError, HTTPException
+from kallithea.lib import ext_json
from kallithea.lib.auth import AuthUser
-from kallithea.lib.base import _get_access_path
from kallithea.lib.base import _get_ip_addr as _get_ip
-from kallithea.lib.compat import json
-from kallithea.lib.utils2 import safe_str, safe_unicode
+from kallithea.lib.base import get_path_info
+from kallithea.lib.utils2 import ascii_bytes
from kallithea.model.db import User
@@ -53,7 +53,7 @@ class JSONRPCError(BaseException):
super(JSONRPCError, self).__init__()
def __str__(self):
- return safe_str(self.message)
+ return self.message
class JSONRPCErrorResponse(Response, HTTPException):
@@ -121,7 +121,7 @@ class JSONRPCController(TGController):
raw_body = environ['wsgi.input'].read(length)
try:
- json_body = json.loads(raw_body)
+ json_body = ext_json.loads(raw_body)
except ValueError as e:
# catch JSON errors Here
raise JSONRPCErrorResponse(retid=self._req_id,
@@ -166,13 +166,13 @@ class JSONRPCController(TGController):
# now that we have a method, add self._req_params to
# self.kargs and dispatch control to WGIController
- argspec = inspect.getargspec(self._func)
- arglist = argspec[0][1:]
- defaults = map(type, argspec[3] or [])
- default_empty = types.NotImplementedType
+ argspec = inspect.getfullargspec(self._func)
+ arglist = argspec.args[1:]
+ argtypes = [type(arg) for arg in argspec.defaults or []]
+ default_empty = type(NotImplemented)
# kw arguments required by this method
- func_kwargs = dict(itertools.izip_longest(reversed(arglist), reversed(defaults),
+ func_kwargs = dict(itertools.zip_longest(reversed(arglist), reversed(argtypes),
fillvalue=default_empty))
# This attribute will need to be first param of a method that uses
@@ -180,7 +180,7 @@ class JSONRPCController(TGController):
USER_SESSION_ATTR = 'apiuser'
# get our arglist and check if we provided them as args
- for arg, default in func_kwargs.iteritems():
+ for arg, default in func_kwargs.items():
if arg == USER_SESSION_ATTR:
# USER_SESSION_ATTR is something translated from API key and
# this is checked before so we don't need validate it
@@ -209,7 +209,7 @@ class JSONRPCController(TGController):
log.info('IP: %s Request to %s time: %.3fs' % (
self._get_ip_addr(environ),
- safe_unicode(_get_access_path(environ)), time.time() - start)
+ get_path_info(environ), time.time() - start)
)
state.set_action(self._rpc_call, [])
@@ -226,28 +226,28 @@ class JSONRPCController(TGController):
if isinstance(raw_response, HTTPError):
self._error = str(raw_response)
except JSONRPCError as e:
- self._error = safe_str(e)
+ self._error = str(e)
except Exception as e:
log.error('Encountered unhandled exception: %s',
traceback.format_exc(),)
json_exc = JSONRPCError('Internal server error')
- self._error = safe_str(json_exc)
+ self._error = str(json_exc)
if self._error is not None:
raw_response = None
response = dict(id=self._req_id, result=raw_response, error=self._error)
try:
- return json.dumps(response)
+ return ascii_bytes(ext_json.dumps(response))
except TypeError as e:
- log.error('API FAILED. Error encoding response: %s', e)
- return json.dumps(
+ log.error('API FAILED. Error encoding response for %s %s: %s\n%s', action, rpc_args, e, traceback.format_exc())
+ return ascii_bytes(ext_json.dumps(
dict(
id=self._req_id,
result=None,
- error="Error encoding response"
+ error="Error encoding response",
)
- )
+ ))
def _find_method(self):
"""
diff --git a/kallithea/controllers/api/api.py b/kallithea/controllers/api/api.py
--- a/kallithea/controllers/api/api.py
+++ b/kallithea/controllers/api/api.py
@@ -32,8 +32,8 @@ from datetime import datetime
from tg import request
from kallithea.controllers.api import JSONRPCController, JSONRPCError
-from kallithea.lib.auth import (
- AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel)
+from kallithea.lib.auth import (AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel,
+ HasUserGroupPermissionLevel)
from kallithea.lib.exceptions import DefaultUserException, UserGroupsAssignedException
from kallithea.lib.utils import action_logger, repo2db_mapper
from kallithea.lib.utils2 import OAttr, Optional
@@ -433,7 +433,7 @@ class ApiController(JSONRPCController):
@HasPermissionAnyDecorator('hg.admin')
def create_user(self, username, email, password=Optional(''),
- firstname=Optional(u''), lastname=Optional(u''),
+ firstname=Optional(''), lastname=Optional(''),
active=Optional(True), admin=Optional(False),
extern_type=Optional(User.DEFAULT_AUTH_TYPE),
extern_name=Optional('')):
@@ -686,7 +686,7 @@ class ApiController(JSONRPCController):
]
@HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
- def create_user_group(self, group_name, description=Optional(u''),
+ def create_user_group(self, group_name, description=Optional(''),
owner=Optional(OAttr('apiuser')), active=Optional(True)):
"""
Creates new user group. This command can be executed only using api_key
@@ -1160,7 +1160,7 @@ class ApiController(JSONRPCController):
return _map[ret_type]
except KeyError:
raise JSONRPCError('ret_type must be one of %s'
- % (','.join(_map.keys())))
+ % (','.join(sorted(_map))))
except Exception:
log.error(traceback.format_exc())
raise JSONRPCError(
@@ -2339,7 +2339,7 @@ class ApiController(JSONRPCController):
branch_name,
reverse, max_revisions)]
except EmptyRepositoryError as e:
- raise JSONRPCError(e.message)
+ raise JSONRPCError('Repository is empty')
# permission check inside
def get_changeset(self, repoid, raw_id, with_reviews=Optional(False)):
@@ -2373,7 +2373,7 @@ class ApiController(JSONRPCController):
return pull_request.get_api_data()
# permission check inside
- def comment_pullrequest(self, pull_request_id, comment_msg=u'', status=None, close_pr=False):
+ def comment_pullrequest(self, pull_request_id, comment_msg='', status=None, close_pr=False):
"""
Add comment, close and change status of pull request.
"""
@@ -2400,7 +2400,7 @@ class ApiController(JSONRPCController):
pull_request=pull_request.pull_request_id,
f_path=None,
line_no=None,
- status_change=(ChangesetStatus.get_status_lbl(status)),
+ status_change=ChangesetStatus.get_status_lbl(status),
closing_pr=close_pr
)
action_logger(apiuser,
diff --git a/kallithea/controllers/changelog.py b/kallithea/controllers/changelog.py
--- a/kallithea/controllers/changelog.py
+++ b/kallithea/controllers/changelog.py
@@ -38,8 +38,8 @@ from kallithea.config.routing import url
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseRepoController, render
from kallithea.lib.graphmod import graph_data
-from kallithea.lib.page import RepoPage
-from kallithea.lib.utils2 import safe_int, safe_str
+from kallithea.lib.page import Page
+from kallithea.lib.utils2 import safe_int
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, NodeDoesNotExistError, RepositoryError
@@ -67,7 +67,7 @@ class ChangelogController(BaseRepoContro
h.flash(_('There are no changesets yet'), category='error')
except RepositoryError as e:
log.error(traceback.format_exc())
- h.flash(safe_str(e), category='error')
+ h.flash(e, category='error')
raise HTTPBadRequest()
@LoginRequired(allow_default_user=True)
@@ -111,35 +111,34 @@ class ChangelogController(BaseRepoContro
cs = self.__get_cs(revision, repo_name)
collection = cs.get_file_history(f_path)
except RepositoryError as e:
- h.flash(safe_str(e), category='warning')
+ h.flash(e, category='warning')
raise HTTPFound(location=h.url('changelog_home', repo_name=repo_name))
- collection = list(reversed(collection))
else:
collection = c.db_repo_scm_instance.get_changesets(start=0, end=revision,
- branch_name=branch_name)
+ branch_name=branch_name, reverse=True)
c.total_cs = len(collection)
- c.cs_pagination = RepoPage(collection, page=p, item_count=c.total_cs,
- items_per_page=c.size, branch=branch_name,)
+ c.cs_pagination = Page(collection, page=p, item_count=c.total_cs, items_per_page=c.size,
+ branch=branch_name)
page_revisions = [x.raw_id for x in c.cs_pagination]
c.cs_comments = c.db_repo.get_comments(page_revisions)
c.cs_statuses = c.db_repo.statuses(page_revisions)
except EmptyRepositoryError as e:
- h.flash(safe_str(e), category='warning')
+ h.flash(e, category='warning')
raise HTTPFound(location=url('summary_home', repo_name=c.repo_name))
except (RepositoryError, ChangesetDoesNotExistError, Exception) as e:
log.error(traceback.format_exc())
- h.flash(safe_str(e), category='error')
+ h.flash(e, category='error')
raise HTTPFound(location=url('changelog_home', repo_name=c.repo_name))
c.branch_name = branch_name
c.branch_filters = [('', _('None'))] + \
- [(k, k) for k in c.db_repo_scm_instance.branches.keys()]
+ [(k, k) for k in c.db_repo_scm_instance.branches]
if c.db_repo_scm_instance.closed_branches:
prefix = _('(closed)') + ' '
c.branch_filters += [('-', '-')] + \
- [(k, prefix + k) for k in c.db_repo_scm_instance.closed_branches.keys()]
+ [(k, prefix + k) for k in c.db_repo_scm_instance.closed_branches]
revs = []
if not f_path:
revs = [x.revision for x in c.cs_pagination]
diff --git a/kallithea/controllers/changeset.py b/kallithea/controllers/changeset.py
--- a/kallithea/controllers/changeset.py
+++ b/kallithea/controllers/changeset.py
@@ -25,6 +25,7 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
+import binascii
import logging
import traceback
from collections import OrderedDict, defaultdict
@@ -32,7 +33,7 @@ from collections import OrderedDict, def
from tg import request, response
from tg import tmpl_context as c
from tg.i18n import ugettext as _
-from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound
+from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPNotFound
import kallithea.lib.helpers as h
from kallithea.lib import diffs
@@ -40,7 +41,7 @@ from kallithea.lib.auth import HasRepoPe
from kallithea.lib.base import BaseRepoController, jsonify, render
from kallithea.lib.graphmod import graph_data
from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import ascii_str, safe_str
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
from kallithea.model.changeset_status import ChangesetStatusModel
@@ -65,7 +66,7 @@ def anchor_url(revision, path, GET):
def get_ignore_ws(fid, GET):
ig_ws_global = GET.get('ignorews')
- ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
+ ig_ws = [k for k in GET.getall(fid) if k.startswith('WS')]
if ig_ws:
try:
return int(ig_ws[0].split(':')[-1])
@@ -108,9 +109,9 @@ def _ignorews_url(GET, fileid=None):
def get_line_ctx(fid, GET):
ln_ctx_global = GET.get('context')
if fid:
- ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
+ ln_ctx = [k for k in GET.getall(fid) if k.startswith('C')]
else:
- _ln_ctx = filter(lambda k: k.startswith('C'), GET)
+ _ln_ctx = [k for k in GET if k.startswith('C')]
ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
if ln_ctx:
ln_ctx = [ln_ctx]
@@ -214,7 +215,6 @@ def create_cs_pr_comment(repo_name, revi
return {
'location': h.url('my_pullrequests'), # or repo pr list?
}
- raise HTTPFound(location=h.url('my_pullrequests')) # or repo pr list?
raise HTTPForbidden()
text = request.POST.get('text', '').strip()
@@ -256,7 +256,7 @@ def create_cs_pr_comment(repo_name, revi
Session().commit()
data = {
- 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
+ 'target_id': h.safeid(request.POST.get('f_path')),
}
if comment is not None:
c.comment = comment
@@ -395,6 +395,8 @@ class ChangesetController(BaseRepoContro
c.changeset = c.cs_ranges[0]
c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id
for x in c.changeset.parents])
+ c.changeset_graft_source_hash = ascii_str(c.changeset.extra.get(b'source', b''))
+ c.changeset_transplant_source_hash = ascii_str(binascii.hexlify(c.changeset.extra.get(b'transplant_source', b'')))
if method == 'download':
response.content_type = 'text/plain'
response.content_disposition = 'attachment; filename=%s.diff' \
@@ -402,7 +404,7 @@ class ChangesetController(BaseRepoContro
return raw_diff
elif method == 'patch':
response.content_type = 'text/plain'
- c.diff = safe_unicode(raw_diff)
+ c.diff = safe_str(raw_diff)
return render('changeset/patch_changeset.html')
elif method == 'raw':
response.content_type = 'text/plain'
diff --git a/kallithea/controllers/compare.py b/kallithea/controllers/compare.py
--- a/kallithea/controllers/compare.py
+++ b/kallithea/controllers/compare.py
@@ -30,6 +30,7 @@ Original author and date, and relevant c
import logging
import re
+import mercurial.unionrepo
from tg import request
from tg import tmpl_context as c
from tg.i18n import ugettext as _
@@ -42,8 +43,7 @@ from kallithea.lib import helpers as h
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseRepoController, render
from kallithea.lib.graphmod import graph_data
-from kallithea.lib.utils2 import safe_int, safe_str
-from kallithea.lib.vcs.utils.hgcompat import unionrepo
+from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes, safe_int
from kallithea.model.db import Repository
@@ -97,14 +97,9 @@ class CompareController(BaseRepoControll
elif alias == 'hg':
# case two independent repos
if org_repo != other_repo:
- try:
- hgrepo = unionrepo.makeunionrepository(other_repo.baseui,
- other_repo.path,
- org_repo.path)
- except AttributeError: # makeunionrepository was introduced in Mercurial 4.8 23f2299e9e53
- hgrepo = unionrepo.unionrepository(other_repo.baseui,
- other_repo.path,
- org_repo.path)
+ hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui,
+ safe_bytes(other_repo.path),
+ safe_bytes(org_repo.path))
# all ancestors of other_rev will be in other_repo and
# rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
@@ -112,21 +107,27 @@ class CompareController(BaseRepoControll
else:
hgrepo = other_repo._repo
- ancestors = [hgrepo[ancestor].hex() for ancestor in
- hgrepo.revs("id(%s) & ::id(%s)", other_rev, org_rev)]
+ ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
+ hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))]
if ancestors:
log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
else:
log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
- ancestors = [hgrepo[ancestor].hex() for ancestor in
- hgrepo.revs("heads(::id(%s) & ::id(%s))", org_rev, other_rev)] # FIXME: expensive!
+ ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
+ hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive!
- other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
- other_rev, org_rev, org_rev)
- other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
- org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
- org_rev, other_rev, other_rev)
- org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]
+ other_changesets = [
+ other_repo.get_changeset(rev)
+ for rev in hgrepo.revs(
+ b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
+ ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev))
+ ]
+ org_changesets = [
+ org_repo.get_changeset(ascii_str(hgrepo[rev].hex()))
+ for rev in hgrepo.revs(
+ b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
+ ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev))
+ ]
elif alias == 'git':
if org_repo != other_repo:
@@ -134,15 +135,15 @@ class CompareController(BaseRepoControll
from dulwich.client import SubprocessGitClient
gitrepo = Repo(org_repo.path)
- SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo)
+ SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
gitrepo_remote = Repo(other_repo.path)
- SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote)
+ SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)
revs = [
- x.commit.id
- for x in gitrepo_remote.get_walker(include=[other_rev],
- exclude=[org_rev])
+ ascii_str(x.commit.id)
+ for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
+ exclude=[ascii_bytes(org_rev)])
]
other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
if other_changesets:
@@ -155,13 +156,13 @@ class CompareController(BaseRepoControll
gitrepo_remote.close()
else:
- so, se = org_repo.run_git_command(
+ so = org_repo.run_git_command(
['log', '--reverse', '--pretty=format:%H',
'-s', '%s..%s' % (org_rev, other_rev)]
)
other_changesets = [org_repo.get_changeset(cs)
for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
- so, se = org_repo.run_git_command(
+ so = org_repo.run_git_command(
['merge-base', org_rev, other_rev]
)
ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]]
@@ -277,7 +278,7 @@ class CompareController(BaseRepoControll
ignore_whitespace=ignore_whitespace,
context=line_context)
- diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
+ diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
c.limited_diff = diff_processor.limited_diff
c.file_diff_data = []
c.lines_added = 0
diff --git a/kallithea/controllers/error.py b/kallithea/controllers/error.py
--- a/kallithea/controllers/error.py
+++ b/kallithea/controllers/error.py
@@ -25,7 +25,7 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-import cgi
+import html
import logging
from tg import config, expose, request
@@ -64,8 +64,7 @@ class ErrorController(BaseController):
'protocol': e.get('wsgi.url_scheme'),
'host': e.get('HTTP_HOST'), }
if resp:
- c.error_message = cgi.escape(request.GET.get('code',
- str(resp.status)))
+ c.error_message = html.escape(request.GET.get('code', str(resp.status)))
c.error_explanation = self.get_error_explanation(resp.status_int)
else:
c.error_message = _('No response')
diff --git a/kallithea/controllers/feed.py b/kallithea/controllers/feed.py
--- a/kallithea/controllers/feed.py
+++ b/kallithea/controllers/feed.py
@@ -32,23 +32,19 @@ from beaker.cache import cache_region
from tg import response
from tg import tmpl_context as c
from tg.i18n import ugettext as _
-from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
from kallithea import CONFIG
+from kallithea.lib import feeds
from kallithea.lib import helpers as h
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseRepoController
from kallithea.lib.diffs import DiffProcessor
-from kallithea.lib.utils2 import safe_int, safe_unicode, str2bool
+from kallithea.lib.utils2 import safe_int, safe_str, str2bool
log = logging.getLogger(__name__)
-language = 'en-us'
-ttl = "5"
-
-
class FeedController(BaseRepoController):
@LoginRequired(allow_default_user=True)
@@ -98,64 +94,41 @@ class FeedController(BaseRepoController)
desc_msg.extend(changes)
if str2bool(CONFIG.get('rss_include_diff', False)):
desc_msg.append('\n\n')
- desc_msg.append(raw_diff)
+ desc_msg.append(safe_str(raw_diff))
desc_msg.append('')
- return map(safe_unicode, desc_msg)
+ return desc_msg
- def atom(self, repo_name):
- """Produce an atom-1.0 feed via feedgenerator module"""
+ def _feed(self, repo_name, feeder):
+ """Produce a simple feed"""
- @cache_region('long_term', '_get_feed_from_cache')
+ @cache_region('long_term_file', '_get_feed_from_cache')
def _get_feed_from_cache(*_cache_keys): # parameters are not really used - only as caching key
- feed = Atom1Feed(
+ header = dict(
title=_('%s %s feed') % (c.site_name, repo_name),
link=h.canonical_url('summary_home', repo_name=repo_name),
description=_('Changes on %s repository') % repo_name,
- language=language,
- ttl=ttl
)
rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
+ entries=[]
for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
- feed.add_item(title=self._get_title(cs),
- link=h.canonical_url('changeset_home', repo_name=repo_name,
- revision=cs.raw_id),
- author_name=cs.author,
- description=''.join(self.__get_desc(cs)),
- pubdate=cs.date,
- )
+ entries.append(dict(
+ title=self._get_title(cs),
+ link=h.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id),
+ author_email=cs.author_email,
+ author_name=cs.author_name,
+ description=''.join(self.__get_desc(cs)),
+ pubdate=cs.date,
+ ))
+ return feeder.render(header, entries)
- response.content_type = feed.mime_type
- return feed.writeString('utf-8')
+ response.content_type = feeder.content_type
+ return _get_feed_from_cache(repo_name, feeder.__name__)
- kind = 'ATOM'
- return _get_feed_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id'))
+ def atom(self, repo_name):
+ """Produce a simple atom-1.0 feed"""
+ return self._feed(repo_name, feeds.AtomFeed)
def rss(self, repo_name):
- """Produce an rss2 feed via feedgenerator module"""
-
- @cache_region('long_term', '_get_feed_from_cache')
- def _get_feed_from_cache(*_cache_keys): # parameters are not really used - only as caching key
- feed = Rss201rev2Feed(
- title=_('%s %s feed') % (c.site_name, repo_name),
- link=h.canonical_url('summary_home', repo_name=repo_name),
- description=_('Changes on %s repository') % repo_name,
- language=language,
- ttl=ttl
- )
-
- rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
- for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
- feed.add_item(title=self._get_title(cs),
- link=h.canonical_url('changeset_home', repo_name=repo_name,
- revision=cs.raw_id),
- author_name=cs.author,
- description=''.join(self.__get_desc(cs)),
- pubdate=cs.date,
- )
-
- response.content_type = feed.mime_type
- return feed.writeString('utf-8')
-
- kind = 'RSS'
- return _get_feed_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id'))
+ """Produce a simple rss2 feed"""
+ return self._feed(repo_name, feeds.RssFeed)
diff --git a/kallithea/controllers/files.py b/kallithea/controllers/files.py
--- a/kallithea/controllers/files.py
+++ b/kallithea/controllers/files.py
@@ -49,10 +49,10 @@ from kallithea.lib.utils import action_l
from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_str, str2bool
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.vcs.conf import settings
-from kallithea.lib.vcs.exceptions import (
- ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError, NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
+from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError,
+ NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
from kallithea.lib.vcs.nodes import FileNode
-from kallithea.model.db import Repository
+from kallithea.model import db
from kallithea.model.repo import RepoModel
from kallithea.model.scm import ScmModel
@@ -90,7 +90,7 @@ class FilesController(BaseRepoController
h.flash(msg, category='error')
raise HTTPNotFound()
except RepositoryError as e:
- h.flash(safe_str(e), category='error')
+ h.flash(e, category='error')
raise HTTPNotFound()
def __get_filenode(self, cs, path):
@@ -110,7 +110,7 @@ class FilesController(BaseRepoController
h.flash(msg, category='error')
raise HTTPNotFound()
except RepositoryError as e:
- h.flash(safe_str(e), category='error')
+ h.flash(e, category='error')
raise HTTPNotFound()
return file_node
@@ -163,7 +163,7 @@ class FilesController(BaseRepoController
c.load_full_history = False
# determine if we're on branch head
_branches = c.db_repo_scm_instance.branches
- c.on_branch_head = revision in _branches.keys() + _branches.values()
+ c.on_branch_head = revision in _branches or revision in _branches.values()
_hist = []
c.file_history = []
if c.load_full_history:
@@ -175,7 +175,7 @@ class FilesController(BaseRepoController
else:
c.authors = c.file_history = []
except RepositoryError as e:
- h.flash(safe_str(e), category='error')
+ h.flash(e, category='error')
raise HTTPNotFound()
if request.environ.get('HTTP_X_PARTIAL_XHR'):
@@ -232,8 +232,8 @@ class FilesController(BaseRepoController
cs = self.__get_cs(revision)
file_node = self.__get_filenode(cs, f_path)
- response.content_disposition = 'attachment; filename=%s' % \
- safe_str(f_path.split(Repository.url_sep())[-1])
+ response.content_disposition = \
+ 'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1]
response.content_type = file_node.mimetype
return file_node.content
@@ -277,8 +277,7 @@ class FilesController(BaseRepoController
mimetype, dispo = 'text/plain', 'inline'
if dispo == 'attachment':
- dispo = 'attachment; filename=%s' % \
- safe_str(f_path.split(os.sep)[-1])
+ dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
response.content_disposition = dispo
response.content_type = mimetype
@@ -292,7 +291,7 @@ class FilesController(BaseRepoController
# create multiple heads via file editing
_branches = repo.scm_instance.branches
# check if revision is a branch name or branch hash
- if revision not in _branches.keys() + _branches.values():
+ if revision not in _branches and revision not in _branches.values():
h.flash(_('You can only delete files with revision '
'being a valid branch'), category='warning')
raise HTTPFound(location=h.url('files_home',
@@ -346,7 +345,7 @@ class FilesController(BaseRepoController
# create multiple heads via file editing
_branches = repo.scm_instance.branches
# check if revision is a branch name or branch hash
- if revision not in _branches.keys() + _branches.values():
+ if revision not in _branches and revision not in _branches.values():
h.flash(_('You can only edit files with revision '
'being a valid branch'), category='warning')
raise HTTPFound(location=h.url('files_home',
@@ -365,8 +364,7 @@ class FilesController(BaseRepoController
c.f_path = f_path
if r_post:
-
- old_content = c.file.content
+ old_content = safe_str(c.file.content)
sl = old_content.splitlines(1)
first_line = sl[0] if sl else ''
# modes: 0 - Unix, 1 - Mac, 2 - DOS
@@ -509,8 +507,7 @@ class FilesController(BaseRepoController
from kallithea import CONFIG
rev_name = cs.raw_id[:12]
- archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
- safe_str(rev_name), ext)
+ archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
archive_path = None
cached_archive_path = None
diff --git a/kallithea/controllers/forks.py b/kallithea/controllers/forks.py
--- a/kallithea/controllers/forks.py
+++ b/kallithea/controllers/forks.py
@@ -35,13 +35,14 @@ from tg import tmpl_context as c
from tg.i18n import ugettext as _
from webob.exc import HTTPFound
+import kallithea
import kallithea.lib.helpers as h
from kallithea.config.routing import url
from kallithea.lib.auth import HasPermissionAny, HasPermissionAnyDecorator, HasRepoPermissionLevel, HasRepoPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseRepoController, render
from kallithea.lib.page import Page
from kallithea.lib.utils2 import safe_int
-from kallithea.model.db import Repository, Ui, User, UserFollowing
+from kallithea.model.db import Repository, Ui, UserFollowing
from kallithea.model.forms import RepoForkForm
from kallithea.model.repo import RepoModel
from kallithea.model.scm import AvailableRepoGroupChoices, ScmModel
@@ -76,7 +77,7 @@ class ForksController(BaseRepoController
h.not_mapped_error(c.repo_name)
raise HTTPFound(location=url('repos'))
- c.default_user_id = User.get_default_user().user_id
+ c.default_user_id = kallithea.DEFAULT_USER_ID
c.in_public_journal = UserFollowing.query() \
.filter(UserFollowing.user_id == c.default_user_id) \
.filter(UserFollowing.follows_repository == c.repo_info).scalar()
diff --git a/kallithea/controllers/home.py b/kallithea/controllers/home.py
--- a/kallithea/controllers/home.py
+++ b/kallithea/controllers/home.py
@@ -37,7 +37,7 @@ from webob.exc import HTTPBadRequest
from kallithea.lib import helpers as h
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseController, jsonify, render
-from kallithea.lib.utils import conditional_cache
+from kallithea.lib.utils2 import safe_str
from kallithea.model.db import RepoGroup, Repository, User, UserGroup
from kallithea.model.repo import RepoModel
from kallithea.model.scm import UserGroupList
@@ -67,9 +67,7 @@ class HomeController(BaseController):
@LoginRequired(allow_default_user=True)
@jsonify
def repo_switcher_data(self):
- # wrapper for conditional cache
- def _c():
- log.debug('generating switcher repo/groups list')
+ if request.is_xhr:
all_repos = Repository.query(sorted=True).all()
repo_iter = self.scm_model.get_repos(all_repos)
all_groups = RepoGroup.query(sorted=True).all()
@@ -96,17 +94,16 @@ class HomeController(BaseController):
],
}]
+ for res_dict in res:
+ for child in (res_dict['children']):
+ child['obj'].pop('_changeset_cache', None) # bytes cannot be encoded in json ... but this value isn't relevant on client side at all ...
+
data = {
'more': False,
'results': res,
}
return data
- if request.is_xhr:
- condition = False
- compute = conditional_cache('short_term', 'cache_desc',
- condition=condition, func=_c)
- return compute()
else:
raise HTTPBadRequest()
@@ -120,25 +117,25 @@ class HomeController(BaseController):
if _branches:
res.append({
'text': _('Branch'),
- 'children': [{'id': rev, 'text': name, 'type': 'branch'} for name, rev in _branches]
+ 'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'branch'} for name, rev in _branches]
})
_closed_branches = repo.closed_branches.items()
if _closed_branches:
res.append({
'text': _('Closed Branches'),
- 'children': [{'id': rev, 'text': name, 'type': 'closed-branch'} for name, rev in _closed_branches]
+ 'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'closed-branch'} for name, rev in _closed_branches]
})
_tags = repo.tags.items()
if _tags:
res.append({
'text': _('Tag'),
- 'children': [{'id': rev, 'text': name, 'type': 'tag'} for name, rev in _tags]
+ 'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'tag'} for name, rev in _tags]
})
_bookmarks = repo.bookmarks.items()
if _bookmarks:
res.append({
'text': _('Bookmark'),
- 'children': [{'id': rev, 'text': name, 'type': 'book'} for name, rev in _bookmarks]
+ 'children': [{'id': safe_str(rev), 'text': safe_str(name), 'type': 'book'} for name, rev in _bookmarks]
})
data = {
'more': False,
diff --git a/kallithea/controllers/journal.py b/kallithea/controllers/journal.py
--- a/kallithea/controllers/journal.py
+++ b/kallithea/controllers/journal.py
@@ -23,7 +23,6 @@ Original author and date, and relevant c
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
-
"""
import logging
@@ -35,12 +34,11 @@ from sqlalchemy.orm import joinedload
from tg import request, response
from tg import tmpl_context as c
from tg.i18n import ugettext as _
-from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
from webob.exc import HTTPBadRequest
import kallithea.lib.helpers as h
-from kallithea.config.routing import url
from kallithea.controllers.admin.admin import _journal_filter
+from kallithea.lib import feeds
from kallithea.lib.auth import LoginRequired
from kallithea.lib.base import BaseController, render
from kallithea.lib.page import Page
@@ -105,22 +103,17 @@ class JournalController(BaseController):
return journal
- def _atom_feed(self, repos, public=True):
+ def _feed(self, repos, feeder, link, desc):
+ response.content_type = feeder.content_type
journal = self._get_journal_data(repos)
- if public:
- _link = h.canonical_url('public_journal_atom')
- _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
- 'atom feed')
- else:
- _link = h.canonical_url('journal_atom')
- _desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')
- feed = Atom1Feed(title=_desc,
- link=_link,
- description=_desc,
- language=language,
- ttl=ttl)
+ header = dict(
+ title=desc,
+ link=link,
+ description=desc,
+ )
+ entries=[]
for entry in journal[:feed_nr]:
user = entry.user
if user is None:
@@ -131,63 +124,43 @@ class JournalController(BaseController):
action, action_extra, ico = h.action_parser(entry, feed=True)
title = "%s - %s %s" % (user.short_contact, action(),
entry.repository.repo_name)
- desc = action_extra()
_url = None
if entry.repository is not None:
_url = h.canonical_url('changelog_home',
repo_name=entry.repository.repo_name)
- feed.add_item(title=title,
- pubdate=entry.action_date,
- link=_url or h.canonical_url(''),
- author_email=user.email,
- author_name=user.full_contact,
- description=desc)
+ entries.append(dict(
+ title=title,
+ pubdate=entry.action_date,
+ link=_url or h.canonical_url(''),
+ author_email=user.email,
+ author_name=user.full_name_or_username,
+ description=action_extra(),
+ ))
+
+ return feeder.render(header, entries)
- response.content_type = feed.mime_type
- return feed.writeString('utf-8')
+ def _atom_feed(self, repos, public=True):
+ if public:
+ link = h.canonical_url('public_journal_atom')
+ desc = '%s %s %s' % (c.site_name, _('Public Journal'),
+ 'atom feed')
+ else:
+ link = h.canonical_url('journal_atom')
+ desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')
+
+ return self._feed(repos, feeds.AtomFeed, link, desc)
def _rss_feed(self, repos, public=True):
- journal = self._get_journal_data(repos)
if public:
- _link = h.canonical_url('public_journal_atom')
- _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
+ link = h.canonical_url('public_journal_atom')
+ desc = '%s %s %s' % (c.site_name, _('Public Journal'),
'rss feed')
else:
- _link = h.canonical_url('journal_atom')
- _desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')
-
- feed = Rss201rev2Feed(title=_desc,
- link=_link,
- description=_desc,
- language=language,
- ttl=ttl)
+ link = h.canonical_url('journal_atom')
+ desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')
- for entry in journal[:feed_nr]:
- user = entry.user
- if user is None:
- # fix deleted users
- user = AttributeDict({'short_contact': entry.username,
- 'email': '',
- 'full_contact': ''})
- action, action_extra, ico = h.action_parser(entry, feed=True)
- title = "%s - %s %s" % (user.short_contact, action(),
- entry.repository.repo_name)
- desc = action_extra()
- _url = None
- if entry.repository is not None:
- _url = h.canonical_url('changelog_home',
- repo_name=entry.repository.repo_name)
-
- feed.add_item(title=title,
- pubdate=entry.action_date,
- link=_url or h.canonical_url(''),
- author_email=user.email,
- author_name=user.full_contact,
- description=desc)
-
- response.content_type = feed.mime_type
- return feed.writeString('utf-8')
+ return self._feed(repos, feeds.RssFeed, link, desc)
@LoginRequired()
def index(self):
@@ -201,10 +174,8 @@ class JournalController(BaseController):
journal = self._get_journal_data(c.following)
- def url_generator(**kw):
- return url.current(filter=c.search_term, **kw)
-
- c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
+ c.journal_pager = Page(journal, page=p, items_per_page=20,
+ filter=c.search_term)
c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)
if request.environ.get('HTTP_X_PARTIAL_XHR'):
@@ -221,9 +192,7 @@ class JournalController(BaseController):
@LoginRequired()
def journal_atom(self):
- """
- Produce an atom-1.0 feed via feedgenerator module
- """
+ """Produce a simple atom-1.0 feed"""
following = UserFollowing.query() \
.filter(UserFollowing.user_id == request.authuser.user_id) \
.options(joinedload(UserFollowing.follows_repository)) \
@@ -232,9 +201,7 @@ class JournalController(BaseController):
@LoginRequired()
def journal_rss(self):
- """
- Produce an rss feed via feedgenerator module
- """
+ """Produce a simple rss2 feed"""
following = UserFollowing.query() \
.filter(UserFollowing.user_id == request.authuser.user_id) \
.options(joinedload(UserFollowing.follows_repository)) \
@@ -290,9 +257,7 @@ class JournalController(BaseController):
@LoginRequired(allow_default_user=True)
def public_journal_atom(self):
- """
- Produce an atom-1.0 feed via feedgenerator module
- """
+ """Produce a simple atom-1.0 feed"""
c.following = UserFollowing.query() \
.filter(UserFollowing.user_id == request.authuser.user_id) \
.options(joinedload(UserFollowing.follows_repository)) \
@@ -302,9 +267,7 @@ class JournalController(BaseController):
@LoginRequired(allow_default_user=True)
def public_journal_rss(self):
- """
- Produce an rss2 feed via feedgenerator module
- """
+ """Produce a simple rss2 feed"""
c.following = UserFollowing.query() \
.filter(UserFollowing.user_id == request.authuser.user_id) \
.options(joinedload(UserFollowing.follows_repository)) \
diff --git a/kallithea/controllers/login.py b/kallithea/controllers/login.py
--- a/kallithea/controllers/login.py
+++ b/kallithea/controllers/login.py
@@ -41,7 +41,6 @@ from kallithea.config.routing import url
from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator
from kallithea.lib.base import BaseController, log_in_user, render
from kallithea.lib.exceptions import UserCreationError
-from kallithea.lib.utils2 import safe_str
from kallithea.model.db import Setting, User
from kallithea.model.forms import LoginForm, PasswordResetConfirmationForm, PasswordResetRequestForm, RegisterForm
from kallithea.model.meta import Session
@@ -68,7 +67,7 @@ class LoginController(BaseController):
return _re.match(came_from) is not None
def index(self):
- c.came_from = safe_str(request.GET.get('came_from', ''))
+ c.came_from = request.GET.get('came_from', '')
if c.came_from:
if not self._validate_came_from(c.came_from):
log.error('Invalid came_from (not server-relative): %r', c.came_from)
@@ -80,10 +79,11 @@ class LoginController(BaseController):
# import Login Form validator class
login_form = LoginForm()()
try:
+ # login_form will check username/password using ValidAuth and report failure to the user
c.form_result = login_form.to_python(dict(request.POST))
- # form checks for username/password, now we're authenticated
username = c.form_result['username']
- user = User.get_by_username_or_email(username, case_insensitive=True)
+ user = User.get_by_username_or_email(username)
+ assert user is not None # the same user get just passed in the form validation
except formencode.Invalid as errors:
defaults = errors.value
# remove password from filling in form again
@@ -102,9 +102,11 @@ class LoginController(BaseController):
# Exception itself
h.flash(e, 'error')
else:
+ # login_form already validated the password - now set the session cookie accordingly
auth_user = log_in_user(user, c.form_result['remember'], is_external_auth=False, ip_addr=request.ip_addr)
- # TODO: handle auth_user is None as failed authentication?
- raise HTTPFound(location=c.came_from)
+ if auth_user:
+ raise HTTPFound(location=c.came_from)
+ h.flash(_('Authentication failed.'), 'error')
else:
# redirect if already logged in
if not request.authuser.is_anonymous:
diff --git a/kallithea/controllers/pullrequests.py b/kallithea/controllers/pullrequests.py
--- a/kallithea/controllers/pullrequests.py
+++ b/kallithea/controllers/pullrequests.py
@@ -29,6 +29,7 @@ import logging
import traceback
import formencode
+import mercurial.unionrepo
from tg import request
from tg import tmpl_context as c
from tg.i18n import ugettext as _
@@ -42,10 +43,8 @@ from kallithea.lib.auth import HasRepoPe
from kallithea.lib.base import BaseRepoController, jsonify, render
from kallithea.lib.graphmod import graph_data
from kallithea.lib.page import Page
-from kallithea.lib.utils2 import safe_int
+from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError
-from kallithea.lib.vcs.utils import safe_str
-from kallithea.lib.vcs.utils.hgcompat import unionrepo
from kallithea.model.changeset_status import ChangesetStatusModel
from kallithea.model.comment import ChangesetCommentsModel
from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User
@@ -83,22 +82,15 @@ class PullrequestsController(BaseRepoCon
# list named branches that has been merged to this named branch - it should probably merge back
peers = []
- if rev:
- rev = safe_str(rev)
-
- if branch:
- branch = safe_str(branch)
-
if branch_rev:
- branch_rev = safe_str(branch_rev)
# a revset not restricting to merge() would be better
# (especially because it would get the branch point)
# ... but is currently too expensive
# including branches of children could be nice too
peerbranches = set()
for i in repo._repo.revs(
- "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
- branch_rev, branch_rev
+ b"sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
+ ascii_bytes(branch_rev), ascii_bytes(branch_rev),
):
for abranch in repo.get_changeset(i).branches:
if abranch not in peerbranches:
@@ -111,7 +103,7 @@ class PullrequestsController(BaseRepoCon
tipbranch = None
branches = []
- for abranch, branchrev in repo.branches.iteritems():
+ for abranch, branchrev in repo.branches.items():
n = 'branch:%s:%s' % (abranch, branchrev)
desc = abranch
if branchrev == tiprev:
@@ -135,14 +127,14 @@ class PullrequestsController(BaseRepoCon
log.debug('branch %r not found in %s', branch, repo)
bookmarks = []
- for bookmark, bookmarkrev in repo.bookmarks.iteritems():
+ for bookmark, bookmarkrev in repo.bookmarks.items():
n = 'book:%s:%s' % (bookmark, bookmarkrev)
bookmarks.append((n, bookmark))
if rev == bookmarkrev:
selected = n
tags = []
- for tag, tagrev in repo.tags.iteritems():
+ for tag, tagrev in repo.tags.items():
if tag == 'tip':
continue
n = 'tag:%s:%s' % (tag, tagrev)
@@ -173,7 +165,7 @@ class PullrequestsController(BaseRepoCon
if 'master' in repo.branches:
selected = 'branch:master:%s' % repo.branches['master']
else:
- k, v = repo.branches.items()[0]
+ k, v = list(repo.branches.items())[0]
selected = 'branch:%s:%s' % (k, v)
groups = [(specials, _("Special")),
@@ -201,6 +193,11 @@ class PullrequestsController(BaseRepoCon
def show_all(self, repo_name):
c.from_ = request.GET.get('from_') or ''
c.closed = request.GET.get('closed') or ''
+ url_params = {}
+ if c.from_:
+ url_params['from_'] = 1
+ if c.closed:
+ url_params['closed'] = 1
p = safe_int(request.GET.get('page'), 1)
q = PullRequest.query(include_closed=c.closed, sorted=True)
@@ -210,7 +207,7 @@ class PullrequestsController(BaseRepoCon
q = q.filter_by(other_repo=c.db_repo)
c.pull_requests = q.all()
- c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100)
+ c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100, **url_params)
return render('/pullrequests/pullrequest_show_all.html')
@@ -335,7 +332,7 @@ class PullrequestsController(BaseRepoCon
try:
cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers)
except CreatePullRequestAction.ValidationError as e:
- h.flash(str(e), category='error', logf=log.error)
+ h.flash(e, category='error', logf=log.error)
raise HTTPNotFound
try:
@@ -358,7 +355,7 @@ class PullrequestsController(BaseRepoCon
try:
cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers)
except CreatePullRequestAction.ValidationError as e:
- h.flash(str(e), category='error', logf=log.error)
+ h.flash(e, category='error', logf=log.error)
raise HTTPNotFound
try:
@@ -531,14 +528,9 @@ class PullrequestsController(BaseRepoCon
# Note: org_scm_instance.path must come first so all
# valid revision numbers are 100% org_scm compatible
# - both for avail_revs and for revset results
- try:
- hgrepo = unionrepo.makeunionrepository(org_scm_instance.baseui,
- org_scm_instance.path,
- other_scm_instance.path)
- except AttributeError: # makeunionrepository was introduced in Mercurial 4.8 23f2299e9e53
- hgrepo = unionrepo.unionrepository(org_scm_instance.baseui,
- org_scm_instance.path,
- other_scm_instance.path)
+ hgrepo = mercurial.unionrepo.makeunionrepository(org_scm_instance.baseui,
+ safe_bytes(org_scm_instance.path),
+ safe_bytes(other_scm_instance.path))
else:
hgrepo = org_scm_instance._repo
show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s',
@@ -588,11 +580,11 @@ class PullrequestsController(BaseRepoCon
log.debug('running diff between %s and %s in %s',
c.a_rev, c.cs_rev, org_scm_instance.path)
try:
- raw_diff = diffs.get_diff(org_scm_instance, rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
+ raw_diff = diffs.get_diff(org_scm_instance, rev1=c.a_rev, rev2=c.cs_rev,
ignore_whitespace=ignore_whitespace, context=line_context)
except ChangesetDoesNotExistError:
- raw_diff = _("The diff can't be shown - the PR revisions could not be found.")
- diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
+ raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found."))
+ diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
c.limited_diff = diff_processor.limited_diff
c.file_diff_data = []
c.lines_added = 0
diff --git a/kallithea/controllers/search.py b/kallithea/controllers/search.py
--- a/kallithea/controllers/search.py
+++ b/kallithea/controllers/search.py
@@ -27,12 +27,10 @@ Original author and date, and relevant c
import logging
import traceback
-import urllib
from tg import config, request
from tg import tmpl_context as c
from tg.i18n import ugettext as _
-from webhelpers2.html.tools import update_params
from whoosh.index import EmptyIndexError, exists_in, open_dir
from whoosh.qparser import QueryParser, QueryParserError
from whoosh.query import Phrase, Prefix
@@ -41,7 +39,7 @@ from kallithea.lib.auth import LoginRequ
from kallithea.lib.base import BaseRepoController, render
from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA, WhooshResultWrapper
from kallithea.lib.page import Page
-from kallithea.lib.utils2 import safe_int, safe_str
+from kallithea.lib.utils2 import safe_int
from kallithea.model.repo import RepoModel
@@ -96,9 +94,9 @@ class SearchController(BaseRepoControlle
if c.repo_name:
# use "repository_rawname:" instead of "repository:"
# for case-sensitive matching
- cur_query = u'repository_rawname:%s %s' % (c.repo_name, cur_query)
+ cur_query = 'repository_rawname:%s %s' % (c.repo_name, cur_query)
try:
- query = qp.parse(unicode(cur_query))
+ query = qp.parse(cur_query)
# extract words for highlight
if isinstance(query, Phrase):
highlight_items.update(query.words)
@@ -119,9 +117,6 @@ class SearchController(BaseRepoControlle
res_ln, results.runtime
)
- def url_generator(**kw):
- q = urllib.quote(safe_str(c.cur_query))
- return update_params("?q=%s&type=%s" % (q, safe_str(c.cur_type)), **kw)
repo_location = RepoModel().repos_path
c.formated_results = Page(
WhooshResultWrapper(search_type, searcher, matcher,
@@ -129,7 +124,8 @@ class SearchController(BaseRepoControlle
page=p,
item_count=res_ln,
items_per_page=10,
- url=url_generator
+ type=c.cur_type,
+ q=c.cur_query,
)
except QueryParserError:
diff --git a/kallithea/controllers/summary.py b/kallithea/controllers/summary.py
--- a/kallithea/controllers/summary.py
+++ b/kallithea/controllers/summary.py
@@ -38,14 +38,15 @@ from tg import tmpl_context as c
from tg.i18n import ugettext as _
from webob.exc import HTTPBadRequest
+import kallithea.lib.helpers as h
from kallithea.config.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP
+from kallithea.lib import ext_json
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
from kallithea.lib.base import BaseRepoController, jsonify, render
from kallithea.lib.celerylib.tasks import get_commits_stats
-from kallithea.lib.compat import json
from kallithea.lib.markup_renderer import MarkupRenderer
-from kallithea.lib.page import RepoPage
-from kallithea.lib.utils2 import safe_int
+from kallithea.lib.page import Page
+from kallithea.lib.utils2 import safe_int, safe_str
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError
from kallithea.lib.vcs.nodes import FileNode
@@ -65,7 +66,7 @@ class SummaryController(BaseRepoControll
repo_name = db_repo.repo_name
log.debug('Looking for README file')
- @cache_region('long_term', '_get_readme_from_cache')
+ @cache_region('long_term_file', '_get_readme_from_cache')
def _get_readme_from_cache(*_cache_keys): # parameters are not really used - only as caching key
readme_data = None
readme_file = None
@@ -83,7 +84,7 @@ class SummaryController(BaseRepoControll
readme_file = f
log.debug('Found README file `%s` rendering...',
readme_file)
- readme_data = renderer.render(readme.content,
+ readme_data = renderer.render(safe_str(readme.content),
filename=f)
break
except NodeDoesNotExistError:
@@ -104,8 +105,12 @@ class SummaryController(BaseRepoControll
def index(self, repo_name):
p = safe_int(request.GET.get('page'), 1)
size = safe_int(request.GET.get('size'), 10)
- collection = c.db_repo_scm_instance
- c.cs_pagination = RepoPage(collection, page=p, items_per_page=size)
+ try:
+ collection = c.db_repo_scm_instance.get_changesets(reverse=True)
+ except EmptyRepositoryError as e:
+ h.flash(e, category='warning')
+ collection = []
+ c.cs_pagination = Page(collection, page=p, items_per_page=size)
page_revisions = [x.raw_id for x in list(c.cs_pagination)]
c.cs_comments = c.db_repo.get_comments(page_revisions)
c.cs_statuses = c.db_repo.statuses(page_revisions)
@@ -133,17 +138,13 @@ class SummaryController(BaseRepoControll
c.stats_percentage = 0
if stats and stats.languages:
- c.no_data = False is c.db_repo.enable_statistics
- lang_stats_d = json.loads(stats.languages)
-
+ lang_stats_d = ext_json.loads(stats.languages)
lang_stats = [(x, {"count": y,
"desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')})
for x, y in lang_stats_d.items()]
lang_stats.sort(key=lambda k: (-k[1]['count'], k[0]))
-
c.trending_languages = lang_stats[:10]
else:
- c.no_data = True
c.trending_languages = []
c.enable_downloads = c.db_repo.enable_downloads
@@ -171,7 +172,7 @@ class SummaryController(BaseRepoControll
c.no_data_msg = _('Statistics are disabled for this repository')
td = date.today() + timedelta(days=1)
- td_1m = td - timedelta(days=calendar.mdays[td.month])
+ td_1m = td - timedelta(days=calendar.monthrange(td.year, td.month)[1])
td_1y = td - timedelta(days=365)
ts_min_m = mktime(td_1m.timetuple())
@@ -185,18 +186,16 @@ class SummaryController(BaseRepoControll
.scalar()
c.stats_percentage = 0
if stats and stats.languages:
- c.no_data = False is c.db_repo.enable_statistics
- lang_stats_d = json.loads(stats.languages)
- c.commit_data = json.loads(stats.commit_activity)
- c.overview_data = json.loads(stats.commit_activity_combined)
+ c.commit_data = ext_json.loads(stats.commit_activity)
+ c.overview_data = ext_json.loads(stats.commit_activity_combined)
- lang_stats = ((x, {"count": y,
- "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
- for x, y in lang_stats_d.items())
+ lang_stats_d = ext_json.loads(stats.languages)
+ lang_stats = [(x, {"count": y,
+ "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')})
+ for x, y in lang_stats_d.items()]
+ lang_stats.sort(key=lambda k: (-k[1]['count'], k[0]))
+ c.trending_languages = lang_stats[:10]
- c.trending_languages = (
- sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
- )
last_rev = stats.stat_on_revision + 1
c.repo_last_rev = c.db_repo_scm_instance.count() \
if c.db_repo_scm_instance.revisions else 0
@@ -208,8 +207,7 @@ class SummaryController(BaseRepoControll
else:
c.commit_data = {}
c.overview_data = ([[ts_min_y, 0], [ts_max_y, 10]])
- c.trending_languages = {}
- c.no_data = True
+ c.trending_languages = []
recurse_limit = 500 # don't recurse more than 500 times when parsing
get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
diff --git a/kallithea/front-end/package-lock.json b/kallithea/front-end/package-lock.json
--- a/kallithea/front-end/package-lock.json
+++ b/kallithea/front-end/package-lock.json
@@ -3,18 +3,49 @@
"requires": true,
"lockfileVersion": 1,
"dependencies": {
+ "@babel/code-frame": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz",
+ "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==",
+ "dev": true,
+ "requires": {
+ "@babel/highlight": "^7.8.3"
+ }
+ },
+ "@babel/highlight": {
+ "version": "7.8.3",
+ "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.8.3.tgz",
+ "integrity": "sha512-PX4y5xQUvy0fnEVHrYOarRPXVWafSjTW9T0Hab8gVIawpl2Sj0ORyrygANq+KjcNlSSTw0YCLSNA8OyZ1I4yEg==",
+ "dev": true,
+ "requires": {
+ "chalk": "^2.0.0",
+ "esutils": "^2.0.2",
+ "js-tokens": "^4.0.0"
+ }
+ },
"abbrev": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
"dev": true
},
+ "acorn": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz",
+ "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==",
+ "dev": true
+ },
+ "acorn-jsx": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz",
+ "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==",
+ "dev": true
+ },
"ajv": {
"version": "6.10.2",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz",
"integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==",
"dev": true,
- "optional": true,
"requires": {
"fast-deep-equal": "^2.0.1",
"fast-json-stable-stringify": "^2.0.0",
@@ -28,6 +59,21 @@
"integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=",
"dev": true
},
+ "ansi-escapes": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz",
+ "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==",
+ "dev": true,
+ "requires": {
+ "type-fest": "^0.8.1"
+ }
+ },
+ "ansi-regex": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
+ "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==",
+ "dev": true
+ },
"ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
@@ -37,6 +83,15 @@
"color-convert": "^1.9.0"
}
},
+ "argparse": {
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+ "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "dev": true,
+ "requires": {
+ "sprintf-js": "~1.0.2"
+ }
+ },
"array-find-index": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz",
@@ -66,6 +121,12 @@
"dev": true,
"optional": true
},
+ "astral-regex": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz",
+ "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==",
+ "dev": true
+ },
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@@ -123,6 +184,12 @@
"concat-map": "0.0.1"
}
},
+ "callsites": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+ "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+ "dev": true
+ },
"caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
@@ -141,6 +208,12 @@
"supports-color": "^5.3.0"
}
},
+ "chardet": {
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz",
+ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==",
+ "dev": true
+ },
"clean-css": {
"version": "3.4.28",
"resolved": "https://registry.npmjs.org/clean-css/-/clean-css-3.4.28.tgz",
@@ -162,6 +235,21 @@
}
}
},
+ "cli-cursor": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz",
+ "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==",
+ "dev": true,
+ "requires": {
+ "restore-cursor": "^3.1.0"
+ }
+ },
+ "cli-width": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz",
+ "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=",
+ "dev": true
+ },
"clone": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
@@ -220,6 +308,19 @@
"dev": true,
"optional": true
},
+ "cross-spawn": {
+ "version": "6.0.5",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz",
+ "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==",
+ "dev": true,
+ "requires": {
+ "nice-try": "^1.0.4",
+ "path-key": "^2.0.1",
+ "semver": "^5.5.0",
+ "shebang-command": "^1.2.0",
+ "which": "^1.2.9"
+ }
+ },
"dashdash": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
@@ -262,6 +363,12 @@
"integrity": "sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI=",
"dev": true
},
+ "deep-is": {
+ "version": "0.1.3",
+ "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
+ "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=",
+ "dev": true
+ },
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
@@ -279,6 +386,64 @@
"wrappy": "1"
}
},
+ "doctrine": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+ "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
+ "dev": true,
+ "requires": {
+ "esutils": "^2.0.2"
+ }
+ },
+ "dom-serializer": {
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz",
+ "integrity": "sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==",
+ "dev": true,
+ "requires": {
+ "domelementtype": "^2.0.1",
+ "entities": "^2.0.0"
+ },
+ "dependencies": {
+ "domelementtype": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.0.1.tgz",
+ "integrity": "sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ==",
+ "dev": true
+ },
+ "entities": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/entities/-/entities-2.0.0.tgz",
+ "integrity": "sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw==",
+ "dev": true
+ }
+ }
+ },
+ "domelementtype": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz",
+ "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==",
+ "dev": true
+ },
+ "domhandler": {
+ "version": "2.4.2",
+ "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz",
+ "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==",
+ "dev": true,
+ "requires": {
+ "domelementtype": "1"
+ }
+ },
+ "domutils": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz",
+ "integrity": "sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==",
+ "dev": true,
+ "requires": {
+ "dom-serializer": "0",
+ "domelementtype": "1"
+ }
+ },
"ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
@@ -290,6 +455,18 @@
"safer-buffer": "^2.1.0"
}
},
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "entities": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz",
+ "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==",
+ "dev": true
+ },
"errno": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz",
@@ -306,6 +483,149 @@
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
"dev": true
},
+ "eslint": {
+ "version": "6.8.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz",
+ "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==",
+ "dev": true,
+ "requires": {
+ "@babel/code-frame": "^7.0.0",
+ "ajv": "^6.10.0",
+ "chalk": "^2.1.0",
+ "cross-spawn": "^6.0.5",
+ "debug": "^4.0.1",
+ "doctrine": "^3.0.0",
+ "eslint-scope": "^5.0.0",
+ "eslint-utils": "^1.4.3",
+ "eslint-visitor-keys": "^1.1.0",
+ "espree": "^6.1.2",
+ "esquery": "^1.0.1",
+ "esutils": "^2.0.2",
+ "file-entry-cache": "^5.0.1",
+ "functional-red-black-tree": "^1.0.1",
+ "glob-parent": "^5.0.0",
+ "globals": "^12.1.0",
+ "ignore": "^4.0.6",
+ "import-fresh": "^3.0.0",
+ "imurmurhash": "^0.1.4",
+ "inquirer": "^7.0.0",
+ "is-glob": "^4.0.0",
+ "js-yaml": "^3.13.1",
+ "json-stable-stringify-without-jsonify": "^1.0.1",
+ "levn": "^0.3.0",
+ "lodash": "^4.17.14",
+ "minimatch": "^3.0.4",
+ "mkdirp": "^0.5.1",
+ "natural-compare": "^1.4.0",
+ "optionator": "^0.8.3",
+ "progress": "^2.0.0",
+ "regexpp": "^2.0.1",
+ "semver": "^6.1.2",
+ "strip-ansi": "^5.2.0",
+ "strip-json-comments": "^3.0.1",
+ "table": "^5.2.3",
+ "text-table": "^0.2.0",
+ "v8-compile-cache": "^2.0.3"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
+ "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
+ "dev": true,
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "semver": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
+ "dev": true
+ }
+ }
+ },
+ "eslint-plugin-html": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-html/-/eslint-plugin-html-6.0.0.tgz",
+ "integrity": "sha512-PQcGippOHS+HTbQCStmH5MY1BF2MaU8qW/+Mvo/8xTa/ioeMXdSP+IiaBw2+nh0KEMfYQKuTz1Zo+vHynjwhbg==",
+ "dev": true,
+ "requires": {
+ "htmlparser2": "^3.10.1"
+ }
+ },
+ "eslint-scope": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz",
+ "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==",
+ "dev": true,
+ "requires": {
+ "esrecurse": "^4.1.0",
+ "estraverse": "^4.1.1"
+ }
+ },
+ "eslint-utils": {
+ "version": "1.4.3",
+ "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz",
+ "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==",
+ "dev": true,
+ "requires": {
+ "eslint-visitor-keys": "^1.1.0"
+ }
+ },
+ "eslint-visitor-keys": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz",
+ "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==",
+ "dev": true
+ },
+ "espree": {
+ "version": "6.1.2",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz",
+ "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==",
+ "dev": true,
+ "requires": {
+ "acorn": "^7.1.0",
+ "acorn-jsx": "^5.1.0",
+ "eslint-visitor-keys": "^1.1.0"
+ }
+ },
+ "esprima": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+ "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+ "dev": true
+ },
+ "esquery": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.1.0.tgz",
+ "integrity": "sha512-MxYW9xKmROWF672KqjO75sszsA8Mxhw06YFeS5VHlB98KDHbOSurm3ArsjO60Eaf3QmGMCP1yn+0JQkNLo/97Q==",
+ "dev": true,
+ "requires": {
+ "estraverse": "^4.0.0"
+ }
+ },
+ "esrecurse": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz",
+ "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==",
+ "dev": true,
+ "requires": {
+ "estraverse": "^4.1.0"
+ }
+ },
+ "estraverse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
+ "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
+ "dev": true
+ },
+ "esutils": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
+ "dev": true
+ },
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
@@ -313,6 +633,17 @@
"dev": true,
"optional": true
},
+ "external-editor": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz",
+ "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==",
+ "dev": true,
+ "requires": {
+ "chardet": "^0.7.0",
+ "iconv-lite": "^0.4.24",
+ "tmp": "^0.0.33"
+ }
+ },
"extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
@@ -324,15 +655,54 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
"integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=",
- "dev": true,
- "optional": true
+ "dev": true
},
"fast-json-stable-stringify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
"integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=",
+ "dev": true
+ },
+ "fast-levenshtein": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=",
+ "dev": true
+ },
+ "figures": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
+ "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==",
"dev": true,
- "optional": true
+ "requires": {
+ "escape-string-regexp": "^1.0.5"
+ }
+ },
+ "file-entry-cache": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz",
+ "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==",
+ "dev": true,
+ "requires": {
+ "flat-cache": "^2.0.1"
+ }
+ },
+ "flat-cache": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz",
+ "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==",
+ "dev": true,
+ "requires": {
+ "flatted": "^2.0.0",
+ "rimraf": "2.6.3",
+ "write": "1.0.3"
+ }
+ },
+ "flatted": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz",
+ "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==",
+ "dev": true
},
"forever-agent": {
"version": "0.6.1",
@@ -359,6 +729,12 @@
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true
},
+ "functional-red-black-tree": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
+ "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=",
+ "dev": true
+ },
"getpass": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
@@ -383,6 +759,24 @@
"path-is-absolute": "^1.0.0"
}
},
+ "glob-parent": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz",
+ "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==",
+ "dev": true,
+ "requires": {
+ "is-glob": "^4.0.1"
+ }
+ },
+ "globals": {
+ "version": "12.3.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz",
+ "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==",
+ "dev": true,
+ "requires": {
+ "type-fest": "^0.8.1"
+ }
+ },
"graceful-fs": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
@@ -425,6 +819,20 @@
"integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==",
"dev": true
},
+ "htmlparser2": {
+ "version": "3.10.1",
+ "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.1.tgz",
+ "integrity": "sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==",
+ "dev": true,
+ "requires": {
+ "domelementtype": "^1.3.1",
+ "domhandler": "^2.3.0",
+ "domutils": "^1.5.1",
+ "entities": "^1.1.1",
+ "inherits": "^2.0.1",
+ "readable-stream": "^3.1.1"
+ }
+ },
"http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
@@ -437,6 +845,21 @@
"sshpk": "^1.7.0"
}
},
+ "iconv-lite": {
+ "version": "0.4.24",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+ "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+ "dev": true,
+ "requires": {
+ "safer-buffer": ">= 2.1.2 < 3"
+ }
+ },
+ "ignore": {
+ "version": "4.0.6",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz",
+ "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==",
+ "dev": true
+ },
"image-size": {
"version": "0.5.5",
"resolved": "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz",
@@ -444,6 +867,22 @@
"dev": true,
"optional": true
},
+ "import-fresh": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz",
+ "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==",
+ "dev": true,
+ "requires": {
+ "parent-module": "^1.0.0",
+ "resolve-from": "^4.0.0"
+ }
+ },
+ "imurmurhash": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+ "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=",
+ "dev": true
+ },
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
@@ -460,6 +899,54 @@
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true
},
+ "inquirer": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz",
+ "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==",
+ "dev": true,
+ "requires": {
+ "ansi-escapes": "^4.2.1",
+ "chalk": "^2.4.2",
+ "cli-cursor": "^3.1.0",
+ "cli-width": "^2.0.0",
+ "external-editor": "^3.0.3",
+ "figures": "^3.0.0",
+ "lodash": "^4.17.15",
+ "mute-stream": "0.0.8",
+ "run-async": "^2.2.0",
+ "rxjs": "^6.5.3",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^5.1.0",
+ "through": "^2.3.6"
+ }
+ },
+ "is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true
+ },
+ "is-glob": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz",
+ "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==",
+ "dev": true,
+ "requires": {
+ "is-extglob": "^2.1.1"
+ }
+ },
+ "is-promise": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz",
+ "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=",
+ "dev": true
+ },
"is-typedarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
@@ -467,6 +954,12 @@
"dev": true,
"optional": true
},
+ "isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+ "dev": true
+ },
"isstream": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
@@ -489,6 +982,22 @@
"resolved": "https://registry.npmjs.org/jquery.flot/-/jquery.flot-0.8.3.tgz",
"integrity": "sha512-/tEE8J5NjwvStHDaCHkvTJpD7wDS4hE1OEL8xEmhgQfUe0gLUem923PIceNez1mz4yBNx6Hjv7pJcowLNd+nbg=="
},
+ "js-tokens": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
+ "dev": true
+ },
+ "js-yaml": {
+ "version": "3.13.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz",
+ "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==",
+ "dev": true,
+ "requires": {
+ "argparse": "^1.0.7",
+ "esprima": "^4.0.0"
+ }
+ },
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
@@ -513,8 +1022,13 @@
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true,
- "optional": true
+ "dev": true
+ },
+ "json-stable-stringify-without-jsonify": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=",
+ "dev": true
},
"json-stringify-safe": {
"version": "5.0.1",
@@ -562,6 +1076,16 @@
"clean-css": "^3.0.1"
}
},
+ "levn": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
+ "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=",
+ "dev": true,
+ "requires": {
+ "prelude-ls": "~1.1.2",
+ "type-check": "~0.3.2"
+ }
+ },
"license-checker": {
"version": "25.0.1",
"resolved": "https://registry.npmjs.org/license-checker/-/license-checker-25.0.1.tgz",
@@ -580,6 +1104,12 @@
"treeify": "^1.1.0"
}
},
+ "lodash": {
+ "version": "4.17.15",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
+ "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==",
+ "dev": true
+ },
"mime": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
@@ -604,6 +1134,12 @@
"mime-db": "1.40.0"
}
},
+ "mimic-fn": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+ "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+ "dev": true
+ },
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
@@ -634,6 +1170,24 @@
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"dev": true
},
+ "mute-stream": {
+ "version": "0.0.8",
+ "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz",
+ "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==",
+ "dev": true
+ },
+ "natural-compare": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+ "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=",
+ "dev": true
+ },
+ "nice-try": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz",
+ "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==",
+ "dev": true
+ },
"nopt": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz",
@@ -672,6 +1226,29 @@
"wrappy": "1"
}
},
+ "onetime": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz",
+ "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==",
+ "dev": true,
+ "requires": {
+ "mimic-fn": "^2.1.0"
+ }
+ },
+ "optionator": {
+ "version": "0.8.3",
+ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz",
+ "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==",
+ "dev": true,
+ "requires": {
+ "deep-is": "~0.1.3",
+ "fast-levenshtein": "~2.0.6",
+ "levn": "~0.3.0",
+ "prelude-ls": "~1.1.2",
+ "type-check": "~0.3.2",
+ "word-wrap": "~1.2.3"
+ }
+ },
"os-homedir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
@@ -694,12 +1271,27 @@
"os-tmpdir": "^1.0.0"
}
},
+ "parent-module": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+ "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
+ "dev": true,
+ "requires": {
+ "callsites": "^3.0.0"
+ }
+ },
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"dev": true
},
+ "path-key": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz",
+ "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=",
+ "dev": true
+ },
"path-parse": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
@@ -713,6 +1305,18 @@
"dev": true,
"optional": true
},
+ "prelude-ls": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
+ "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=",
+ "dev": true
+ },
+ "progress": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
+ "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
+ "dev": true
+ },
"promise": {
"version": "7.3.1",
"resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
@@ -741,8 +1345,7 @@
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
- "dev": true,
- "optional": true
+ "dev": true
},
"qs": {
"version": "6.5.2",
@@ -779,6 +1382,17 @@
"slash": "^1.0.0"
}
},
+ "readable-stream": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
+ "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "dev": true,
+ "requires": {
+ "inherits": "^2.0.3",
+ "string_decoder": "^1.1.1",
+ "util-deprecate": "^1.0.1"
+ }
+ },
"readdir-scoped-modules": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz",
@@ -791,6 +1405,12 @@
"once": "^1.3.0"
}
},
+ "regexpp": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz",
+ "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==",
+ "dev": true
+ },
"request": {
"version": "2.88.0",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz",
@@ -829,19 +1449,60 @@
"path-parse": "^1.0.6"
}
},
+ "resolve-from": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
+ "dev": true
+ },
+ "restore-cursor": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz",
+ "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==",
+ "dev": true,
+ "requires": {
+ "onetime": "^5.1.0",
+ "signal-exit": "^3.0.2"
+ }
+ },
+ "rimraf": {
+ "version": "2.6.3",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz",
+ "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==",
+ "dev": true,
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ },
+ "run-async": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
+ "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=",
+ "dev": true,
+ "requires": {
+ "is-promise": "^2.1.0"
+ }
+ },
+ "rxjs": {
+ "version": "6.5.4",
+ "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz",
+ "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==",
+ "dev": true,
+ "requires": {
+ "tslib": "^1.9.0"
+ }
+ },
"safe-buffer": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
"integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==",
- "dev": true,
- "optional": true
+ "dev": true
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
- "dev": true,
- "optional": true
+ "dev": true
},
"select2": {
"version": "3.5.1",
@@ -859,12 +1520,52 @@
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"dev": true
},
+ "shebang-command": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz",
+ "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^1.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz",
+ "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=",
+ "dev": true
+ },
+ "signal-exit": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
+ "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=",
+ "dev": true
+ },
"slash": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz",
"integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=",
"dev": true
},
+ "slice-ansi": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz",
+ "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^3.2.0",
+ "astral-regex": "^1.0.0",
+ "is-fullwidth-code-point": "^2.0.0"
+ },
+ "dependencies": {
+ "is-fullwidth-code-point": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+ "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=",
+ "dev": true
+ }
+ }
+ },
"slide": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz",
@@ -938,6 +1639,12 @@
"spdx-ranges": "^2.0.0"
}
},
+ "sprintf-js": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+ "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=",
+ "dev": true
+ },
"sshpk": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
@@ -956,6 +1663,60 @@
"tweetnacl": "~0.14.0"
}
},
+ "string-width": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
+ "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "dependencies": {
+ "strip-ansi": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
+ "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.0"
+ }
+ }
+ }
+ },
+ "string_decoder": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
+ "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
+ "dev": true,
+ "requires": {
+ "safe-buffer": "~5.2.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz",
+ "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^4.1.0"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz",
+ "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==",
+ "dev": true
+ }
+ }
+ },
+ "strip-json-comments": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz",
+ "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==",
+ "dev": true
+ },
"supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
@@ -965,6 +1726,64 @@
"has-flag": "^3.0.0"
}
},
+ "table": {
+ "version": "5.4.6",
+ "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz",
+ "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==",
+ "dev": true,
+ "requires": {
+ "ajv": "^6.10.2",
+ "lodash": "^4.17.14",
+ "slice-ansi": "^2.1.0",
+ "string-width": "^3.0.0"
+ },
+ "dependencies": {
+ "emoji-regex": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz",
+ "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+ "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=",
+ "dev": true
+ },
+ "string-width": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz",
+ "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^7.0.1",
+ "is-fullwidth-code-point": "^2.0.0",
+ "strip-ansi": "^5.1.0"
+ }
+ }
+ }
+ },
+ "text-table": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
+ "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=",
+ "dev": true
+ },
+ "through": {
+ "version": "2.3.8",
+ "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+ "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
+ "dev": true
+ },
+ "tmp": {
+ "version": "0.0.33",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
+ "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
+ "dev": true,
+ "requires": {
+ "os-tmpdir": "~1.0.2"
+ }
+ },
"tough-cookie": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz",
@@ -991,6 +1810,12 @@
"integrity": "sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A==",
"dev": true
},
+ "tslib": {
+ "version": "1.11.0",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.0.tgz",
+ "integrity": "sha512-BmndXUtiTn/VDDrJzQE7Mm22Ix3PxgLltW9bSNLoeCY31gnG2OPx0QqJnuc9oMIKioYrz487i6K9o4Pdn0j+Kg==",
+ "dev": true
+ },
"tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
@@ -1008,16 +1833,36 @@
"dev": true,
"optional": true
},
+ "type-check": {
+ "version": "0.3.2",
+ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz",
+ "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=",
+ "dev": true,
+ "requires": {
+ "prelude-ls": "~1.1.2"
+ }
+ },
+ "type-fest": {
+ "version": "0.8.1",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
+ "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
+ "dev": true
+ },
"uri-js": {
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
"integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
"dev": true,
- "optional": true,
"requires": {
"punycode": "^2.1.0"
}
},
+ "util-deprecate": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+ "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
+ "dev": true
+ },
"util-extend": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz",
@@ -1031,6 +1876,12 @@
"dev": true,
"optional": true
},
+ "v8-compile-cache": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz",
+ "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==",
+ "dev": true
+ },
"validate-npm-package-license": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
@@ -1053,11 +1904,35 @@
"extsprintf": "^1.2.0"
}
},
+ "which": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
+ "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ },
+ "word-wrap": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
+ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+ "dev": true
+ },
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
+ },
+ "write": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz",
+ "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==",
+ "dev": true,
+ "requires": {
+ "mkdirp": "^0.5.1"
+ }
}
}
}
diff --git a/kallithea/front-end/package.json b/kallithea/front-end/package.json
--- a/kallithea/front-end/package.json
+++ b/kallithea/front-end/package.json
@@ -14,6 +14,8 @@
"select2-bootstrap-css": "1.4.6"
},
"devDependencies": {
+ "eslint": "6.8.0",
+ "eslint-plugin-html": "6.0.0",
"less": "3.10.3",
"less-plugin-clean-css": "1.5.1",
"license-checker": "25.0.1"
diff --git a/kallithea/front-end/style.less b/kallithea/front-end/style.less
--- a/kallithea/front-end/style.less
+++ b/kallithea/front-end/style.less
@@ -937,8 +937,8 @@ div.annotatediv {
background-color: @kallithea-theme-main-color;
border: 0;
}
-#content #context-pages .follow .show-following,
-#content #context-pages .following .show-follow {
+#content .follow .show-following,
+#content .following .show-follow {
display: none;
}
diff --git a/kallithea/i18n/how_to b/kallithea/i18n/how_to
--- a/kallithea/i18n/how_to
+++ b/kallithea/i18n/how_to
@@ -55,9 +55,9 @@ the translation files (`*.po`).
First update the translation strings::
- python2 setup.py extract_messages
+ python3 setup.py extract_messages
-Then regenerate the translation files. This could either be done with `python2
+Then regenerate the translation files. This could either be done with `python3
setup.py update_catalog` or with `msgmerge` from the `gettext` package. As
Weblate is also touching these translation files, it is preferred to use the
same tools (`msgmerge`) and settings as Weblate to minimize the diff::
@@ -73,11 +73,11 @@ Manual creation of a new language transl
In the prepared development environment, run the following to ensure
all translation strings are extracted and up-to-date::
- python2 setup.py extract_messages
+ python3 setup.py extract_messages
Create new language by executing following command::
- python2 setup.py init_catalog -l
+ python3 setup.py init_catalog -l
This creates a new translation under directory `kallithea/i18n/`
based on the translation template file, `kallithea/i18n/kallithea.pot`.
@@ -90,7 +90,7 @@ translation file for errors by executing
Finally, compile the translations::
- python2 setup.py compile_catalog -l
+ python3 setup.py compile_catalog -l
Manually updating translations
@@ -98,11 +98,11 @@ Manually updating translations
Extract the latest versions of strings for translation by running::
- python2 setup.py extract_messages
+ python3 setup.py extract_messages
Update the PO file by doing::
- python2 setup.py update_catalog -l
+ python3 setup.py update_catalog -l
Edit the newly updated translation file. Repeat all steps after the
`init_catalog` step from the 'new translation' instructions above.
diff --git a/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po b/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po
--- a/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po
+++ b/kallithea/i18n/nl_BE/LC_MESSAGES/kallithea.po
@@ -206,9 +206,6 @@ msgstr "Er is een fout opgetreden tijden
msgid "Changeset %s not found"
msgstr "Changeset %s werd niet gevonden"
-msgid "SSH key %r not found"
-msgstr "SSH key %r werd niet gevonden"
-
msgid "Add repos"
msgstr "Repositories toevoegen"
diff --git a/kallithea/i18n/pl/LC_MESSAGES/kallithea.po b/kallithea/i18n/pl/LC_MESSAGES/kallithea.po
--- a/kallithea/i18n/pl/LC_MESSAGES/kallithea.po
+++ b/kallithea/i18n/pl/LC_MESSAGES/kallithea.po
@@ -36,6 +36,9 @@ msgstr "Nie można znaleźć innego repozytorium %s"
msgid "No response"
msgstr "Brak odpowiedzi"
+msgid "Unknown error"
+msgstr "Nieznany błąd"
+
msgid ""
"The request could not be understood by the server due to malformed syntax."
msgstr ""
@@ -125,6 +128,9 @@ msgstr "Etykiety"
msgid "An error occurred during repository forking %s"
msgstr "Wystąpił błąd podczas rozgałęzienia %s repozytorium"
+msgid "Groups"
+msgstr "Grupy"
+
msgid "Repositories"
msgstr "Repozytoria"
@@ -155,6 +161,9 @@ msgstr "Twój link zresetowania hasła został wysłany"
msgid "Invalid password reset token"
msgstr "Nieprawidłowy token resetowania hasła"
+msgid "Successfully updated password"
+msgstr "Pomyślnie zaktualizowano hasło"
+
msgid "%s (closed)"
msgstr "%s (zamknięty)"
@@ -247,6 +256,9 @@ msgstr "Twoje konto zostało pomyślnie zaktualizowane"
msgid "Error occurred during update of user %s"
msgstr "wystąpił błąd podczas aktualizacji użytkownika %s"
+msgid "Error occurred during update of user password"
+msgstr "Wystąpił błąd w trakcie aktualizacji hasła użytkownika"
+
msgid "Added email %s to user"
msgstr "Dodano e-mail %s do użytkownika"
diff --git a/kallithea/i18n/ru/LC_MESSAGES/kallithea.po b/kallithea/i18n/ru/LC_MESSAGES/kallithea.po
--- a/kallithea/i18n/ru/LC_MESSAGES/kallithea.po
+++ b/kallithea/i18n/ru/LC_MESSAGES/kallithea.po
@@ -12,7 +12,7 @@ msgstr ""
"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n"
msgid "There are no changesets yet"
-msgstr "Ещё не было изменений"
+msgstr "Наборы изменений отсутствуют"
msgid "None"
msgstr "Ничего"
@@ -29,9 +29,30 @@ msgstr "Игнорировать пробелы"
msgid "Increase diff context to %(num)s lines"
msgstr "Увеличить контекст до %(num)s строк"
+msgid "No permission to change status"
+msgstr "Недостаточно привилегий для изменения статуса"
+
+msgid "Successfully deleted pull request %s"
+msgstr "Pull-запрос %s успешно удалён"
+
msgid "Such revision does not exist for this repository"
msgstr "Нет такой ревизии в этом репозитории"
+msgid "Could not find other repository %s"
+msgstr "Не найден другой репозиторий %s"
+
+msgid "Cannot compare repositories of different types"
+msgstr "Невозможно сравнивать репозитории различных типов"
+
+msgid "Cannot show empty diff"
+msgstr "Отсутствуют изменения для отображения"
+
+msgid "No ancestor found for merge diff"
+msgstr "Не найдено предка для слияния"
+
+msgid "Multiple merge ancestors found for merge compare"
+msgstr "Найдено несколько предков для сравнения слияния"
+
msgid "Cannot compare repositories without using common ancestor"
msgstr "Невозможно сравнивать репозитории без общего предка"
@@ -64,7 +85,9 @@ msgid "%s committed on %s"
msgstr "%s выполнил коммит в %s"
msgid "Changeset was too big and was cut off..."
-msgstr "Изменения оказались слишком большими и были вырезаны..."
+msgstr ""
+"Список изменений оказался слишком большим для отображения и был "
+"сокращён..."
msgid "%s %s feed"
msgstr "Лента новостей %s %s"
@@ -75,6 +98,9 @@ msgstr "Изменения в репозитории %s"
msgid "Click here to add new file"
msgstr "Нажмите чтобы добавить новый файл"
+msgid "There are no files yet."
+msgstr "Нет файлов."
+
msgid "%s at %s"
msgstr "%s (%s)"
@@ -182,6 +208,9 @@ msgstr "Неверный код сброса пароля"
msgid "Successfully updated password"
msgstr "Пароль обновлён"
+msgid "Invalid reviewer \"%s\" specified"
+msgstr "Некорректно задан ревьювер «%s»"
+
msgid "%s (closed)"
msgstr "%s (закрыта)"
@@ -192,7 +221,7 @@ msgid "Special"
msgstr "Специальный"
msgid "Peer branches"
-msgstr "Ветки участника"
+msgstr "Ветви участника"
msgid "Bookmarks"
msgstr "Закладки"
@@ -204,7 +233,16 @@ msgid "Error occurred while creating pul
msgstr "Произошла ошибка при создании pull-запроса"
msgid "Successfully opened new pull request"
-msgstr "Pull-запрос создан успешно"
+msgstr "Pull-запрос успешно открыт"
+
+msgid "New pull request iteration created"
+msgstr "Создана новая итерация pull-запросов"
+
+msgid "Meanwhile, the following reviewers have been added: %s"
+msgstr "В то же время, добавлены следующие ревьюверы: %s"
+
+msgid "Meanwhile, the following reviewers have been removed: %s"
+msgstr "В то же время, удалены следующие ревьюверы: %s"
msgid "No description"
msgstr "Нет описания"
@@ -215,18 +253,44 @@ msgstr "Pull-запрос обновлён"
msgid "Successfully deleted pull request"
msgstr "Pull-запрос успешно удалён"
+msgid "Revision %s not found in %s"
+msgstr "Ревизия %s не найдена в %s"
+
+msgid "Error: changesets not found when displaying pull request from %s."
+msgstr "Ошибка: не найдены изменения при отображении pull-запроса от %s."
+
msgid "This pull request has already been merged to %s."
msgstr "Этот pull-запрос уже принят на ветку %s."
msgid "This pull request has been closed and can not be updated."
msgstr "Этот pull-запрос был закрыт и не может быть обновлён."
+msgid "The following additional changes are available on %s:"
+msgstr "Следующие дополнительные изменения доступны на %s:"
+
+msgid "No additional changesets found for iterating on this pull request."
+msgstr "Нет дополнительных изменений для итерации в этом pull-запросе."
+
msgid "Note: Branch %s has another head: %s."
msgstr "Внимание: Ветка %s имеет ещё одну верхушку: %s."
+msgid "Git pull requests don't support iterating yet."
+msgstr "Pull-запросы git пока не поддерживают итерации."
+
+msgid ""
+"Error: some changesets not found when displaying pull request from %s."
+msgstr ""
+"Ошибка: не найдены некоторые изменения при отображении pull-запроса от %s."
+
+msgid "The diff can't be shown - the PR revisions could not be found."
+msgstr "Невозможно отобразить различия — не найдены ревизии PR."
+
msgid "Invalid search query. Try quoting it."
msgstr "Недопустимый поисковый запрос. Попробуйте заключить его в кавычки."
+msgid "The server has no search index."
+msgstr "На сервере отсутствует поисковый индекс."
+
msgid "An error occurred during search operation."
msgstr "Произошла ошибка при выполнении этого поиска."
@@ -243,11 +307,14 @@ msgid "error occurred during update of a
msgstr "произошла ошибка при обновлении настроек авторизации"
msgid "Default settings updated successfully"
-msgstr "Стандартные настройки успешно обновлены"
+msgstr "Настройки по умолчанию успешно обновлены"
msgid "Error occurred during update of defaults"
msgstr "Произошла ошибка при обновлении стандартных настроек"
+msgid "Forever"
+msgstr "Не ограничено"
+
msgid "5 minutes"
msgstr "5 минут"
@@ -272,6 +339,9 @@ msgstr "Gist-запись %s удалена"
msgid "Unmodified"
msgstr "Неизменный"
+msgid "Successfully updated gist content"
+msgstr "Содержимое gist-записи обновлено"
+
msgid "Successfully updated gist data"
msgstr "Данные gist-записи обновлены"
@@ -310,6 +380,12 @@ msgstr "API-ключ успешно сброшен"
msgid "API key successfully deleted"
msgstr "API-ключ успешно удалён"
+msgid "SSH key %s successfully added"
+msgstr "Ключ SSH %s успешно добавлен"
+
+msgid "SSH key successfully deleted"
+msgstr "Ключ SSH успешно удалён"
+
msgid "Read"
msgstr "Чтение"
@@ -383,7 +459,7 @@ msgid "Created repository %s from %s"
msgstr "Репозиторий %s создан из %s"
msgid "Forked repository %s as %s"
-msgstr "Сделан форк(копия) репозитория %s на %s"
+msgstr "Создан форк репозитория %s с именем %s"
msgid "Created repository %s"
msgstr "Репозиторий %s создан"
@@ -404,7 +480,7 @@ msgid "Deleted repository %s"
msgstr "Репозиторий %s удалён"
msgid "Cannot delete repository %s which still has forks"
-msgstr "Невозможно удалить %s, у него всё ещё есть форки"
+msgstr "Невозможно удалить репозиторий %s, поскольку существуют его форки"
msgid "An error occurred during deletion of %s"
msgstr "Произошла ошибка во время удаления %s"
@@ -412,11 +488,17 @@ msgstr "Произошла ошибка во время удаления %s"
msgid "Repository permissions updated"
msgstr "Привилегии репозитория обновлены"
+msgid "Field validation error: %s"
+msgstr "Ошибка валидации поля: %s"
+
+msgid "An error occurred during creation of field: %r"
+msgstr "Произошла ошибка при создании поля: %r"
+
msgid "An error occurred during removal of field"
msgstr "Произошла ошибка при удалении поля"
msgid "-- Not a fork --"
-msgstr "-- Не форк --"
+msgstr "-- Не является форком --"
msgid "Updated repository visibility in public journal"
msgstr "Видимость репозитория в публичном журнале обновлена"
@@ -425,10 +507,10 @@ msgid "An error occurred during setting
msgstr "Произошла ошибка при установке репозитария в общедоступный журнал"
msgid "Nothing"
-msgstr "Ничего"
+msgstr "Отсутствуют"
msgid "Marked repository %s as fork of %s"
-msgstr "Репозиторий %s отмечен как форк %s"
+msgstr "Репозиторий %s отмечен как форк от %s"
msgid "An error occurred during this operation"
msgstr "Произошла ошибка при выполнении операции"
@@ -464,6 +546,9 @@ msgstr "Произошла ошибка при обновлении настроек приложения"
msgid "Repositories successfully rescanned. Added: %s. Removed: %s."
msgstr "Репозитории успешно пересканированы, добавлено: %s, удалено: %s."
+msgid "Invalidated %s repositories"
+msgstr "Сброшена валидация для %s репозиториев"
+
msgid "Updated application settings"
msgstr "Обновленные параметры настройки приложения"
@@ -479,6 +564,14 @@ msgstr "Пожалуйста, введите адрес электронной почты"
msgid "Send email task created"
msgstr "Задача отправки Email создана"
+msgid "Hook already exists"
+msgstr "Хук уже существует"
+
+msgid "Builtin hooks are read-only. Please use another hook name."
+msgstr ""
+"Встроенные хуки предназначены только для чтения. Пожалуйста, используйте "
+"другое имя."
+
msgid "Added new hook"
msgstr "Добавлена новая ловушка"
@@ -489,7 +582,7 @@ msgid "Error occurred during hook creati
msgstr "произошла ошибка при создании хука"
msgid "Whoosh reindex task scheduled"
-msgstr "Запланирована переиндексация базы Whoosh"
+msgstr "Переиндексация базы Whoosh успешно запланирована"
msgid "Created user group %s"
msgstr "Создана группа пользователей %s"
@@ -536,6 +629,9 @@ msgstr "Пользователь успешно удалён"
msgid "An error occurred during deletion of user"
msgstr "Произошла ошибка при удалении пользователя"
+msgid "The default user cannot be edited"
+msgstr "Нельзя редактировать пользователя по умолчанию"
+
msgid "Added IP address %s to user whitelist"
msgstr "Добавлен IP %s в белый список пользователя"
@@ -553,9 +649,19 @@ msgstr ""
msgid "You need to be signed in to view this page"
msgstr "Страница доступна только авторизованным пользователям"
+msgid ""
+"CSRF token leak has been detected - all form tokens have been expired"
+msgstr "Обнаружена утечка CSRF-токена — истёк срок действия токенов форм"
+
msgid "Repository not found in the filesystem"
msgstr "Репозиторий не найден на файловой системе"
+msgid "Changeset for %s %s not found in %s"
+msgstr "Набор изменений для %s %s не найден в %s"
+
+msgid "SSH access is disabled."
+msgstr "Доступ по SSH отключен."
+
msgid "Binary file"
msgstr "Двоичный файл"
@@ -574,9 +680,15 @@ msgstr "Удалена ветка: %s"
msgid "Created tag: %s"
msgstr "Создан тег: %s"
+msgid "Changeset %s not found"
+msgstr "Набор изменений %s не найден"
+
msgid "Show all combined changesets %s->%s"
msgstr "Показать отличия вместе %s->%s"
+msgid "Compare view"
+msgstr "Вид сравнения"
+
msgid "and"
msgstr "и"
@@ -586,6 +698,9 @@ msgstr "на %s больше"
msgid "revisions"
msgstr "версии"
+msgid "Fork name %s"
+msgstr "Имя форка %s"
+
msgid "Pull request %s"
msgstr "Pull-запрос %s"
@@ -596,10 +711,10 @@ msgid "[created] repository"
msgstr "[создан] репозиторий"
msgid "[created] repository as fork"
-msgstr "[создан] репозиторий как форк"
+msgstr "[создан] репозиторий в качестве форка"
msgid "[forked] repository"
-msgstr "[форкнут] репозиторий"
+msgstr "[создан форк] репозитория"
msgid "[updated] repository"
msgstr "[обновлён] репозиторий"
@@ -626,10 +741,10 @@ msgid "[commented] on revision in reposi
msgstr "[комментарий] к ревизии в репозитории"
msgid "[commented] on pull request for"
-msgstr "[прокомментировано] в запросе на внесение изменений для"
+msgstr "[прокомментировано] в pull-запросе для"
msgid "[closed] pull request for"
-msgstr "[закрыт] Pull-запрос для"
+msgstr "[закрыт] pull-запрос для"
msgid "[pushed] into"
msgstr "[отправлено] в"
@@ -644,10 +759,10 @@ msgid "[pulled] from"
msgstr "[внесены изменения] из"
msgid "[started following] repository"
-msgstr "[добавлен в наблюдения] репозиторий"
+msgstr "[подписка] на репозиторий"
msgid "[stopped following] repository"
-msgstr "[удалён из наблюдения] репозиторий"
+msgstr "[отписка] от репозитория"
msgid " and %s more"
msgstr " и на %s больше"
@@ -679,11 +794,28 @@ msgstr ""
"переименован из файловой системы. Пожалуйста, перезапустите приложение "
"для сканирования репозиториев"
+msgid "SSH key is missing"
+msgstr "Отсутствует ключ SSH"
+
+msgid "Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'"
+msgstr ""
+"Некорректный ключ SSH — он должен начинаться с 'ssh-(rsa|dss|ed25519)'"
+
+msgid "Incorrect SSH key - unexpected characters in base64 part %r"
+msgstr ""
+"Некорректный ключ SSH — присутствуют некорректные символы в коде base64 %r"
+
+msgid "Incorrect SSH key - failed to decode base64 part %r"
+msgstr "Некорректный ключ SSH — ошибка декодирования кода base64 %r"
+
+msgid "Incorrect SSH key - base64 part is not %r as claimed but %r"
+msgstr "Некорректный ключ SSH — код base64 соответствует не %r, а %r"
+
msgid "%d year"
msgid_plural "%d years"
msgstr[0] "%d год"
-msgstr[1] "%d лет"
-msgstr[2] "%d года"
+msgstr[1] "%d года"
+msgstr[2] "%d лет"
msgid "%d month"
msgid_plural "%d months"
@@ -700,14 +832,14 @@ msgstr[2] "%d дней"
msgid "%d hour"
msgid_plural "%d hours"
msgstr[0] "%d час"
-msgstr[1] "%d часов"
-msgstr[2] "%d часа"
+msgstr[1] "%d часа"
+msgstr[2] "%d часов"
msgid "%d minute"
msgid_plural "%d minutes"
msgstr[0] "%d минута"
-msgstr[1] "%d минут"
-msgstr[2] "%d минуты"
+msgstr[1] "%d минуты"
+msgstr[2] "%d минут"
msgid "%d second"
msgid_plural "%d seconds"
@@ -728,7 +860,7 @@ msgid "%s and %s ago"
msgstr "%s и %s назад"
msgid "just now"
-msgstr "прямо сейчас"
+msgstr "только что"
msgid "on line %s"
msgstr "на строке %s"
@@ -742,6 +874,10 @@ msgstr "верхний уровень"
msgid "Kallithea Administrator"
msgstr "Администратор Kallithea"
+msgid "Default user has no access to new repositories"
+msgstr ""
+"Неавторизованные пользователи не имеют прав доступа к новым репозиториям"
+
msgid "Default user has read access to new repositories"
msgstr "Неавторизованные пользователи имеют право чтения новых репозиториев"
@@ -749,12 +885,103 @@ msgid "Default user has write access to
msgstr ""
"Неавторизованные пользователи имеют право записи в новые репозитории"
+msgid "Default user has admin access to new repositories"
+msgstr ""
+"Неавторизованные пользователи имеют права администратора к новым "
+"репозиториям"
+
+msgid "Default user has no access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи не имеют прав доступа к новым группам "
+"репозиториев"
+
+msgid "Default user has read access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи имеют право чтения в новых группах "
+"репозиториев"
+
+msgid "Default user has write access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи имеют право записи в новых группах "
+"репозиториев"
+
+msgid "Default user has admin access to new repository groups"
+msgstr ""
+"Неавторизованные пользователи имеют права администратора к новым группам "
+"репозиториев"
+
+msgid "Default user has no access to new user groups"
+msgstr ""
+"Неавторизованные пользователи не имеют прав доступа к новым группам "
+"пользователей"
+
+msgid "Default user has read access to new user groups"
+msgstr ""
+"Неавторизованные пользователи имеют право чтения в новых группах "
+"пользователей"
+
+msgid "Default user has write access to new user groups"
+msgstr ""
+"Неавторизованные пользователи имеют право записи в новых группах "
+"пользователей"
+
+msgid "Default user has admin access to new user groups"
+msgstr ""
+"Неавторизованные пользователи имеют права администратора к новым группам "
+"пользователей"
+
msgid "Only admins can create repository groups"
-msgstr "Только администраторы могут создавать группы"
+msgstr "Только администраторы могут создавать группы репозиториев"
+
+msgid "Non-admins can create repository groups"
+msgstr "Группы репозиториев могут создаваться любыми пользователями"
+
+msgid "Only admins can create user groups"
+msgstr "Группы пользователей могут создаваться только администраторами"
+
+msgid "Non-admins can create user groups"
+msgstr "Группы пользователей могут создаваться любыми пользователями"
+
+msgid "Only admins can create top level repositories"
+msgstr "Только администраторы могут создавать репозитории верхнего уровня"
+
+msgid "Non-admins can create top level repositories"
+msgstr "Любой пользователь может создавать репозитории верхнего уровня"
+
+msgid ""
+"Repository creation enabled with write permission to a repository group"
+msgstr ""
+"Создание репозиториев доступно с правом на запись в группу репозиториев"
+
+msgid ""
+"Repository creation disabled with write permission to a repository group"
+msgstr ""
+"Создание репозиториев недоступно с правом на запись в группу репозиториев"
+
+msgid "Only admins can fork repositories"
+msgstr "Форки репозиториев могут создаваться только администраторами"
+
+msgid "Non-admins can fork repositories"
+msgstr "Форки репозиториев могут создаваться любыми пользователями"
msgid "Registration disabled"
msgstr "Регистрация отключена"
+msgid "User registration with manual account activation"
+msgstr "Регистрация пользователя с ручной активацией учётной записи"
+
+msgid "User registration with automatic account activation"
+msgstr "Регистрация пользователя с автоматической активацией"
+
+msgid "Not reviewed"
+msgstr "Не проверено"
+
+msgid "Under review"
+msgstr "На проверке"
+
+msgid "Not approved"
+msgstr "Не одобрено"
+
msgid "Approved"
msgstr "Одобрено"
@@ -770,19 +997,94 @@ msgstr "Пожалуйста, введите пароль"
msgid "Enter %(min)i characters or more"
msgstr "Введите не менее %(min)i символов"
+msgid "Name must not contain only digits"
+msgstr "Имя не может состоять только из цифр"
+
+msgid ""
+"[Comment] %(repo_name)s changeset %(short_id)s \"%(message_short)s\" on "
+"%(branch)s"
+msgstr ""
+"[Комментарий] к набору изменений %(short_id)s «%(message_short)s» "
+"репозитория %(repo_name)s в %(branch)s"
+
msgid "New user %(new_username)s registered"
msgstr "Новый пользователь \"%(new_username)s\" зарегистрирован"
+msgid ""
+"[Review] %(repo_name)s PR %(pr_nice_id)s \"%(pr_title_short)s\" from "
+"%(pr_source_branch)s by %(pr_owner_username)s"
+msgstr ""
+"[Ревью] к PR %(pr_nice_id)s «%(pr_title_short)s» из %(pr_source_branch)s "
+"репозитория %(repo_name)s от %(pr_owner_username)s"
+
+msgid ""
+"[Comment] %(repo_name)s PR %(pr_nice_id)s \"%(pr_title_short)s\" from "
+"%(pr_source_branch)s by %(pr_owner_username)s"
+msgstr ""
+"[Комментарий] к PR %(pr_nice_id)s «%(pr_title_short)s» из "
+"%(pr_source_branch)s репозитория %(repo_name)s от %(pr_owner_username)s"
+
msgid "Closing"
msgstr "Закрыт"
+msgid ""
+"%(user)s wants you to review pull request %(pr_nice_id)s: %(pr_title)s"
+msgstr ""
+"%(user)s просит вас рассмотреть pull-запрос %(pr_nice_id)s: %(pr_title)s"
+
+msgid "Cannot create empty pull request"
+msgstr "Невозможно создать пустой pull-запрос"
+
+msgid ""
+"Cannot create pull request - criss cross merge detected, please merge a "
+"later %s revision to %s"
+msgstr ""
+"Невозможно создать pull-запрос — обнаружено перекрёстное слияние. "
+"Попробуйте слить более позднюю ревизию %s с %s"
+
+msgid "You are not authorized to create the pull request"
+msgstr "Недостаточно привилегий для создания pull-запроса"
+
+msgid "Missing changesets since the previous iteration:"
+msgstr "Отсутствующие ревизии относительно предыдущей итерации:"
+
+msgid "New changesets on %s %s since the previous iteration:"
+msgstr "Новые наборы изменений в %s %s относительно предыдущей итерации:"
+
+msgid "Ancestor didn't change - diff since previous iteration:"
+msgstr "Предок не изменился — разница с момента последней итерации:"
+
+msgid ""
+"This iteration is based on another %s revision and there is no simple "
+"diff."
+msgstr ""
+"Эта итерация основана на другой ревизии %s, простой diff невозможен."
+
+msgid "No changes found on %s %s since previous iteration."
+msgstr "Нет изменений на %s %s относительно предыдущей итерации."
+
+msgid "Closed, next iteration: %s ."
+msgstr "Закрыто. Следующая итерация: %s."
+
msgid "latest tip"
msgstr "последняя версия"
+msgid "SSH key %r is invalid: %s"
+msgstr "Ошибка ключа SSH %r: %s"
+
+msgid "SSH key %s is already used by %s"
+msgstr "Ключ SSH %s уже используется пользователем %s"
+
msgid "New user registration"
msgstr "Регистрация нового пользователя"
msgid ""
+"You can't remove this user since it is crucial for the entire application"
+msgstr ""
+"Вы не можете удалить этого пользователя, поскольку это критично для "
+"работы всего приложения"
+
+msgid ""
"User \"%s\" still owns %s repositories and cannot be removed. Switch "
"owners or remove those repositories: %s"
msgstr ""
@@ -808,12 +1110,34 @@ msgstr ""
msgid "Password reset link"
msgstr "Ссылка сброса пароля"
+msgid "Password reset notification"
+msgstr "Уведомление о сбросе пароля"
+
+msgid ""
+"The password to your account %s has been changed using password reset "
+"form."
+msgstr "Пароль к вашему аккаунту %s был изменён через форму сброса пароля."
+
msgid "Value cannot be an empty list"
msgstr "Значение не может быть пустым списком"
msgid "Username \"%(username)s\" already exists"
msgstr "Пользователь с именем \"%(username)s\" уже существует"
+msgid "Username \"%(username)s\" cannot be used"
+msgstr "Имя «%(username)s» недопустимо"
+
+msgid ""
+"Username may only contain alphanumeric characters underscores, periods or "
+"dashes and must begin with an alphanumeric character or underscore"
+msgstr ""
+"Имя пользователя может содержать только буквы, цифры, символы "
+"подчеркивания, точки и тире, а также должно начинаться с буквы, цифры или "
+"с символа подчеркивания"
+
+msgid "The input is not valid"
+msgstr "Введено некорректное значение"
+
msgid "Username %(username)s is not valid"
msgstr "Имя \"%(username)s\" недопустимо"
@@ -848,6 +1172,12 @@ msgstr "Неверно задан старый пароль"
msgid "Passwords do not match"
msgstr "Пароли не совпадают"
+msgid "Invalid username or password"
+msgstr "Неверное имя пользователя или пароль"
+
+msgid "Repository name %(repo)s is not allowed"
+msgstr "Имя репозитория %(repo)s недопустимо"
+
msgid "Repository named %(repo)s already exists"
msgstr "Репозитарий %(repo)s уже существует"
@@ -857,8 +1187,18 @@ msgstr "Репозитарий \"%(repo)s\" уже существует в группе \"%(group)s\""
msgid "Repository group with name \"%(repo)s\" already exists"
msgstr "Группа репозиториев \"%(repo)s\" уже существует"
+msgid "Invalid repository URL"
+msgstr "Недопустимый URL репозитория"
+
+msgid ""
+"Invalid repository URL. It must be a valid http, https, ssh, svn+http or "
+"svn+https URL"
+msgstr ""
+"Недопустимый URL репозитория. Требуется корректный http, https, ssh, svn"
+"+http или svn+https URL"
+
msgid "Fork has to be the same type as parent"
-msgstr "Тип форка будет совпадать с родительским"
+msgstr "Форк будет иметь тот же тип, что и родительский"
msgid "You don't have permissions to create repository in this group"
msgstr "У вас недостаточно прав для создания репозиториев в этой группе"
@@ -875,6 +1215,12 @@ msgstr "Данное имя пользователя или группы пользователей недопустимо"
msgid "This is not a valid path"
msgstr "Этот путь ошибочен"
+msgid "This email address is already in use"
+msgstr "Этот адрес почты уже занят"
+
+msgid "Email address \"%(email)s\" not found"
+msgstr "Адрес «%(email)s» не зарегистрирован"
+
msgid ""
"The LDAP Login attribute of the CN must be specified - this is the name "
"of the attribute that is equivalent to \"username\""
@@ -899,6 +1245,9 @@ msgstr ""
msgid "Filename cannot be inside a directory"
msgstr "Файла нет в каталоге"
+msgid "Plugins %(loaded)s and %(next_to_load)s both export the same name"
+msgstr "Плагины %(loaded)s и %(next_to_load)s экспортируют одно и то же имя"
+
msgid "About"
msgstr "О программе"
@@ -942,6 +1291,9 @@ msgstr "Имя пользователя"
msgid "Password"
msgstr "Пароль"
+msgid "Stay logged in after browser restart"
+msgstr "Оставаться в системе после перезапуска браузера"
+
msgid "Forgot your password ?"
msgstr "Забыли пароль?"
@@ -969,6 +1321,35 @@ msgstr "Капча"
msgid "Send Password Reset Email"
msgstr "Послать ссылку сброса пароля"
+msgid ""
+"A password reset link will be sent to the specified email address if it "
+"is registered in the system."
+msgstr ""
+"Ссылка для сброса пароля будет отправлена на указанный адрес e-mail, если "
+"он зарегистрирован в системе."
+
+msgid "You are about to set a new password for the email address %s."
+msgstr "Вы собираетесь установить новый пароль для адреса %s."
+
+msgid ""
+"Note that you must use the same browser session for this as the one used "
+"to request the password reset."
+msgstr ""
+"Обратите внимание, что вы должны оставаться в пределах этой сессии "
+"браузера, поскольку в ней был запрошен сброс пароля."
+
+msgid "Code you received in the email"
+msgstr "Код, который вы получили по почте"
+
+msgid "New Password"
+msgstr "Новый пароль"
+
+msgid "Confirm New Password"
+msgstr "Подтверждение пароля"
+
+msgid "Confirm"
+msgstr "Подтвердить"
+
msgid "Sign Up"
msgstr "Регистрация"
@@ -987,6 +1368,11 @@ msgstr "Фамилия"
msgid "Email"
msgstr "E-mail"
+msgid "Registered accounts are ready to use and need no further action."
+msgstr ""
+"Зарегистрированные аккаунты готовы к использованию и не требуют "
+"дальнейших действий."
+
msgid "Please wait for an administrator to activate your account."
msgstr ""
"Пожалуйста, подождите, пока администратор подтвердит Вашу регистрацию."
@@ -995,7 +1381,7 @@ msgid "Admin Journal"
msgstr "Журнал администратора"
msgid "journal filter..."
-msgstr "Фильтр журнала..."
+msgstr "фильтр..."
msgid "Filter"
msgstr "Отфильтровать"
@@ -1003,8 +1389,8 @@ msgstr "Отфильтровать"
msgid "%s Entry"
msgid_plural "%s Entries"
msgstr[0] "%s запись"
-msgstr[1] "%s записей"
-msgstr[2] "%s записи"
+msgstr[1] "%s записи"
+msgstr[2] "%s записей"
msgid "Action"
msgstr "Действие"
@@ -1013,7 +1399,7 @@ msgid "Date"
msgstr "Дата"
msgid "From IP"
-msgstr "С IP"
+msgstr "IP"
msgid "No actions yet"
msgstr "Действия ещё не производились"
@@ -1030,6 +1416,13 @@ msgstr "Плагины аутентификации"
msgid "Enabled Plugins"
msgstr "Включенные плагины"
+msgid ""
+"Comma-separated list of plugins; Kallithea will try user authentication "
+"in plugin order"
+msgstr ""
+"Список плагинов через запятую. Kallithea будет аутентифицировать "
+"пользователя в порядке указания плагинов"
+
msgid "Available built-in plugins"
msgstr "Доступные встроенные плагины"
@@ -1068,12 +1461,28 @@ msgstr "Включить меню скачивания на странице «Общие сведения»."
msgid "Edit Gist"
msgstr "Правка gist-записи"
+msgid ""
+"Gist was updated since you started editing. Copy your changes and click "
+"%(here)s to reload new version."
+msgstr ""
+"Gist был изменён с момента начала редактирования. Скопируйте свои правки "
+"и нажмите %(here)s для загрузки новой версии."
+
+msgid "here"
+msgstr "сюда"
+
msgid "Gist description ..."
msgstr "Описание..."
+msgid "Gist lifetime"
+msgstr "Время жизни gist-записи"
+
msgid "Expires"
msgstr "Истекает"
+msgid "Never"
+msgstr "никогда"
+
msgid "Update Gist"
msgstr "Обновить"
@@ -1098,6 +1507,18 @@ msgstr "Создано"
msgid "There are no gists yet"
msgstr "Gist-записи отсутствуют"
+msgid "New Gist"
+msgstr "Новый gist"
+
+msgid "Name this gist ..."
+msgstr "Назовите этот gist…"
+
+msgid "Create Private Gist"
+msgstr "Создать приватный gist"
+
+msgid "Create Public Gist"
+msgstr "Создать публичный gist"
+
msgid "Reset"
msgstr "Сброс"
@@ -1107,6 +1528,12 @@ msgstr "Gist"
msgid "URL"
msgstr "URL"
+msgid "Public Gist"
+msgstr "Публичный Gist"
+
+msgid "Private Gist"
+msgstr "Приватный Gist"
+
msgid "Delete"
msgstr "Удалить"
@@ -1126,17 +1553,76 @@ msgid "Show as raw"
msgstr "Показать только текст"
msgid "My Account"
-msgstr "Мой Аккаунт"
+msgstr "Мой аккаунт"
msgid "Profile"
msgstr "Профиль"
+msgid "Email Addresses"
+msgstr "Адреса E-mail"
+
+msgid "SSH Keys"
+msgstr "Ключи SSH"
+
msgid "API Keys"
msgstr "API-ключи"
+msgid "Owned Repositories"
+msgstr "Свои репозитории"
+
+msgid "Watched Repositories"
+msgstr "Наблюдаемые репозитории"
+
+msgid "Show Permissions"
+msgstr "Права доступа"
+
+msgid "Built-in"
+msgstr "Встроенный"
+
+msgid "Confirm to reset this API key: %s"
+msgstr "Подтвердите сброс этого API-ключа: %s"
+
+msgid "Expired"
+msgstr "Срок действия истёк"
+
+msgid "Confirm to remove this API key: %s"
+msgstr "Подтвердите удаление этого API-ключа: %s"
+
+msgid "Remove"
+msgstr "Удалить"
+
+msgid "No additional API keys specified"
+msgstr "Дополнительные API-ключи не указаны"
+
+msgid "New API key"
+msgstr "Новый API-ключ"
+
msgid "Add"
msgstr "Добавить"
+msgid ""
+"\n"
+"API keys are used to let scripts or services access %s using your\n"
+"account, as if you had provided the script or service with your actual\n"
+"password.\n"
+msgstr ""
+"\n"
+"Ключи API позволяют скриптам или сервисам получать \n"
+"доступ к %s от имени вашего аккаунта, как если бы вы \n"
+"указали в скрипте или сервисе свой реальный пароль.\n"
+
+msgid ""
+"\n"
+"Like passwords, API keys should therefore never be shared with others,\n"
+"nor passed to untrusted scripts or services. If such sharing should\n"
+"happen anyway, reset the API key on this page to prevent further use.\n"
+msgstr ""
+"\n"
+"Как и пароли, ключи API не следует передавать третьим лицам,\n"
+"ненадёжным скриптам и сервисам. Если это всё же произошло, \n"
+"сбросьте ключ на этой странице, чтобы предотвратить\n"
+"его дальнейшее использование.\n"
+
msgid "Primary"
msgstr "Основной"
@@ -1161,20 +1647,57 @@ msgstr "Новый пароль"
msgid "Confirm new password"
msgstr "Подтвердите новый пароль"
+msgid ""
+"This account is managed with %s and the password cannot be changed here"
+msgstr "Этим аккаунтом управляет %s, поэтому здесь нельзя сменить пароль"
+
+msgid "Current IP"
+msgstr "Текущий IP-адрес"
+
+msgid "Gravatar"
+msgstr "Gravatar"
+
+msgid "Change %s avatar at"
+msgstr "Измените аватар %s на"
+
msgid "Avatars are disabled"
msgstr "Аватары отключены"
msgid "Repositories You Own"
-msgstr "Репозитории, где Вы — владелец"
+msgstr "Ваши репозитории"
msgid "Name"
msgstr "Имя"
+msgid "Fingerprint"
+msgstr "Отпечаток"
+
+msgid "Last Used"
+msgstr "Последнее использование"
+
+msgid "Confirm to remove this SSH key: %s"
+msgstr "Подтвердите удаление этого ключа SSH: %s"
+
+msgid "No SSH keys have been added"
+msgstr "Ключи SSH не были добавлены"
+
+msgid "New SSH key"
+msgstr "Новый ключ SSH"
+
+msgid "Public key"
+msgstr "Публичный ключ"
+
+msgid "Public key (contents of e.g. ~/.ssh/id_rsa.pub)"
+msgstr "Публичный ключ (например, из файла ~/.ssh/id_rsa.pub)"
+
msgid "Repositories You are Watching"
-msgstr "Репозитории, за которыми Вы наблюдаете"
+msgstr "Репозитории, за которыми вы наблюдаете"
msgid "Default Permissions"
-msgstr "Стандартные привилегии"
+msgstr "Права по умолчанию"
+
+msgid "Global"
+msgstr "Глобальные"
msgid "IP Whitelist"
msgstr "Белый список IP"
@@ -1182,6 +1705,16 @@ msgstr "Белый список IP"
msgid "Anonymous access"
msgstr "Анонимный доступ"
+msgid "Allow anonymous access"
+msgstr "Разрешить анонимный доступ"
+
+msgid ""
+"Allow access to Kallithea without needing to log in. Anonymous users use "
+"%s user permissions."
+msgstr ""
+"Разрешить доступ к Kallithea без авторизации. Анонимные пользователи "
+"будут использовать права доступа пользователя %s."
+
msgid ""
"All default permissions on each repository will be reset to chosen "
"permission, note that all custom default permission on repositories will "
@@ -1191,6 +1724,12 @@ msgstr ""
"репозитория. Учтите, что ранее установленные привилегии по умолчанию "
"будут сброшены"
+msgid "Apply to all existing repositories"
+msgstr "Применить ко всем репозиториям"
+
+msgid "Permissions for the Default user on new repositories."
+msgstr "Права пользователя по умолчанию для новых репозиториев."
+
msgid "Repository group"
msgstr "Группа репозиториев"
@@ -1203,21 +1742,82 @@ msgstr ""
"репозиториев. Учтите, что ранее установленные привилегии по умолчанию для "
"групп репозиториев будут сброшены"
+msgid "Apply to all existing repository groups"
+msgstr "Применить ко всем группам репозиториев"
+
+msgid "Permissions for the Default user on new repository groups."
+msgstr "Права пользователя по умолчанию для новых групп репозиториев."
+
msgid "User group"
msgstr "Группа пользователей"
+msgid ""
+"All default permissions on each user group will be reset to chosen "
+"permission, note that all custom default permission on user groups will "
+"be lost"
+msgstr ""
+"Выбранные привилегии будут установлены по умолчанию для каждой группы "
+"пользователей. Учтите, что ранее установленные привилегии по умолчанию "
+"для групп пользователей будут сброшены"
+
+msgid "Apply to all existing user groups"
+msgstr "Применить ко всем группам пользователей"
+
+msgid "Permissions for the Default user on new user groups."
+msgstr "Права пользователя по умолчанию для новых групп пользователей."
+
+msgid "Top level repository creation"
+msgstr "Создание репозитория верхнего уровня"
+
+msgid ""
+"Enable this to allow non-admins to create repositories at the top level."
+msgstr ""
+"Включите, чтобы разрешить всем пользователям создавать репозитории на "
+"верхнем уровне."
+
+msgid ""
+"Note: This will also give all users API access to create repositories "
+"everywhere. That might change in future versions."
+msgstr ""
+"Внимание: это также позволит всем пользователям с помощью API создавать "
+"репозитории где угодно. Это может измениться в будущих версиях."
+
+msgid "Repository creation with group write access"
+msgstr "Создание репозитория с правом записи в группы"
+
+msgid ""
+"With this, write permission to a repository group allows creating "
+"repositories inside that group. Without this, group write permissions "
+"mean nothing."
+msgstr ""
+"С этой опцией, право записи в группу репозиториев позволяет создавать "
+"репозитории в этой группе. Без неё, право записи в группу не имеет "
+"действия."
+
msgid "User group creation"
msgstr "Создание групп пользователей"
+msgid "Enable this to allow non-admins to create user groups."
+msgstr ""
+"Включите для возможности создавать группы пользователей любым "
+"пользователям."
+
msgid "Repository forking"
msgstr "Создание форка репозитория"
+msgid "Enable this to allow non-admins to fork repositories."
+msgstr ""
+"Включите для возможности создавать форки репозиториев любым пользователем."
+
msgid "Registration"
msgstr "Регистрация"
msgid "External auth account activation"
msgstr "Активация сторонней учетной записи"
+msgid "Confirm to delete this IP address: %s"
+msgstr "Подтвердите удаление IP-адреса: %s"
+
msgid "All IP addresses are allowed."
msgstr "Все IP-адреса разрешены."
@@ -1233,6 +1833,12 @@ msgstr "Имя группы"
msgid "Group parent"
msgstr "Родительская группа"
+msgid "Copy parent group permissions"
+msgstr "Скопировать родительские права доступа"
+
+msgid "Copy permission set from parent repository group."
+msgstr "Скопировать набор прав доступа из родительской группы репозиториев."
+
msgid "%s Repository Group Settings"
msgstr "Настройки группы репозиториев %s"
@@ -1243,14 +1849,23 @@ msgid "Settings"
msgstr "Настройки"
msgid "Advanced"
-msgstr "Дополнительно"
+msgstr "Продвинутые"
msgid "Permissions"
-msgstr "Привилегии"
+msgstr "Права доступа"
msgid "Repository Group: %s"
msgstr "Группа репозиториев: %s"
+msgid "Top level repositories"
+msgstr "Репозитории верхнего уровня"
+
+msgid "Total repositories"
+msgstr "Всего репозиториев"
+
+msgid "Children groups"
+msgstr "Дочерние группы"
+
msgid "Created on"
msgstr "Создано"
@@ -1261,23 +1876,70 @@ msgstr[1] "Подтвердите удаление группы %s, содержащей %s репозитория"
msgstr[2] "Подтвердите удаление группы %s, содержащей %s репозиториев"
msgid "Delete this repository group"
-msgstr "Удалить эту группу репозиториев"
+msgstr "Удалить группу репозиториев"
+
+msgid "Not visible"
+msgstr "Невидимый"
+
+msgid "Visible"
+msgstr "Видимый"
+
+msgid "Add repos"
+msgstr "Добавлять репозитории"
+
+msgid "Add/Edit groups"
+msgstr "Добавлять/Редактировать группы"
+
+msgid "User/User Group"
+msgstr "Пользователь/Группа"
+
+msgid "Default"
+msgstr "По умолчанию"
+
+msgid "Revoke"
+msgstr "Отозвать"
msgid "Add new"
msgstr "Добавить новый"
+msgid "Apply to children"
+msgstr "Применить к дочерним"
+
+msgid "Both"
+msgstr "Все"
+
+msgid ""
+"Set or revoke permission to all children of that group, including non-"
+"private repositories and other groups if selected."
+msgstr ""
+"Установить или отозвать права всех дочерних элементов этой группы, "
+"включая публичные репозитории и другие группы, если они выбраны."
+
msgid "Remove this group"
-msgstr "Удалить эту группу"
+msgstr "Удалить группу"
msgid "Confirm to delete this group"
msgstr "Подтвердите удаление этой группы пользователей"
+msgid "Repository group %s"
+msgstr "Группа репозиториев %s"
+
msgid "Repository Groups Administration"
msgstr "Администрирование групп репозиториев"
msgid "Number of Top-level Repositories"
msgstr "Число репозиториев верхнего уровня"
+msgid "Clone remote repository"
+msgstr "Клонировать удалённый репозиторий"
+
+msgid ""
+"Optional: URL of a remote repository. If set, the repository will be "
+"created as a clone from this URL."
+msgstr ""
+"Опционально: URL удалённого репозитория. Если параметр задан, то будет "
+"создан клон репозитория, расположенного по этому адресу."
+
msgid ""
"Keep it short and to the point. Use a README file for longer descriptions."
msgstr ""
@@ -1292,17 +1954,44 @@ msgstr "Тип создаваемого репозитория."
msgid "Landing revision"
msgstr "Ревизия для выгрузки"
+msgid ""
+"Default revision for files page, downloads, full text search index and "
+"readme generation"
+msgstr ""
+"Ревизия по умолчанию для страницы файлов, загрузки, полнотекстовый "
+"поисковый индекс и генерация readme"
+
msgid "%s Creating Repository"
msgstr "Создание репозитория %s"
+msgid "Creating repository"
+msgstr "Создание репозитория"
+
+msgid ""
+"Repository \"%(repo_name)s\" is being created, you will be redirected "
+"when this process is finished.repo_name"
+msgstr ""
+"Репозиторий «%(repo_name)s» создаётся. Вы будете перенаправлены, когда "
+"процесс завершится."
+
+msgid ""
+"We're sorry but error occurred during this operation. Please check your "
+"Kallithea server logs, or contact administrator."
+msgstr ""
+"К сожалению, во время данной операции произошла ошибка. Пожалуйста, "
+"проверьте журнал сервера Kallithea или свяжитесь с администратором."
+
msgid "%s Repository Settings"
msgstr "Настройки репозитория %s"
msgid "Extra Fields"
msgstr "Дополнительные поля"
+msgid "Caches"
+msgstr "Кэш"
+
msgid "Remote"
-msgstr "Удалённый"
+msgstr "Удалённый репозиторий"
msgid "Statistics"
msgstr "Статистика"
@@ -1314,7 +2003,7 @@ msgid "Set"
msgstr "Набор"
msgid "Manually set this repository as a fork of another from the list."
-msgstr "Вручную сделать этот репозиторий форком выбранного из списка."
+msgstr "Вручную задать этот репозиторий форком репозитория из этого списка."
msgid "Public Journal Visibility"
msgstr "Доступ к публичному журналу"
@@ -1340,15 +2029,24 @@ msgstr "Удалить этот репозиторий"
msgid "This repository has %s fork"
msgid_plural "This repository has %s forks"
-msgstr[0] "Данный репозиторий имеет %s копию"
-msgstr[1] "Данный репозиторий имеет %s копии"
-msgstr[2] "Данный репозиторий имеет %s копий"
+msgstr[0] "Данный репозиторий имеет %s форк"
+msgstr[1] "Данный репозиторий имеет %s форка"
+msgstr[2] "Данный репозиторий имеет %s форков"
msgid "Detach forks"
-msgstr "Отсоединить fork'и"
+msgstr "Отделить форки"
msgid "Delete forks"
-msgstr "Удалить fork'и"
+msgstr "Удалить форки"
+
+msgid ""
+"The deleted repository will be moved away and hidden until the "
+"administrator expires it. The administrator can both permanently delete "
+"it or restore it."
+msgstr ""
+"Удаляемый репозиторий будет перемещён и скрыт на срок, определяемый "
+"администратором. Администратор может либо удалить, либо восстановить "
+"репозиторий."
msgid "Invalidate Repository Cache"
msgstr "Сбросить кэш репозитория"
@@ -1371,6 +2069,9 @@ msgstr "Ключ"
msgid "Active"
msgstr "Активный"
+msgid "Label"
+msgstr "Имя"
+
msgid "Confirm to delete this field: %s"
msgstr "Подтвердите удаление этого поля: %s"
@@ -1392,14 +2093,72 @@ msgstr "Введите описание поля"
msgid "Extra fields are disabled."
msgstr "Дополнительные поля отключены."
+msgid "Private Repository"
+msgstr "Приватный репозиторий"
+
+msgid "Fork of repository"
+msgstr "Форк репозитория"
+
+msgid "Remote repository URL"
+msgstr "Ссылка на удалённый репозиторий"
+
+msgid "Pull Changes from Remote Repository"
+msgstr "Изменения из удалённого репозитория"
+
+msgid "Confirm to pull changes from remote repository."
+msgstr "Подтвердите применение изменений из удалённого репозитория."
+
+msgid "This repository does not have a remote repository URL."
+msgstr "Данный репозиторий не имеет URL удалённого репозитория."
+
+msgid "Permanent URL"
+msgstr "Постоянный URL"
+
+msgid ""
+"In case this repository is renamed or moved into another group the "
+"repository URL changes.\n"
+" Using the above permanent URL guarantees "
+"that this repository always will be accessible on that URL.\n"
+" This is useful for CI systems, or any "
+"other cases that you need to hardcode the URL into a 3rd party service."
+msgstr ""
+"В случае, когда репозиторий переименовывается или перемещается в другую "
+"группу, URL репозитория изменяется.\n"
+" Использование постоянного URL гарантирует, "
+"что данный репозиторий всегда будет доступен по этому URL.\n"
+" Это может быть полезно в CI-системах, или "
+"в любом другом случае, требующем встраивания URL в код ПО."
+
+msgid "Remote repository"
+msgstr "Удалённый репозиторий"
+
+msgid "Repository URL"
+msgstr "URL репозитория"
+
+msgid ""
+"Optional: URL of a remote repository. If set, the repository can be "
+"pulled from this URL."
+msgstr ""
+"Опционально: URL удалённого репозитория. Если задан, то репозиторий можно "
+"получить по заданному адресу."
+
msgid "Default revision for files page, downloads, whoosh and readme"
msgstr ""
"Ревизия по умолчанию, из которой будет производиться выгрузка файлов при "
"скачивании"
+msgid "Type name of user"
+msgstr "Введите имя пользователя"
+
msgid "Change owner of this repository."
msgstr "Изменить владельца репозитория."
+msgid "Processed commits"
+msgstr "Обработанные фиксации"
+
+msgid "Processed progress"
+msgstr "Прогресс обработки"
+
msgid "Reset Statistics"
msgstr "Сброс статистики"
@@ -1409,48 +2168,185 @@ msgstr "Подтвердите сброс статистики."
msgid "Repositories Administration"
msgstr "Администрирование репозиториев"
+msgid "State"
+msgstr "Состояние"
+
msgid "Settings Administration"
msgstr "Администрирование настроек"
+msgid "VCS"
+msgstr "Контроль версий"
+
+msgid "Remap and Rescan"
+msgstr "Пересканирование"
+
+msgid "Visual"
+msgstr "Вид"
+
msgid "Hooks"
msgstr "Хуки"
+msgid "Full Text Search"
+msgstr "Полнотекстовый поиск"
+
+msgid "System Info"
+msgstr "О системе"
+
+msgid "Send test email to"
+msgstr "Отправлять пробное сообщение на адрес"
+
msgid "Send"
msgstr "Отправить"
msgid "Site branding"
-msgstr "Брендинг сайта"
+msgstr "Заголовок сайта"
+
+msgid "Set a custom title for your Kallithea Service."
+msgstr "Задать другое имя для Kallithea Service."
msgid "HTTP authentication realm"
msgstr "Приветствие для HTTP-аутентификации"
+msgid "HTML/JavaScript/CSS customization block"
+msgstr "Блок редактирования HTML/JavaScript/CSS"
+
+msgid ""
+"HTML (possibly with JavaScript and/or CSS) that "
+"will be added to the bottom of every page. This "
+"can be used for web analytics systems, but also "
+"to perform instance-specific customizations like "
+"adding a project banner at the top of every page."
+msgstr ""
+"Код HTML (можно с JavaScript и/или CSS), который "
+"будет добавлен внизу каждой страницы. Может "
+"использоваться для размещения веб-аналитики, но "
+"также и для создания индивидуальных "
+"модификаций, например, для размещения баннера "
+"проекта на каждой странице."
+
+msgid "ReCaptcha public key"
+msgstr "Открытый ключ reCaptcha"
+
+msgid "Public key for reCaptcha system."
+msgstr "Открытый ключ системы reCaptcha."
+
+msgid "ReCaptcha private key"
+msgstr "Закрытый ключ reCaptcha"
+
+msgid ""
+"Private key for reCaptcha system. Setting this value will enable captcha "
+"on registration."
+msgstr ""
+"Закрытый ключ системы reCaptcha. Задание этого значения включит капчу при "
+"регистрации."
+
msgid "Save Settings"
msgstr "Сохранить настройки"
+msgid "Built-in Mercurial Hooks (Read-Only)"
+msgstr "Встроенные хуки Mercurial (только чтение)"
+
msgid "Custom Hooks"
msgstr "Пользовательские хуки"
+msgid ""
+"Hooks can be used to trigger actions on certain events such as push / "
+"pull. They can trigger Python functions or external applications."
+msgstr ""
+"Хуки используются для активации действий при определённых событиях, "
+"например, push/pull-запросах. Могут активироваться функции Python либо "
+"внешние приложения."
+
msgid "Failed to remove hook"
msgstr "Не удалось удалить хук"
+msgid "Rescan options"
+msgstr "Опции пересканирования"
+
+msgid "Delete records of missing repositories"
+msgstr "Удалить записи об отсутствующих репозиториях"
+
+msgid ""
+"Check this option to remove all comments, pull requests and other records "
+"related to repositories that no longer exist in the filesystem."
+msgstr ""
+"Отметьте для удаления всех комментариев, pull-запросов и других записей, "
+"связанных с репозиториями, которые больше не существуют в файловой "
+"системе."
+
msgid "Invalidate cache for all repositories"
msgstr "Сбросить кэш для всех репозиториев"
msgid "Check this to reload data and clear cache keys for all repositories."
-msgstr "Сбросить кэш для всех репозиториев."
+msgstr ""
+"Отметьте, чтобы перезагрузить данные и очистить ключи кэша у всех "
+"репозиториев."
+
+msgid "Install Git hooks"
+msgstr "Установить хуки Git"
+
+msgid ""
+"Verify if Kallithea's Git hooks are installed for each repository. "
+"Current hooks will be updated to the latest version."
+msgstr ""
+"Проверяет установку Git хуков от Kallithea у каждого репозитория. Текущие "
+"хуки будут обновлены до последней версии."
+
+msgid "Overwrite existing Git hooks"
+msgstr "Перезаписать существующие хуки"
+
+msgid ""
+"If installing Git hooks, overwrite any existing hooks, even if they do "
+"not seem to come from Kallithea. WARNING: This operation will destroy any "
+"custom git hooks you may have deployed by hand!"
+msgstr ""
+"Перезаписывает все существующие хуки при установке хуков Git, даже если "
+"они не поставляются с Kallithea. ПРЕДУПРЕЖДЕНИЕ: это действие уничтожит "
+"любые Git хуки, которые могли быть созданы вручную!"
+
+msgid "Rescan Repositories"
+msgstr "Пересканировать репозитории"
msgid "Index build option"
msgstr "Опции создания индекса"
msgid "Build from scratch"
-msgstr "Сборка с нуля"
+msgstr "Пересобрать"
+
+msgid ""
+"This option completely reindexeses all of the repositories for proper "
+"fulltext search capabilities."
+msgstr ""
+"Эта опция полностью переиндексирует все репозитории для корректной работы "
+"полнотекстового поиска."
msgid "Reindex"
msgstr "Перестроить индекс"
+msgid "Checking for updates..."
+msgstr "Поиск обновлений..."
+
+msgid "Kallithea version"
+msgstr "Версия Kallithea"
+
+msgid "Kallithea configuration file"
+msgstr "Конфигурационный файл Kallithea"
+
+msgid "Python version"
+msgstr "Версия Python"
+
+msgid "Platform"
+msgstr "Платформа"
+
msgid "Git version"
msgstr "Версия Git"
+msgid "Git path"
+msgstr "Путь к Git"
+
+msgid "Python Packages"
+msgstr "Пакеты Python"
+
msgid "Show repository size after push"
msgstr "Показывать размер репозитория после отправки"
@@ -1466,6 +2362,13 @@ msgstr "Включить поддержку больших файлов"
msgid "Enable hgsubversion extension"
msgstr "Включить поддержку hgsubversion"
+msgid ""
+"Requires hgsubversion library to be installed. Enables cloning of remote "
+"Subversion repositories while converting them to Mercurial."
+msgstr ""
+"Требует наличия библиотеки hgsubversion. Включает клонирование удалённых "
+"репозиториев Subversion с последующим конвертированием в Mercurial."
+
msgid "Location of repositories"
msgstr "Местонахождение репозиториев"
@@ -1476,6 +2379,13 @@ msgstr ""
"Нажмите для разблокирования. Изменения вступят в силу после перезагрузки "
"Kallithea."
+msgid ""
+"Filesystem location where repositories are stored. After changing this "
+"value, a restart and rescan of the repository folder are both required."
+msgstr ""
+"Путь к репозиториям в файловой системе. После изменения значения "
+"требуется перезапуск и пересканирование папки с репозиториями."
+
msgid "General"
msgstr "Главное"
@@ -1488,6 +2398,116 @@ msgstr "Позволяет хранить дополнительные поля в репозиториях."
msgid "Show Kallithea version"
msgstr "Отображать версию Kallithea"
+msgid ""
+"Shows or hides a version number of Kallithea displayed in the footer."
+msgstr "Показывает или скрывает версию Kallithea внизу страницы."
+
+msgid "Show user Gravatars"
+msgstr "Отображать Gravatars пользователя"
+
+msgid ""
+"Gravatar URL allows you to use another avatar server application.\n"
+" The following "
+"variables of the URL will be replaced accordingly.\n"
+" {scheme} "
+"'http' or 'https' sent from running Kallithea server,\n"
+" {email} user "
+"email,\n"
+" {md5email} md5 "
+"hash of the user email (like at gravatar.com),\n"
+" {size} size "
+"of the image that is expected from the server application,\n"
+" {netloc} "
+"network location/server host of running Kallithea server"
+msgstr ""
+"Поле Gravatar URL позволяет использовать любой другой сервис аватаров.\n"
+" В URL можно "
+"использовать следующие переменные:\n"
+" {scheme} "
+"используемый протокол, 'http' или 'https',\n"
+" {email} e-"
+"mail пользователя,\n"
+" {md5email} хэш "
+"md5 адреса почты пользователя (как на gravatar.com),\n"
+" {size} "
+"ожидаемый размер изображения,\n"
+" {netloc} "
+"сетевой путь/адрес хоста сервера Kallithea"
+
+msgid "HTTP Clone URL"
+msgstr "Ссылка для клонирования по HTTP"
+
+msgid ""
+"Schema of clone URL construction eg. '{scheme}://{user}@{netloc}/"
+"{repo}'.\n"
+" The following "
+"variables are available:\n"
+" {scheme} 'http' or "
+"'https' sent from running Kallithea server,\n"
+" {user} current user "
+"username,\n"
+" {netloc} network "
+"location/server host of running Kallithea server,\n"
+" {repo} full "
+"repository name,\n"
+" {repoid} ID of "
+"repository, can be used to construct clone-by-id,\n"
+" {system_user} name "
+"of the Kallithea system user,\n"
+" {hostname} server "
+"hostname\n"
+" "
+msgstr ""
+"Схема URL для клонирования, например: '{scheme}://{user}@{netloc}/"
+"{repo}'.\n"
+" Доступны следующие "
+"переменные:\n"
+" {scheme} используемый "
+"протокол, 'http' or 'https',\n"
+" {user} имя текущего "
+"пользователя,\n"
+" {netloc} сетевой путь/"
+"адрес хоста сервера Kallithea,\n"
+" {repo} полное имя "
+"репозитория,\n"
+" {repoid} ID "
+"репозитория, может применяться для клонирования по идентификатору,\n"
+" {system_user} имя "
+"пользователя Kallithea в системе,\n"
+" {hostname} имя хоста "
+"сервера\n"
+" "
+
+msgid "SSH Clone URL"
+msgstr "Ссылка для клонирования по SSH"
+
+msgid ""
+"Schema for constructing SSH clone URL, eg. 'ssh://{system_user}"
+"@{hostname}/{repo}'."
+msgstr ""
+"Схема URL для клонирования по SSH, например: 'ssh://{system_user}"
+"@{hostname}/{repo}'."
+
+msgid "Repository page size"
+msgstr "Размер страницы репозитория"
+
+msgid ""
+"Number of items displayed in the repository pages before pagination is "
+"shown."
+msgstr ""
+"Количество элементов на странице репозитория до появления нумерации "
+"страниц."
+
+msgid "Admin page size"
+msgstr "Размер страницы администратора"
+
+msgid ""
+"Number of items displayed in the admin pages grids before pagination is "
+"shown."
+msgstr ""
+"Количество элементов в сетке страницы администратора до появления "
+"нумерации страниц."
+
msgid "Icons"
msgstr "Иконки"
@@ -1500,15 +2520,49 @@ msgstr "Показывать иконки приватных репозиториев"
msgid "Show public/private icons next to repository names."
msgstr "Показывать иконки публичных репозиториев."
+msgid "Meta Tagging"
+msgstr "Метатегирование"
+
+msgid ""
+"Parses meta tags from the repository description field and turns them "
+"into colored tags."
+msgstr ""
+"Анализирует мета-теги в поле описания репозитория и отображает их в виде "
+"цветных тегов."
+
+msgid "Stylify recognised meta tags:"
+msgstr "Стилизовать обнаруженные мета-теги:"
+
msgid "Add user group"
msgstr "Добавить группу пользователей"
+msgid "User Groups"
+msgstr "Группы пользователей"
+
+msgid "Add User Group"
+msgstr "Добавить группу пользователей"
+
+msgid "Short, optional description for this user group."
+msgstr "Краткое, опциональное описание этой группы."
+
+msgid "%s user group settings"
+msgstr "Настройки группы %s"
+
+msgid "Show Members"
+msgstr "Участники"
+
+msgid "User Group: %s"
+msgstr "Группа пользователей: %s"
+
msgid "Members"
msgstr "Участники"
msgid "Confirm to delete this user group: %s"
msgstr "Подтвердите удаление следующей группы пользователей: %s"
+msgid "Delete this user group"
+msgstr "Удалить группу"
+
msgid "No members yet"
msgstr "Нет участников"
@@ -1527,21 +2581,57 @@ msgstr "Добавить пользователя"
msgid "Users"
msgstr "Пользователи"
+msgid "Add User"
+msgstr "Добавить пользователя"
+
msgid "Password confirmation"
msgstr "Подтверждение пароля"
+msgid "%s user settings"
+msgstr "Настройки пользователя %s"
+
+msgid "Emails"
+msgstr "Электронная почта"
+
+msgid "User: %s"
+msgstr "Пользователь: %s"
+
+msgid "Source of Record"
+msgstr "Источник записи"
+
+msgid "Last Login"
+msgstr "Последний вход"
+
+msgid "Member of User Groups"
+msgstr "Член групп пользователей"
+
msgid "Confirm to delete this user: %s"
msgstr "Подтвердите удаление пользователя %s"
+msgid "Delete this user"
+msgstr "Удалить пользователя"
+
+msgid "Inherited from %s"
+msgstr "Унаследовано от %s"
+
+msgid "Name in Source of Record"
+msgstr "Имя в источнике записи"
+
msgid "New password confirmation"
msgstr "Подтвердите новый пароль"
msgid "Users Administration"
msgstr "Администрирование пользователей"
+msgid "Auth Type"
+msgstr "Тип авторизации"
+
msgid "Server instance: %s"
msgstr "Экземпляр сервера: %s"
+msgid "Support"
+msgstr "Поддержка"
+
msgid "Mercurial repository"
msgstr "Репозиторий Mercurial"
@@ -1570,7 +2660,7 @@ msgid "Options"
msgstr "Опции"
msgid "Compare Fork"
-msgstr "Сравнить форк"
+msgstr "Сравнить форки"
msgid "Compare"
msgstr "Сравнить"
@@ -1579,20 +2669,23 @@ msgid "Search"
msgstr "Поиск"
msgid "Follow"
-msgstr "Наблюдать"
+msgstr "Подписаться"
msgid "Unfollow"
-msgstr "Не наблюдать"
+msgstr "Отписаться"
msgid "Fork"
msgstr "Форк"
msgid "Create Pull Request"
-msgstr "Создать Pull запрос"
+msgstr "Создать pull-запрос"
msgid "Switch To"
msgstr "Переключиться на"
+msgid "No matches found"
+msgstr "Совпадений не найдено"
+
msgid "Show recent activity"
msgstr "Показать последнюю активность"
@@ -1618,7 +2711,7 @@ msgid "Search in repositories"
msgstr "Поиск по репозиториям"
msgid "My Pull Requests"
-msgstr "Мои Pull-запросы"
+msgstr "Мои pull-запросы"
msgid "Not Logged In"
msgstr "Не авторизован"
@@ -1626,9 +2719,21 @@ msgstr "Не авторизован"
msgid "Login to Your Account"
msgstr "Авторизоваться"
+msgid "Forgot password?"
+msgstr "Забыли пароль?"
+
+msgid "Don't have an account?"
+msgstr "Нет аккаунта?"
+
msgid "Log Out"
msgstr "Выход"
+msgid "Parent rev."
+msgstr "Ревизия предка"
+
+msgid "Child rev."
+msgstr "Ревизия потомка"
+
msgid "Create repositories"
msgstr "Создать репозитории"
@@ -1642,11 +2747,11 @@ msgid "Select this option to allow user
msgstr "Опция разрешает пользователю создавать группы пользователей"
msgid "Fork repositories"
-msgstr "Создавать fork от репозиториев"
+msgstr "Создавать форки"
msgid "Select this option to allow repository forking for this user"
msgstr ""
-"Выберите эту опцию чтобы разрешить данному пользователю создавать fork'и "
+"Выберите, чтобы разрешить данному пользователю создавать форки "
"репозиториев"
msgid "Show"
@@ -1661,17 +2766,26 @@ msgstr "Привилегия"
msgid "Edit Permission"
msgstr "Изменить привилегии"
+msgid "No permission defined"
+msgstr "Права не заданы"
+
+msgid "Retry"
+msgstr "Повторить"
+
msgid "Submitting ..."
msgstr "Применение..."
+msgid "Unable to post"
+msgstr "Не удалось отправить"
+
msgid "Add Another Comment"
msgstr "Добавить ещё один комментарий"
msgid "Stop following this repository"
-msgstr "Отменить наблюдение за репозиторием"
+msgstr "Отписаться от этого репозитория"
msgid "Start following this repository"
-msgstr "Наблюдать за репозиторием"
+msgstr "Подписаться на этот репозиторий"
msgid "Group"
msgstr "Группа"
@@ -1688,6 +2802,15 @@ msgstr "Поиск усечен"
msgid "No matching files"
msgstr "Нет совпадений"
+msgid "Open New Pull Request from {0}"
+msgstr "Открыть новый pull-запрос от {0}"
+
+msgid "Open New Pull Request for {0} → {1}"
+msgstr "Открыть новый pull-запрос для {0} → {1}"
+
+msgid "Show Selected Changesets {0} → {1}"
+msgstr "Показать выбранные наборы изменений: {0} → {1}"
+
msgid "Selection Link"
msgstr "Ссылка выбора"
@@ -1697,6 +2820,13 @@ msgstr "Свернуть сравнение"
msgid "Expand Diff"
msgstr "Раскрыть сравнение"
+msgid "No revisions"
+msgstr "Нет ревизий"
+
+msgid "Type name of user or member to grant permission"
+msgstr ""
+"Введите имя пользователя или члена группы для предоставления доступа"
+
msgid "Failed to revoke permission"
msgstr "Не удалось отозвать привилегии"
@@ -1737,7 +2867,7 @@ msgid "Clear selection"
msgstr "Очистить выбор"
msgid "Go to tip of repository"
-msgstr "Перейти на верхушку репозитория"
+msgstr "Перейти к последней ревизии (tip) репозитория"
msgid "Compare fork with %s"
msgstr "Сравнить fork с %s"
@@ -1763,6 +2893,31 @@ msgstr "Добавлено"
msgid "Affected %s files"
msgstr "Затрагивает %s файлов"
+msgid "First (oldest) changeset in this list"
+msgstr "Первый (самый старый) набор изменений в списке"
+
+msgid "Last (most recent) changeset in this list"
+msgstr "Последний (самый свежий) набор изменений в списке"
+
+msgid "Position in this list of changesets"
+msgstr "Позиция в списке наборов изменений"
+
+msgid ""
+"Changeset status: %s by %s\n"
+"Click to open associated pull request %s"
+msgstr ""
+"Статус набора изменений: %s от %s\n"
+"Кликните, чтобы открыть соответствующий pull-запрос %s"
+
+msgid "Changeset status: %s by %s"
+msgstr "Статус набора изменений: %s от %s"
+
+msgid "Expand commit message"
+msgstr "Развернуть сообщение фиксации"
+
+msgid "%s comments"
+msgstr "%s комментариев"
+
msgid "Bookmark %s"
msgstr "Закладка %s"
@@ -1776,7 +2931,7 @@ msgid "%s Changeset"
msgstr "%s Изменения"
msgid "Changeset status"
-msgstr "Статут изменений"
+msgstr "Статус изменений"
msgid "Raw diff"
msgstr "Отобразить в формате diff"
@@ -1787,9 +2942,21 @@ msgstr "Применить разностное исправление (Patch diff)"
msgid "Download diff"
msgstr "Скачать diff"
+msgid "Merge"
+msgstr "Слить"
+
msgid "Grafted from:"
msgstr "Перенесено из:"
+msgid "Transplanted from:"
+msgstr "Трансплантировано из:"
+
+msgid "Replaced by:"
+msgstr "Заменено:"
+
+msgid "Preceded by:"
+msgstr "Предшествует:"
+
msgid "%s file changed"
msgid_plural "%s files changed"
msgstr[0] "%s файл изменён"
@@ -1805,18 +2972,41 @@ msgstr[2] "%s файлов изменёно: %s добавлений, %s удалений"
msgid "Show full diff anyway"
msgstr "Показать полный diff"
+msgid "comment"
+msgstr "комментарий"
+
+msgid "on pull request"
+msgstr "в pull-запросе"
+
msgid "No title"
msgstr "Нет заголовка"
+msgid "on this changeset"
+msgstr "в этом наборе изменений"
+
msgid "Delete comment?"
msgstr "Удалить комментарий?"
+msgid "Status change"
+msgstr "Изменение статуса"
+
+msgid "Comments are in plain text. Use @username to notify another user."
+msgstr ""
+"Используйте @имя_пользователя в тексте, чтобы отправить оповещение "
+"указанному пользователю."
+
msgid "Set changeset status"
msgstr "Изменить статус ревизии"
+msgid "Vote for pull request status"
+msgstr "Голосовать за статус pull-запроса"
+
msgid "No change"
msgstr "Без изменений"
+msgid "Finish pull request"
+msgstr "Завершить pull-запрос"
+
msgid "Close"
msgstr "Закрыть"
@@ -1844,6 +3034,12 @@ msgstr[0] "%d к строке"
msgstr[1] "%d к строкам"
msgstr[2] "%d к строкам"
+msgid "%d general"
+msgid_plural "%d general"
+msgstr[0] "%d общий"
+msgstr[1] "%d общих"
+msgstr[2] "%d общих"
+
msgid "%s Changesets"
msgstr "%s Изменения"
@@ -1853,12 +3049,30 @@ msgstr "Статус набора изменений: %s"
msgid "Files affected"
msgstr "Затронутые файлы"
+msgid "No file before"
+msgstr "Нет предшествующего файла"
+
+msgid "File before"
+msgstr "Файл до"
+
+msgid "Modified"
+msgstr "Модифицирован"
+
msgid "Deleted"
msgstr "Удалён"
msgid "Renamed"
msgstr "Переименован"
+msgid "Unknown operation: %r"
+msgstr "Неизвестная операция: %r"
+
+msgid "No file after"
+msgstr "Нет последующего файла"
+
+msgid "File after"
+msgstr "Файл после"
+
msgid "Show full diff for this file"
msgstr "Показать полный diff для этого файла"
@@ -1866,11 +3080,24 @@ msgid "Show full side-by-side diff for t
msgstr "Показать полный diff для этого файла"
msgid "Show inline comments"
-msgstr "Показать комментарии в строках"
+msgstr "Показать комментарии к строкам"
msgid "No changesets"
msgstr "Нет изменений"
+msgid "Criss cross merge situation with multiple merge ancestors detected!"
+msgstr "Обнаружено перекрёстное слияние с различными предками!"
+
+msgid ""
+"Please merge the target branch to your branch before creating a pull "
+"request."
+msgstr ""
+"Прежде чем создавать pull-запрос, выполните слияние целевой ветви с вашей "
+"ветвью."
+
+msgid "Merge Ancestor"
+msgstr "Предок слияния"
+
msgid "Show merge diff"
msgstr "Показать merge diff"
@@ -1886,6 +3113,15 @@ msgstr "от"
msgid "%s Compare"
msgstr "%s Сравнить"
+msgid "Compare Revisions"
+msgstr "Сравнить ревизии"
+
+msgid "Swap"
+msgstr "Поменять местами"
+
+msgid "Compare revisions, branches, bookmarks, or tags."
+msgstr "Сравнение ревизий, ветвей, закладок и тегов."
+
msgid "Showing %s commit"
msgid_plural "Showing %s commits"
msgstr[0] "Показать %s commit"
@@ -1898,6 +3134,9 @@ msgstr "Показать полный diff"
msgid "Public repository"
msgstr "Публичный репозиторий"
+msgid "Repository creation in progress..."
+msgstr "Создание репозитория в процессе..."
+
msgid "No changesets yet"
msgstr "Изменений ещё не было"
@@ -1907,9 +3146,90 @@ msgstr "Подписаться на ленту RSS %s"
msgid "Subscribe to %s atom feed"
msgstr "Подписаться на ленту Atom %s"
+msgid "Creating"
+msgstr "Создание"
+
+msgid "Mention in Comment on Changeset \"%s\""
+msgstr "Упоминание в комментарии к набору изменений «%s»"
+
+msgid "Comment on Changeset \"%s\""
+msgstr "Комментарий к набору изменений «%s»"
+
+msgid "Changeset on"
+msgstr "Набор изменений для"
+
+msgid "branch"
+msgstr "ветви"
+
+msgid "by"
+msgstr "от"
+
+msgid "Status change:"
+msgstr "Изменение статуса:"
+
+msgid "The pull request has been closed."
+msgstr "Этот pull-запрос закрыт."
+
msgid "Hello %s"
msgstr "Здравствуйте, %s"
+msgid "We have received a request to reset the password for your account."
+msgstr "Мы получили запрос на сброс пароля для вашего аккаунта."
+
+msgid ""
+"This account is however managed outside this system and the password "
+"cannot be changed here."
+msgstr ""
+"Однако, поскольку этот аккаунт управляется извне, мы не можем изменить "
+"пароль здесь."
+
+msgid "To set a new password, click the following link"
+msgstr "Перейдите по ссылке, чтобы задать новый пароль"
+
+msgid ""
+"Should you not be able to use the link above, please type the following "
+"code into the password reset form"
+msgstr ""
+"В случае, если перейти по ссылке не удаётся, введите в форме сброса "
+"пароля следующий код"
+
+msgid ""
+"If it weren't you who requested the password reset, just disregard this "
+"message."
+msgstr ""
+"Если вы не запрашивали сброс пароля, то просто проигнорируйте это "
+"сообщение."
+
+msgid "Mention on Pull Request %s \"%s\" by %s"
+msgstr "Упоминание в pull-запросе %s «%s» от %s"
+
+msgid "Added as Reviewer of Pull Request %s \"%s\" by %s"
+msgstr "Добавлен в качестве ревьювера pull-запроса %s «%s» пользователем %s"
+
+msgid "Pull request"
+msgstr "Pull-запрос"
+
+msgid "from"
+msgstr "от"
+
+msgid "to"
+msgstr "к"
+
+msgid "Mention in Comment on Pull Request %s \"%s\""
+msgstr "Упоминание в комментарии к pull-запросу %s «%s»"
+
+msgid "Pull Request %s \"%s\" Closed"
+msgstr "Pull-запрос %s «%s» закрыт"
+
+msgid "Comment on Pull Request %s \"%s\""
+msgstr "Комментарий к pull-запросу %s «%s»"
+
+msgid "Full Name"
+msgstr "Полное имя"
+
+msgid "%s File side-by-side diff"
+msgstr "Построчное сравнение файла %s"
+
msgid "File diff"
msgstr "Сравнение файлов"
@@ -1917,20 +3237,41 @@ msgid "%s File Diff"
msgstr "Сравнение файла %s"
msgid "%s Files"
-msgstr "%s Файлы"
+msgstr "%s файлов"
msgid "%s Files Add"
msgstr "%s Файлов добавлено"
+msgid "Add New File"
+msgstr "Добавить новый файл"
+
msgid "Location"
msgstr "Расположение"
+msgid "Enter filename..."
+msgstr "Введите имя файла..."
+
msgid "or"
msgstr "или"
+msgid "Upload File"
+msgstr "Загрузить файл"
+
+msgid "Create New File"
+msgstr "Создать новый файл"
+
+msgid "New file type"
+msgstr "Тип нового файла"
+
+msgid "Commit Message"
+msgstr "Сообщение фиксации"
+
msgid "Commit Changes"
msgstr "Применить изменения"
+msgid "Search File List"
+msgstr "Искать в списке файлов"
+
msgid "Loading file list..."
msgstr "Загружается список файлов..."
@@ -1946,9 +3287,24 @@ msgstr "Последнее изменение"
msgid "Last Committer"
msgstr "Автор последней ревизии"
+msgid "%s Files Delete"
+msgstr "Удаление файлов %s"
+
+msgid "Delete file"
+msgstr "Удалить файл"
+
+msgid "%s File Edit"
+msgstr "Правка файла %s"
+
msgid "Edit file"
msgstr "Редактировать файл"
+msgid "Show Annotation"
+msgstr "Показать аннотацию"
+
+msgid "Download as Raw"
+msgstr "Загрузить в исходном виде"
+
msgid "Source"
msgstr "Исходный код"
@@ -1958,9 +3314,35 @@ msgstr[0] "%s автор"
msgstr[1] "%s авторов"
msgstr[2] "%s автора"
+msgid "Diff to Revision"
+msgstr "Разница с ревизией"
+
+msgid "Show at Revision"
+msgstr "Показать в ревизии"
+
+msgid "Show Full History"
+msgstr "Показать всю историю"
+
+msgid "Show Authors"
+msgstr "Показать авторов"
+
+msgid "Show Source"
+msgstr "Показать источник"
+
+msgid "Edit on Branch: %s"
+msgstr "Правка в ветке: %s"
+
+msgid "Editing binary files not allowed"
+msgstr "Редактирование бинарных файлов не допускается"
+
msgid "Editing files allowed only when on branch head revision"
msgstr "Редактирование файлов разрешено только в HEAD-ревизии данной ветки"
+msgid "Deleting files allowed only when on branch head revision"
+msgstr ""
+"Удаление файлов допускается только при нахождении в текущей ветке (branch "
+"head)"
+
msgid "Binary file (%s)"
msgstr "Бинарный файл (%s)"
@@ -1976,17 +3358,23 @@ msgstr "Показать только текст."
msgid "annotation"
msgstr "аннотация"
+msgid "Go Back"
+msgstr "Назад"
+
msgid "No files at given path"
msgstr "По заданному пути файлы отсутствуют"
msgid "%s Followers"
-msgstr "%s Наблюдатели"
+msgstr "%s Подписчики"
msgid "Followers"
-msgstr "Наблюдатели"
+msgstr "Подписчики"
msgid "Started following -"
-msgstr "Наблюдать за репозиторием"
+msgstr "Подписался -"
+
+msgid "Fork repository %s"
+msgstr "Создать форк репозитория %s"
msgid "Fork name"
msgstr "Имя форка"
@@ -2003,7 +3391,7 @@ msgid "Copy permissions"
msgstr "Скопировать привилегии"
msgid "Copy permissions from forked repository"
-msgstr "Скопировать привилегии с форкнутого репозитория"
+msgstr "Скопировать права доступа с форка репозитория"
msgid "Update after clone"
msgstr "Обновлять после клонирования"
@@ -2011,17 +3399,20 @@ msgstr "Обновлять после клонирования"
msgid "Checkout source after making a clone"
msgstr "Скачивать исходники после создания клона"
+msgid "Fork this Repository"
+msgstr "Создать форк"
+
msgid "%s Forks"
msgstr "Форки %s"
msgid "Forks"
-msgstr "Ответвления"
+msgstr "Форки"
msgid "Forked"
-msgstr "Форкнуто"
+msgstr "Форк создан"
msgid "There are no forks yet"
-msgstr "Форки ещё не созданы"
+msgstr "Форков пока нет"
msgid "ATOM journal feed"
msgstr "Лента журнала ATOM"
@@ -2047,8 +3438,11 @@ msgstr "Новый pull-запрос"
msgid "Title"
msgstr "Заголовок"
+msgid "Summarize the changes - or leave empty"
+msgstr "Опишите изменения — или оставьте пустым"
+
msgid "Write a short description on this pull request"
-msgstr "Написать короткое писание по этому запросу"
+msgstr "Оставьте краткое описание этого pull-запроса"
msgid "Changeset flow"
msgstr "Поток изменений"
@@ -2065,9 +3459,27 @@ msgstr "Репозиторий назначения"
msgid "No entries"
msgstr "Записи отсуствуют"
+msgid "Vote"
+msgstr "Голосовать"
+
msgid "Age"
msgstr "Возраст"
+msgid "From"
+msgstr "От"
+
+msgid "To"
+msgstr "К"
+
+msgid "You voted: %s"
+msgstr "Ваш выбор: %s"
+
+msgid "You didn't vote"
+msgstr "Вы не голосовали"
+
+msgid "(no title)"
+msgstr "(без заголовка)"
+
msgid "Closed"
msgstr "Закрыто"
@@ -2075,43 +3487,132 @@ msgid "Delete Pull Request"
msgstr "Удалить pull-запрос"
msgid "Confirm to delete this pull request"
-msgstr "Подтвердите удаление этого pull-request'а"
+msgstr "Подтвердите удаление этого pull-запроса"
+
+msgid "Confirm again to delete this pull request with %s comments"
+msgstr ""
+"Ещё раз подтвердите удаление pull-запроса со всеми (%s) комментариями"
+
+msgid "%s Pull Request %s"
+msgstr "%s pull-запрос %s"
msgid "Pull request %s from %s#%s"
msgstr "Pull-запрос %s от %s#%s"
+msgid "Summarize the changes"
+msgstr "Опишите изменения"
+
+msgid "Voting Result"
+msgstr "Результаты голосования"
+
+msgid "Pull request status calculated from votes"
+msgstr "Статус pull-запроса определён по голосованию"
+
+msgid "Origin"
+msgstr "Происхождение"
+
+msgid "on"
+msgstr "на"
+
+msgid "Target"
+msgstr "Цель"
+
+msgid ""
+"This is just a range of changesets and doesn't have a target or a real "
+"merge ancestor."
+msgstr ""
+"Это всего лишь перечень наборов изменений, который не имеет цели или "
+"реального предка для слияния."
+
msgid "Pull changes"
msgstr "Принять изменения"
+msgid "Next iteration"
+msgstr "Следующая итерация"
+
+msgid "Current revision - no change"
+msgstr "Текущая ревизия — без изменений"
+
+msgid ""
+"Pull request iterations do not change content once created. Select a "
+"revision to create a new iteration."
+msgstr ""
+"Итерации pull-запросов не изменяются после создания. Выберите ревизию для "
+"создания новой итерации."
+
msgid "Save Changes"
msgstr "Сохранить изменения"
+msgid "Create New Iteration with Changes"
+msgstr "Создать итерацию с изменениями"
+
msgid "Cancel Changes"
msgstr "Отменить изменения"
+msgid "Reviewers"
+msgstr "Ревьюверы"
+
msgid "Remove reviewer"
msgstr "Удалить рецензента"
+msgid "Type name of reviewer to add"
+msgstr "Введите имя добавляемого ревьювера"
+
msgid "Potential Reviewers"
msgstr "Потенциальные рецензенты"
+msgid "Click to add the repository owner as reviewer:"
+msgstr "Нажмите, чтобы добавить владельца репозитория в качестве ревьювера:"
+
+msgid "Pull Request Content"
+msgstr "Содержимое pull-запроса"
+
msgid "Common ancestor"
msgstr "Общий предок"
msgid "%s Pull Requests"
-msgstr "%s Запросы на внесение изменений"
+msgstr "Pull-запросы %s"
+
+msgid "Pull Requests from '%s'"
+msgstr "Pull-запросы от '%s'"
msgid "Pull Requests to '%s'"
-msgstr "Pull-запросы для %s"
+msgstr "Pull-запросы для '%s'"
msgid "Open New Pull Request"
msgstr "Создать новый pull-запрос"
+msgid "Show Pull Requests to %s"
+msgstr "Показать pull-запросы для %s"
+
+msgid "Show Pull Requests from '%s'"
+msgstr "Показать pull-запросы от '%s'"
+
+msgid "Hide closed pull requests (only show open pull requests)"
+msgstr ""
+"Спрятать закрытые pull-запросы (показывать только открытые pull-запросы)"
+
+msgid "Show closed pull requests (in addition to open pull requests)"
+msgstr ""
+"Показывать закрытые pull-запросы (в дополнение к открытым pull-запросам)"
+
+msgid "Pull Requests Created by Me"
+msgstr "Pull-запросы, созданные мной"
+
+msgid "Pull Requests Needing My Review"
+msgstr "Pull-запросы, требующие моего рассмотрения"
+
+msgid "Pull Requests I Participate In"
+msgstr "Pull-запросы, в которых я участвую"
+
+msgid "%s Search"
+msgstr "Поиск %s"
+
msgid "Search in All Repositories"
msgstr "Поиск по всем репозиториям"
msgid "Search term"
-msgstr "Фраза для поиска"
+msgstr "Поисковый запрос"
msgid "Search in"
msgstr "Критерий поиска"
@@ -2128,6 +3629,9 @@ msgstr "Имя файла"
msgid "Permission denied"
msgstr "Недостаточно прав"
+msgid "%s Statistics"
+msgstr "Статистика %s"
+
msgid "%s ATOM feed"
msgstr "ATOM лента репозитория %s"
@@ -2174,7 +3678,7 @@ msgid "%s Summary"
msgstr "%s общие сведения"
msgid "Fork of"
-msgstr "Форк от"
+msgstr "Форк репозитория"
msgid "Clone from"
msgstr "Клонировать из"
@@ -2182,6 +3686,18 @@ msgstr "Клонировать из"
msgid "Clone URL"
msgstr "Ссылка для клонирования"
+msgid "Use ID"
+msgstr "Использовать ID"
+
+msgid "Use SSH"
+msgstr "Использовать SSH"
+
+msgid "Use Name"
+msgstr "Использовать имя"
+
+msgid "Use HTTP"
+msgstr "Использовать HTTP"
+
msgid "Trending files"
msgstr "Популярные файлы"
@@ -2215,8 +3731,14 @@ msgstr "Быстрый старт"
msgid "Add or upload files directly via Kallithea"
msgstr "Добавить или загрузить файлы через Kallithea"
+msgid "Push new repository"
+msgstr "Отправить новый репозиторий"
+
msgid "Existing repository?"
msgstr "Существующий репозиторий?"
+msgid "Readme file from revision %s:%s"
+msgstr "Файл readme из ревизии %s:%s"
+
msgid "Download %s as %s"
msgstr "Скачать %s как %s"
diff --git a/kallithea/lib/annotate.py b/kallithea/lib/annotate.py
--- a/kallithea/lib/annotate.py
+++ b/kallithea/lib/annotate.py
@@ -25,16 +25,15 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-import StringIO
-
from pygments import highlight
from pygments.formatters import HtmlFormatter
from kallithea.lib.vcs.exceptions import VCSError
from kallithea.lib.vcs.nodes import FileNode
+from kallithea.lib.vcs.utils import safe_str
-def annotate_highlight(filenode, annotate_from_changeset_func=None,
+def annotate_highlight(filenode, annotate_from_changeset_func,
order=None, headers=None, **options):
"""
Returns html portion containing annotated table with 3 columns: line
@@ -51,26 +50,26 @@ def annotate_highlight(filenode, annotat
"""
from kallithea.lib.pygmentsutils import get_custom_lexer
options['linenos'] = True
- formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
- headers=headers,
- annotate_from_changeset_func=annotate_from_changeset_func, **options)
+ formatter = AnnotateHtmlFormatter(filenode=filenode,
+ annotate_from_changeset_func=annotate_from_changeset_func, order=order,
+ headers=headers, **options)
lexer = get_custom_lexer(filenode.extension) or filenode.lexer
- highlighted = highlight(filenode.content, lexer, formatter)
+ highlighted = highlight(safe_str(filenode.content), lexer, formatter)
return highlighted
class AnnotateHtmlFormatter(HtmlFormatter):
- def __init__(self, filenode, annotate_from_changeset_func=None,
+ def __init__(self, filenode, annotate_from_changeset_func,
order=None, **options):
"""
- If ``annotate_from_changeset_func`` is passed it should be a function
+ ``annotate_from_changeset_func`` must be a function
which returns string from the given changeset. For example, we may pass
following function as ``annotate_from_changeset_func``::
def changeset_to_anchor(changeset):
return '%s\n' % \
- (changeset.id, changeset.id)
+ (changeset.raw_id, changeset.raw_id)
:param annotate_from_changeset_func: see above
:param order: (default: ``['ls', 'annotate', 'code']``); order of
@@ -101,22 +100,13 @@ class AnnotateHtmlFormatter(HtmlFormatte
raise VCSError("This formatter expect FileNode parameter, not %r"
% type(filenode))
- def annotate_from_changeset(self, changeset):
- """
- Returns full html line for single changeset per annotated line.
- """
- if self.annotate_from_changeset_func:
- return self.annotate_from_changeset_func(changeset)
- else:
- return ''.join((changeset.id, '\n'))
-
def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO.StringIO()
+ inner_lines = []
lncount = 0
for t, line in inner:
if t:
lncount += 1
- dummyoutfile.write(line)
+ inner_lines.append(line)
fl = self.linenostart
mw = len(str(lncount + fl - 1))
@@ -166,7 +156,7 @@ class AnnotateHtmlFormatter(HtmlFormatte
# ln_ = len(ls.splitlines())
# if ln_cs > ln_:
# annotate_changesets = annotate_changesets[:ln_ - ln_cs]
- annotate = ''.join((self.annotate_from_changeset(el[2]())
+ annotate = ''.join((self.annotate_from_changeset_func(el[2]())
for el in self.filenode.annotate))
# in case you wonder about the seemingly redundant
here:
# since the content in the other cell also is wrapped in a div,
@@ -176,7 +166,7 @@ class AnnotateHtmlFormatter(HtmlFormatte
'
'
'''
@@ -204,5 +194,5 @@ class AnnotateHtmlFormatter(HtmlFormatte
''.join(headers_row) +
''.join(body_row_start)
)
- yield 0, dummyoutfile.getvalue()
+ yield 0, ''.join(inner_lines)
yield 0, ''
diff --git a/kallithea/lib/app_globals.py b/kallithea/lib/app_globals.py
--- a/kallithea/lib/app_globals.py
+++ b/kallithea/lib/app_globals.py
@@ -39,9 +39,7 @@ class Globals(object):
"""One instance of Globals is created during application
initialization and is available during requests via the
'app_globals' variable
-
"""
- self.available_permissions = None # propagated after init_model
@property
def cache(self):
diff --git a/kallithea/lib/auth.py b/kallithea/lib/auth.py
--- a/kallithea/lib/auth.py
+++ b/kallithea/lib/auth.py
@@ -30,6 +30,7 @@ import logging
import os
import string
+import bcrypt
import ipaddr
from decorator import decorator
from sqlalchemy.orm import joinedload
@@ -38,14 +39,13 @@ from tg import request
from tg.i18n import ugettext as _
from webob.exc import HTTPForbidden, HTTPFound
-from kallithea import __platform__, is_unix, is_windows
+import kallithea
from kallithea.config.routing import url
-from kallithea.lib.caching_query import FromCache
-from kallithea.lib.utils import conditional_cache, get_repo_group_slug, get_repo_slug, get_user_group_slug
-from kallithea.lib.utils2 import safe_str, safe_unicode
+from kallithea.lib.utils import get_repo_group_slug, get_repo_slug, get_user_group_slug
+from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes
from kallithea.lib.vcs.utils.lazy import LazyProperty
-from kallithea.model.db import (
- Permission, RepoGroup, Repository, User, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
+from kallithea.model.db import (Permission, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm,
+ UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
from kallithea.model.meta import Session
from kallithea.model.user import UserModel
@@ -87,44 +87,34 @@ class PasswordGenerator(object):
def get_crypt_password(password):
"""
- Cryptographic function used for password hashing based on pybcrypt
- or Python's own OpenSSL wrapper on windows
+ Cryptographic function used for bcrypt password hashing.
:param password: password to hash
"""
- if is_windows:
- return hashlib.sha256(password).hexdigest()
- elif is_unix:
- import bcrypt
- return bcrypt.hashpw(safe_str(password), bcrypt.gensalt(10))
- else:
- raise Exception('Unknown or unsupported platform %s'
- % __platform__)
+ return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10)))
def check_password(password, hashed):
"""
- Checks matching password with it's hashed value, runs different
- implementation based on platform it runs on
+    Checks whether the password matches the hashed value using bcrypt.
+    Remains backwards compatible and accepts plain sha256 hashes which used to
+    be used on Windows.
:param password: password
:param hashed: password in hashed form
"""
# sha256 hashes will always be 64 hex chars
# bcrypt hashes will always contain $ (and be shorter)
- if is_windows or len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
+ if len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
return hashlib.sha256(password).hexdigest() == hashed
- elif is_unix:
- import bcrypt
- try:
- return bcrypt.checkpw(safe_str(password), safe_str(hashed))
- except ValueError as e:
- # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
- log.error('error from bcrypt checking password: %s', e)
- return False
- else:
- raise Exception('Unknown or unsupported platform %s'
- % __platform__)
+ try:
+ return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed))
+ except ValueError as e:
+ # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
+ log.error('error from bcrypt checking password: %s', e)
+ return False
+ log.error('check_password failed - no method found for hash length %s', len(hashed))
+ return False
def _cached_perms_data(user_id, user_is_admin):
@@ -149,12 +139,9 @@ def _cached_perms_data(user_id, user_is_
#======================================================================
# fetch default permissions
#======================================================================
- default_user = User.get_by_username('default', cache=True)
- default_user_id = default_user.user_id
-
- default_repo_perms = Permission.get_default_perms(default_user_id)
- default_repo_groups_perms = Permission.get_default_group_perms(default_user_id)
- default_user_group_perms = Permission.get_default_user_group_perms(default_user_id)
+ default_repo_perms = Permission.get_default_perms(kallithea.DEFAULT_USER_ID)
+ default_repo_groups_perms = Permission.get_default_group_perms(kallithea.DEFAULT_USER_ID)
+ default_user_group_perms = Permission.get_default_user_group_perms(kallithea.DEFAULT_USER_ID)
if user_is_admin:
#==================================================================
@@ -166,19 +153,19 @@ def _cached_perms_data(user_id, user_is_
# repositories
for perm in default_repo_perms:
- r_k = perm.UserRepoToPerm.repository.repo_name
+ r_k = perm.repository.repo_name
p = 'repository.admin'
permissions[RK][r_k] = p
# repository groups
for perm in default_repo_groups_perms:
- rg_k = perm.UserRepoGroupToPerm.group.group_name
+ rg_k = perm.group.group_name
p = 'group.admin'
permissions[GK][rg_k] = p
# user groups
for perm in default_user_group_perms:
- u_k = perm.UserUserGroupToPerm.user_group.users_group_name
+ u_k = perm.user_group.users_group_name
p = 'usergroup.admin'
permissions[UK][u_k] = p
return permissions
@@ -189,7 +176,7 @@ def _cached_perms_data(user_id, user_is_
# default global permissions taken from the default user
default_global_perms = UserToPerm.query() \
- .filter(UserToPerm.user_id == default_user_id) \
+ .filter(UserToPerm.user_id == kallithea.DEFAULT_USER_ID) \
.options(joinedload(UserToPerm.permission))
for perm in default_global_perms:
@@ -197,27 +184,27 @@ def _cached_perms_data(user_id, user_is_
# defaults for repositories, taken from default user
for perm in default_repo_perms:
- r_k = perm.UserRepoToPerm.repository.repo_name
- if perm.Repository.owner_id == user_id:
+ r_k = perm.repository.repo_name
+ if perm.repository.owner_id == user_id:
p = 'repository.admin'
- elif perm.Repository.private:
+ elif perm.repository.private:
p = 'repository.none'
else:
- p = perm.Permission.permission_name
+ p = perm.permission.permission_name
permissions[RK][r_k] = p
# defaults for repository groups taken from default user permission
# on given group
for perm in default_repo_groups_perms:
- rg_k = perm.UserRepoGroupToPerm.group.group_name
- p = perm.Permission.permission_name
+ rg_k = perm.group.group_name
+ p = perm.permission.permission_name
permissions[GK][rg_k] = p
# defaults for user groups taken from default user permission
# on given user group
for perm in default_user_group_perms:
- u_k = perm.UserUserGroupToPerm.user_group.users_group_name
- p = perm.Permission.permission_name
+ u_k = perm.user_group.users_group_name
+ p = perm.permission.permission_name
permissions[UK][u_k] = p
#======================================================================
@@ -271,30 +258,28 @@ def _cached_perms_data(user_id, user_is_
# user group for repositories permissions
user_repo_perms_from_users_groups = \
- Session().query(UserGroupRepoToPerm, Permission, Repository,) \
- .join((Repository, UserGroupRepoToPerm.repository_id ==
- Repository.repo_id)) \
- .join((Permission, UserGroupRepoToPerm.permission_id ==
- Permission.permission_id)) \
+ Session().query(UserGroupRepoToPerm) \
.join((UserGroup, UserGroupRepoToPerm.users_group_id ==
UserGroup.users_group_id)) \
.filter(UserGroup.users_group_active == True) \
.join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
UserGroupMember.users_group_id)) \
.filter(UserGroupMember.user_id == user_id) \
+ .options(joinedload(UserGroupRepoToPerm.repository)) \
+ .options(joinedload(UserGroupRepoToPerm.permission)) \
.all()
for perm in user_repo_perms_from_users_groups:
bump_permission(RK,
- perm.UserGroupRepoToPerm.repository.repo_name,
- perm.Permission.permission_name)
+ perm.repository.repo_name,
+ perm.permission.permission_name)
# user permissions for repositories
user_repo_perms = Permission.get_default_perms(user_id)
for perm in user_repo_perms:
bump_permission(RK,
- perm.UserRepoToPerm.repository.repo_name,
- perm.Permission.permission_name)
+ perm.repository.repo_name,
+ perm.permission.permission_name)
#======================================================================
# !! PERMISSIONS FOR REPOSITORY GROUPS !!
@@ -305,59 +290,56 @@ def _cached_perms_data(user_id, user_is_
#======================================================================
# user group for repo groups permissions
user_repo_group_perms_from_users_groups = \
- Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup) \
- .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)) \
- .join((Permission, UserGroupRepoGroupToPerm.permission_id
- == Permission.permission_id)) \
+ Session().query(UserGroupRepoGroupToPerm) \
.join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
UserGroup.users_group_id)) \
.filter(UserGroup.users_group_active == True) \
.join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
== UserGroupMember.users_group_id)) \
.filter(UserGroupMember.user_id == user_id) \
+ .options(joinedload(UserGroupRepoGroupToPerm.permission)) \
.all()
for perm in user_repo_group_perms_from_users_groups:
bump_permission(GK,
- perm.UserGroupRepoGroupToPerm.group.group_name,
- perm.Permission.permission_name)
+ perm.group.group_name,
+ perm.permission.permission_name)
# user explicit permissions for repository groups
user_repo_groups_perms = Permission.get_default_group_perms(user_id)
for perm in user_repo_groups_perms:
bump_permission(GK,
- perm.UserRepoGroupToPerm.group.group_name,
- perm.Permission.permission_name)
+ perm.group.group_name,
+ perm.permission.permission_name)
#======================================================================
# !! PERMISSIONS FOR USER GROUPS !!
#======================================================================
# user group for user group permissions
user_group_user_groups_perms = \
- Session().query(UserGroupUserGroupToPerm, Permission, UserGroup) \
+ Session().query(UserGroupUserGroupToPerm) \
.join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
== UserGroup.users_group_id)) \
- .join((Permission, UserGroupUserGroupToPerm.permission_id
- == Permission.permission_id)) \
.join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
== UserGroupMember.users_group_id)) \
.filter(UserGroupMember.user_id == user_id) \
.join((UserGroup, UserGroupMember.users_group_id ==
UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
.filter(UserGroup.users_group_active == True) \
+ .options(joinedload(UserGroupUserGroupToPerm.permission)) \
.all()
for perm in user_group_user_groups_perms:
bump_permission(UK,
- perm.UserGroupUserGroupToPerm.target_user_group.users_group_name,
- perm.Permission.permission_name)
+ perm.target_user_group.users_group_name,
+ perm.permission.permission_name)
# user explicit permission for user groups
user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
for perm in user_user_groups_perms:
bump_permission(UK,
- perm.UserUserGroupToPerm.user_group.users_group_name,
- perm.Permission.permission_name)
+ perm.user_group.users_group_name,
+ perm.permission.permission_name)
return permissions
@@ -405,7 +387,7 @@ class AuthUser(object):
if not dbuser.active:
log.info('Db user %s not active', dbuser.username)
return None
- allowed_ips = AuthUser.get_allowed_ips(dbuser.user_id, cache=True)
+ allowed_ips = AuthUser.get_allowed_ips(dbuser.user_id)
if not check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
log.info('Access for %s from %s forbidden - not in %s', dbuser.username, ip_addr, allowed_ips)
return None
@@ -414,9 +396,8 @@ class AuthUser(object):
def __init__(self, user_id=None, dbuser=None, is_external_auth=False):
self.is_external_auth = is_external_auth # container auth - don't show logout option
- # These attributes will be overridden by fill_data, below, unless the
- # requested user cannot be found and the default anonymous user is
- # not enabled.
+ # These attributes will be overridden below if the requested user is
+ # found or anonymous access (using the default user) is enabled.
self.user_id = None
self.username = None
self.api_key = None
@@ -442,7 +423,7 @@ class AuthUser(object):
self.is_default_user = False
else:
# copy non-confidential database fields from a `db.User` to this `AuthUser`.
- for k, v in dbuser.get_dict().iteritems():
+ for k, v in dbuser.get_dict().items():
assert k not in ['api_keys', 'permissions']
setattr(self, k, v)
self.is_default_user = dbuser.is_default_user
@@ -450,7 +431,15 @@ class AuthUser(object):
@LazyProperty
def permissions(self):
- return self.__get_perms(user=self, cache=False)
+ """
+ Fills user permission attribute with permissions taken from database
+ works for permissions given for repositories, and for permissions that
+ are granted to groups
+
+ :param user: `AuthUser` instance
+ """
+ log.debug('Getting PERMISSION tree for %s', self)
+ return _cached_perms_data(self.user_id, self.is_admin)
def has_repository_permission_level(self, repo_name, level, purpose=None):
required_perms = {
@@ -492,22 +481,6 @@ class AuthUser(object):
def api_keys(self):
return self._get_api_keys()
- def __get_perms(self, user, cache=False):
- """
- Fills user permission attribute with permissions taken from database
- works for permissions given for repositories, and for permissions that
- are granted to groups
-
- :param user: `AuthUser` instance
- """
- user_id = user.user_id
- user_is_admin = user.is_admin
-
- log.debug('Getting PERMISSION tree')
- compute = conditional_cache('short_term', 'cache_desc',
- condition=cache, func=_cached_perms_data)
- return compute(user_id, user_is_admin)
-
def _get_api_keys(self):
api_keys = [self.api_key]
for api_key in UserApiKeys.query() \
@@ -525,7 +498,7 @@ class AuthUser(object):
"""
Returns list of repositories you're an admin of
"""
- return [x[0] for x in self.permissions['repositories'].iteritems()
+ return [x[0] for x in self.permissions['repositories'].items()
if x[1] == 'repository.admin']
@property
@@ -533,7 +506,7 @@ class AuthUser(object):
"""
Returns list of repository groups you're an admin of
"""
- return [x[0] for x in self.permissions['repositories_groups'].iteritems()
+ return [x[0] for x in self.permissions['repositories_groups'].items()
if x[1] == 'group.admin']
@property
@@ -541,11 +514,11 @@ class AuthUser(object):
"""
Returns list of user groups you're an admin of
"""
- return [x[0] for x in self.permissions['user_groups'].iteritems()
+ return [x[0] for x in self.permissions['user_groups'].items()
if x[1] == 'usergroup.admin']
def __repr__(self):
- return "" % (self.user_id, self.username)
+ return "<%s %s: %r>" % (self.__class__.__name__, self.user_id, self.username)
def to_cookie(self):
""" Serializes this login session to a cookie `dict`. """
@@ -566,14 +539,10 @@ class AuthUser(object):
)
@classmethod
- def get_allowed_ips(cls, user_id, cache=False):
+ def get_allowed_ips(cls, user_id):
_set = set()
- default_ips = UserIpMap.query().filter(UserIpMap.user_id ==
- User.get_default_user(cache=True).user_id)
- if cache:
- default_ips = default_ips.options(FromCache("sql_cache_short",
- "get_user_ips_default"))
+ default_ips = UserIpMap.query().filter(UserIpMap.user_id == kallithea.DEFAULT_USER_ID)
for ip in default_ips:
try:
_set.add(ip.ip_addr)
@@ -583,9 +552,6 @@ class AuthUser(object):
pass
user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
- if cache:
- user_ips = user_ips.options(FromCache("sql_cache_short",
- "get_user_ips_%s" % user_id))
for ip in user_ips:
try:
_set.add(ip.ip_addr)
@@ -596,24 +562,6 @@ class AuthUser(object):
return _set or set(['0.0.0.0/0', '::/0'])
-def set_available_permissions(config):
- """
- This function will propagate globals with all available defined
- permission given in db. We don't want to check each time from db for new
- permissions since adding a new permission also requires application restart
- ie. to decorate new views with the newly created permission
-
- :param config: current config instance
-
- """
- log.info('getting information about all available permissions')
- try:
- all_perms = Session().query(Permission).all()
- config['available_permissions'] = [x.permission_name for x in all_perms]
- finally:
- Session.remove()
-
-
#==============================================================================
# CHECK DECORATORS
#==============================================================================
@@ -778,7 +726,7 @@ class _PermsFunction(object):
def __init__(self, *required_perms):
self.required_perms = required_perms # usually very short - a list is thus fine
- def __nonzero__(self):
+ def __bool__(self):
""" Defend against accidentally forgetting to call the object
and instead evaluating it directly in a boolean context,
which could have security implications.
@@ -835,10 +783,6 @@ class HasPermissionAnyMiddleware(object)
self.required_perms = set(perms)
def __call__(self, authuser, repo_name, purpose=None):
- # repo_name MUST be unicode, since we handle keys in ok
- # dict by unicode
- repo_name = safe_unicode(repo_name)
-
try:
ok = authuser.permissions['repositories'][repo_name] in self.required_perms
except KeyError:
diff --git a/kallithea/lib/auth_modules/__init__.py b/kallithea/lib/auth_modules/__init__.py
--- a/kallithea/lib/auth_modules/__init__.py
+++ b/kallithea/lib/auth_modules/__init__.py
@@ -20,7 +20,7 @@ import logging
import traceback
from kallithea.lib.auth import AuthUser, PasswordGenerator
-from kallithea.lib.compat import formatted_json, hybrid_property
+from kallithea.lib.compat import hybrid_property
from kallithea.lib.utils2 import str2bool
from kallithea.model.db import Setting, User
from kallithea.model.meta import Session
@@ -136,9 +136,6 @@ class KallitheaAuthPluginBase(object):
username)
if username:
user = User.get_by_username_or_email(username)
- if user is None:
- log.debug('Fallback to fetch user in case insensitive mode')
- user = User.get_by_username(username, case_insensitive=True)
else:
log.debug('provided username:`%s` is empty skipping...', username)
return user
@@ -286,11 +283,11 @@ def loadplugin(plugin):
ImportError -- if we couldn't import the plugin at all
"""
log.debug("Importing %s", plugin)
- if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'):
- parts = plugin.split(u'.lib.auth_modules.auth_', 1)
+ if not plugin.startswith('kallithea.lib.auth_modules.auth_'):
+ parts = plugin.split('.lib.auth_modules.auth_', 1)
if len(parts) == 2:
_module, pn = parts
- plugin = u'kallithea.lib.auth_modules.auth_' + pn
+ plugin = 'kallithea.lib.auth_modules.auth_' + pn
PLUGIN_CLASS_NAME = "KallitheaAuthPlugin"
try:
module = importlib.import_module(plugin)
@@ -309,7 +306,7 @@ def loadplugin(plugin):
"a subclass of %s" % (plugin, KallitheaAuthPluginBase))
plugin = pluginclass()
- if plugin.plugin_settings.im_func != KallitheaAuthPluginBase.plugin_settings.im_func:
+ if plugin.plugin_settings.__func__ != KallitheaAuthPluginBase.plugin_settings:
raise TypeError("Authentication class %s.KallitheaAuthPluginBase "
"has overridden the plugin_settings method, which is "
"forbidden." % plugin)
@@ -351,7 +348,7 @@ def authenticate(username, password, env
conf_key = "auth_%s_%s" % (plugin_name, v["name"])
setting = Setting.get_by_name(conf_key)
plugin_settings[v["name"]] = setting.app_settings_value if setting else None
- log.debug('Settings for auth plugin %s:\n%s', plugin_name, formatted_json(plugin_settings))
+ log.debug('Settings for auth plugin %s: %s', plugin_name, plugin_settings)
if not str2bool(plugin_settings["enabled"]):
log.info("Authentication plugin %s is disabled, skipping for %s",
diff --git a/kallithea/lib/auth_modules/auth_container.py b/kallithea/lib/auth_modules/auth_container.py
--- a/kallithea/lib/auth_modules/auth_container.py
+++ b/kallithea/lib/auth_modules/auth_container.py
@@ -29,7 +29,7 @@ import logging
from kallithea.lib import auth_modules
from kallithea.lib.compat import hybrid_property
-from kallithea.lib.utils2 import safe_str, safe_unicode, str2bool
+from kallithea.lib.utils2 import str2bool
from kallithea.model.db import Setting
@@ -180,7 +180,7 @@ class KallitheaAuthPlugin(auth_modules.K
# only way to log in is using environ
username = None
if userobj:
- username = safe_str(getattr(userobj, 'username'))
+ username = getattr(userobj, 'username')
if not username:
# we don't have any objects in DB, user doesn't exist, extract
@@ -199,8 +199,8 @@ class KallitheaAuthPlugin(auth_modules.K
user_data = {
'username': username,
- 'firstname': safe_unicode(firstname or username),
- 'lastname': safe_unicode(lastname or ''),
+ 'firstname': firstname or username,
+ 'lastname': lastname or '',
'groups': [],
'email': email or '',
'admin': admin or False,
diff --git a/kallithea/lib/auth_modules/auth_crowd.py b/kallithea/lib/auth_modules/auth_crowd.py
--- a/kallithea/lib/auth_modules/auth_crowd.py
+++ b/kallithea/lib/auth_modules/auth_crowd.py
@@ -28,10 +28,12 @@ Original author and date, and relevant c
import base64
import logging
-import urllib2
+import urllib.parse
+import urllib.request
-from kallithea.lib import auth_modules
-from kallithea.lib.compat import formatted_json, hybrid_property, json
+from kallithea.lib import auth_modules, ext_json
+from kallithea.lib.compat import hybrid_property
+from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes
log = logging.getLogger(__name__)
@@ -71,10 +73,10 @@ class CrowdServer(object):
self._make_opener()
def _make_opener(self):
- mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
mgr.add_password(None, self._uri, self.user, self.passwd)
- handler = urllib2.HTTPBasicAuthHandler(mgr)
- self.opener = urllib2.build_opener(handler)
+ handler = urllib.request.HTTPBasicAuthHandler(mgr)
+ self.opener = urllib.request.build_opener(handler)
def _request(self, url, body=None, headers=None,
method=None, noformat=False,
@@ -82,14 +84,12 @@ class CrowdServer(object):
_headers = {"Content-type": "application/json",
"Accept": "application/json"}
if self.user and self.passwd:
- authstring = base64.b64encode("%s:%s" % (self.user, self.passwd))
+ authstring = ascii_str(base64.b64encode(safe_bytes("%s:%s" % (self.user, self.passwd))))
_headers["Authorization"] = "Basic %s" % authstring
if headers:
_headers.update(headers)
- log.debug("Sent crowd: \n%s",
- formatted_json({"url": url, "body": body,
- "headers": _headers}))
- req = urllib2.Request(url, body, _headers)
+ log.debug("Sent to crowd at %s:\nHeaders: %s\nBody:\n%s", url, _headers, body)
+ req = urllib.request.Request(url, body, _headers)
if method:
req.get_method = lambda: method
@@ -103,7 +103,7 @@ class CrowdServer(object):
rval["status"] = True
rval["error"] = "Response body was empty"
elif not noformat:
- rval = json.loads(msg)
+ rval = ext_json.loads(msg)
rval["status"] = True
else:
rval = "".join(rdoc.readlines())
@@ -120,14 +120,14 @@ class CrowdServer(object):
"""Authenticate a user against crowd. Returns brief information about
the user."""
url = ("%s/rest/usermanagement/%s/authentication?username=%s"
- % (self._uri, self._version, urllib2.quote(username)))
- body = json.dumps({"value": password})
+ % (self._uri, self._version, urllib.parse.quote(username)))
+ body = ascii_bytes(ext_json.dumps({"value": password}))
return self._request(url, body)
def user_groups(self, username):
"""Retrieve a list of groups to which this user belongs."""
url = ("%s/rest/usermanagement/%s/user/group/nested?username=%s"
- % (self._uri, self._version, urllib2.quote(username)))
+ % (self._uri, self._version, urllib.parse.quote(username)))
return self._request(url)
@@ -209,11 +209,11 @@ class KallitheaAuthPlugin(auth_modules.K
log.debug('Empty username or password skipping...')
return None
- log.debug("Crowd settings: \n%s", formatted_json(settings))
+ log.debug("Crowd settings: %s", settings)
server = CrowdServer(**settings)
server.set_credentials(settings["app_name"], settings["app_password"])
crowd_user = server.user_auth(username, password)
- log.debug("Crowd returned: \n%s", formatted_json(crowd_user))
+ log.debug("Crowd returned: %s", crowd_user)
if not crowd_user["status"]:
log.error('Crowd authentication as %s returned no status', username)
return None
@@ -223,7 +223,7 @@ class KallitheaAuthPlugin(auth_modules.K
return None
res = server.user_groups(crowd_user["name"])
- log.debug("Crowd groups: \n%s", formatted_json(res))
+ log.debug("Crowd groups: %s", res)
crowd_user["groups"] = [x["name"] for x in res["groups"]]
# old attrs fetched from Kallithea database
@@ -246,7 +246,7 @@ class KallitheaAuthPlugin(auth_modules.K
for group in settings["admin_groups"].split(","):
if group in user_data["groups"]:
user_data["admin"] = True
- log.debug("Final crowd user object: \n%s", formatted_json(user_data))
+ log.debug("Final crowd user object: %s", user_data)
log.info('user %s authenticated correctly', user_data['username'])
return user_data
diff --git a/kallithea/lib/auth_modules/auth_internal.py b/kallithea/lib/auth_modules/auth_internal.py
--- a/kallithea/lib/auth_modules/auth_internal.py
+++ b/kallithea/lib/auth_modules/auth_internal.py
@@ -29,8 +29,7 @@ Original author and date, and relevant c
import logging
from kallithea.lib import auth_modules
-from kallithea.lib.compat import formatted_json, hybrid_property
-from kallithea.model.db import User
+from kallithea.lib.compat import hybrid_property
log = logging.getLogger(__name__)
@@ -77,7 +76,7 @@ class KallitheaAuthPlugin(auth_modules.K
"admin": userobj.admin,
"extern_name": userobj.user_id,
}
- log.debug(formatted_json(user_data))
+ log.debug('user data: %s', user_data)
from kallithea.lib import auth
password_match = auth.check_password(password, userobj.password)
diff --git a/kallithea/lib/auth_modules/auth_ldap.py b/kallithea/lib/auth_modules/auth_ldap.py
--- a/kallithea/lib/auth_modules/auth_ldap.py
+++ b/kallithea/lib/auth_modules/auth_ldap.py
@@ -31,7 +31,7 @@ import logging
from kallithea.lib import auth_modules
from kallithea.lib.compat import hybrid_property
from kallithea.lib.exceptions import LdapConnectionError, LdapImportError, LdapPasswordError, LdapUsernameError
-from kallithea.lib.utils2 import safe_str, safe_unicode
+from kallithea.lib.utils2 import safe_str
log = logging.getLogger(__name__)
@@ -70,11 +70,11 @@ class AuthLdap(object):
port)
for host in server.split(',')))
- self.LDAP_BIND_DN = safe_str(bind_dn)
- self.LDAP_BIND_PASS = safe_str(bind_pass)
+ self.LDAP_BIND_DN = bind_dn
+ self.LDAP_BIND_PASS = bind_pass
- self.BASE_DN = safe_str(base_dn)
- self.LDAP_FILTER = safe_str(ldap_filter)
+ self.BASE_DN = base_dn
+ self.LDAP_FILTER = ldap_filter
self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
self.attr_login = attr_login
@@ -139,7 +139,7 @@ class AuthLdap(object):
try:
log.debug('Trying simple bind with %s', dn)
- server.simple_bind_s(dn, safe_str(password))
+ server.simple_bind_s(dn, password)
results = server.search_ext_s(dn, ldap.SCOPE_BASE,
'(objectClass=*)')
if len(results) == 1:
@@ -328,7 +328,8 @@ class KallitheaAuthPlugin(auth_modules.K
(user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password)
log.debug('Got ldap DN response %s', user_dn)
- get_ldap_attr = lambda k: ldap_attrs.get(settings.get(k), [''])[0]
+ def get_ldap_attr(k):
+ return safe_str(ldap_attrs.get(settings.get(k), [b''])[0])
# old attrs fetched from Kallithea database
admin = getattr(userobj, 'admin', False)
@@ -338,8 +339,8 @@ class KallitheaAuthPlugin(auth_modules.K
user_data = {
'username': username,
- 'firstname': safe_unicode(get_ldap_attr('attr_firstname') or firstname),
- 'lastname': safe_unicode(get_ldap_attr('attr_lastname') or lastname),
+ 'firstname': get_ldap_attr('attr_firstname') or firstname,
+ 'lastname': get_ldap_attr('attr_lastname') or lastname,
'groups': [],
'email': get_ldap_attr('attr_email') or email,
'admin': admin,
diff --git a/kallithea/lib/auth_modules/auth_pam.py b/kallithea/lib/auth_modules/auth_pam.py
--- a/kallithea/lib/auth_modules/auth_pam.py
+++ b/kallithea/lib/auth_modules/auth_pam.py
@@ -32,7 +32,7 @@ import threading
import time
from kallithea.lib import auth_modules
-from kallithea.lib.compat import formatted_json, hybrid_property
+from kallithea.lib.compat import hybrid_property
try:
@@ -142,7 +142,7 @@ class KallitheaAuthPlugin(auth_modules.K
log.warning("Cannot extract additional info for PAM user %s", username)
pass
- log.debug("pamuser: \n%s", formatted_json(user_data))
+ log.debug("pamuser: %s", user_data)
log.info('user %s authenticated correctly', user_data['username'])
return user_data
diff --git a/kallithea/lib/base.py b/kallithea/lib/base.py
--- a/kallithea/lib/base.py
+++ b/kallithea/lib/base.py
@@ -28,9 +28,9 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
+import base64
import datetime
import logging
-import time
import traceback
import warnings
@@ -45,12 +45,11 @@ from tg.i18n import ugettext as _
from kallithea import BACKENDS, __version__
from kallithea.config.routing import url
-from kallithea.lib import auth_modules
+from kallithea.lib import auth_modules, ext_json
from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
-from kallithea.lib.compat import json
from kallithea.lib.exceptions import UserCreationError
from kallithea.lib.utils import get_repo_slug, is_valid_repo
-from kallithea.lib.utils2 import AttributeDict, safe_int, safe_str, safe_unicode, set_hook_environment, str2bool
+from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, safe_str, set_hook_environment, str2bool
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
from kallithea.model import meta
from kallithea.model.db import PullRequest, Repository, Setting, User
@@ -97,12 +96,18 @@ def _get_ip_addr(environ):
return _filter_proxy(ip)
-def _get_access_path(environ):
- """Return PATH_INFO from environ ... using tg.original_request if available."""
+def get_path_info(environ):
+ """Return PATH_INFO from environ ... using tg.original_request if available.
+
+ In Python 3 WSGI, PATH_INFO is a unicode str, but kind of contains encoded
+ bytes. The code points are guaranteed to only use the lower 8 bit bits, and
+ encoding the string with the 1:1 encoding latin1 will give the
+ corresponding byte string ... which then can be decoded to proper unicode.
+ """
org_req = environ.get('tg.original_request')
if org_req is not None:
environ = org_req.environ
- return environ.get('PATH_INFO')
+ return safe_str(environ['PATH_INFO'].encode('latin1'))
def log_in_user(user, remember, is_external_auth, ip_addr):
@@ -172,7 +177,7 @@ class BasicAuth(paste.auth.basic.AuthBas
(authmeth, auth) = authorization.split(' ', 1)
if 'basic' != authmeth.lower():
return self.build_authentication(environ)
- auth = auth.strip().decode('base64')
+ auth = safe_str(base64.b64decode(auth.strip()))
_parts = auth.split(':', 1)
if len(_parts) == 2:
username, password = _parts
@@ -218,7 +223,7 @@ class BaseVCSController(object):
Returns (None, wsgi_app) to send the wsgi_app response to the client.
"""
# Use anonymous access if allowed for action on repo.
- default_user = User.get_default_user(cache=True)
+ default_user = User.get_default_user()
default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
if default_authuser is None:
log.debug('No anonymous access at all') # move on to proper user auth
@@ -242,7 +247,7 @@ class BaseVCSController(object):
# If not authenticated by the container, running basic auth
if not username:
- self.authenticate.realm = safe_str(self.config['realm'])
+ self.authenticate.realm = self.config['realm']
result = self.authenticate(environ)
if isinstance(result, str):
paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
@@ -273,11 +278,8 @@ class BaseVCSController(object):
def _check_permission(self, action, authuser, repo_name):
"""
- Checks permissions using action (push/pull) user and repository
- name
-
- :param action: 'push' or 'pull' action
- :param user: `User` instance
+ :param action: 'push' or 'pull'
+ :param user: `AuthUser` instance
:param repo_name: repository name
"""
if action == 'push':
@@ -286,7 +288,7 @@ class BaseVCSController(object):
repo_name):
return False
- else:
+ elif action == 'pull':
#any other action need at least read permission
if not HasPermissionAnyMiddleware('repository.read',
'repository.write',
@@ -294,13 +296,15 @@ class BaseVCSController(object):
repo_name):
return False
+ else:
+ assert False, action
+
return True
def _get_ip_addr(self, environ):
return _get_ip_addr(environ)
def __call__(self, environ, start_response):
- start = time.time()
try:
# try parsing a request for this VCS - if it fails, call the wrapped app
parsed_request = self.parse_request(environ)
@@ -334,7 +338,7 @@ class BaseVCSController(object):
try:
log.info('%s action on %s repo "%s" by "%s" from %s',
- parsed_request.action, self.scm_alias, parsed_request.repo_name, safe_str(user.username), ip_addr)
+ parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr)
app = self._make_app(parsed_request)
return app(environ, start_response)
except Exception:
@@ -343,10 +347,6 @@ class BaseVCSController(object):
except webob.exc.HTTPException as e:
return e(environ, start_response)
- finally:
- log_ = logging.getLogger('kallithea.' + self.__class__.__name__)
- log_.debug('Request time: %.3fs', time.time() - start)
- meta.Session.remove()
class BaseController(TGController):
@@ -413,7 +413,7 @@ class BaseController(TGController):
# END CONFIG VARS
c.repo_name = get_repo_slug(request) # can be empty
- c.backends = BACKENDS.keys()
+ c.backends = list(BACKENDS)
self.cut_off_limit = safe_int(config.get('cut_off_limit'))
@@ -454,7 +454,7 @@ class BaseController(TGController):
return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr)
# User is default user (if active) or anonymous
- default_user = User.get_default_user(cache=True)
+ default_user = User.get_default_user()
authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
if authuser is None: # fall back to anonymous
authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make?
@@ -529,9 +529,9 @@ class BaseController(TGController):
request.ip_addr = ip_addr
request.needs_csrf_check = needs_csrf_check
- log.info('IP: %s User: %s accessed %s',
+ log.info('IP: %s User: %s Request: %s',
request.ip_addr, request.authuser,
- safe_unicode(_get_access_path(environ)),
+ get_path_info(environ),
)
return super(BaseController, self).__call__(environ, context)
except webob.exc.HTTPException as e:
@@ -552,13 +552,13 @@ class BaseRepoController(BaseController)
def _before(self, *args, **kwargs):
super(BaseRepoController, self)._before(*args, **kwargs)
- if c.repo_name: # extracted from routes
+ if c.repo_name: # extracted from request by base-base BaseController._before
_dbr = Repository.get_by_repo_name(c.repo_name)
if not _dbr:
return
log.debug('Found repository in database %s with state `%s`',
- safe_unicode(_dbr), safe_unicode(_dbr.repo_state))
+ _dbr, _dbr.repo_state)
route = getattr(request.environ.get('routes.route'), 'name', '')
# allow to delete repos that are somehow damages in filesystem
@@ -608,7 +608,7 @@ class BaseRepoController(BaseController)
raise webob.exc.HTTPNotFound()
except RepositoryError as e:
log.error(traceback.format_exc())
- h.flash(safe_str(e), category='error')
+ h.flash(e, category='error')
raise webob.exc.HTTPBadRequest()
@@ -634,7 +634,7 @@ def jsonify(func, *args, **kwargs):
warnings.warn(msg, Warning, 2)
log.warning(msg)
log.debug("Returning JSON wrapped action output")
- return json.dumps(data, encoding='utf-8')
+ return ascii_bytes(ext_json.dumps(data))
@decorator.decorator
def IfSshEnabled(func, *args, **kwargs):
diff --git a/kallithea/lib/caching_query.py b/kallithea/lib/caching_query.py
deleted file mode 100644
--- a/kallithea/lib/caching_query.py
+++ /dev/null
@@ -1,240 +0,0 @@
-"""caching_query.py
-
-Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy.
-
-The three new concepts introduced here are:
-
- * CachingQuery - a Query subclass that caches and
- retrieves results in/from Beaker.
- * FromCache - a query option that establishes caching
- parameters on a Query
- * _params_from_query - extracts value parameters from
- a Query.
-
-The rest of what's here are standard SQLAlchemy and
-Beaker constructs.
-
-"""
-import beaker
-from beaker.exceptions import BeakerException
-from sqlalchemy.orm.interfaces import MapperOption
-from sqlalchemy.orm.query import Query
-from sqlalchemy.sql import visitors
-
-from kallithea.lib.utils2 import safe_str
-
-
-class CachingQuery(Query):
- """A Query subclass which optionally loads full results from a Beaker
- cache region.
-
- The CachingQuery stores additional state that allows it to consult
- a Beaker cache before accessing the database:
-
- * A "region", which is a cache region argument passed to a
- Beaker CacheManager, specifies a particular cache configuration
- (including backend implementation, expiration times, etc.)
- * A "namespace", which is a qualifying name that identifies a
- group of keys within the cache. A query that filters on a name
- might use the name "by_name", a query that filters on a date range
- to a joined table might use the name "related_date_range".
-
- When the above state is present, a Beaker cache is retrieved.
-
- The "namespace" name is first concatenated with
- a string composed of the individual entities and columns the Query
- requests, i.e. such as ``Query(User.id, User.name)``.
-
- The Beaker cache is then loaded from the cache manager based
- on the region and composed namespace. The key within the cache
- itself is then constructed against the bind parameters specified
- by this query, which are usually literals defined in the
- WHERE clause.
-
- The FromCache mapper option below represent
- the "public" method of configuring this state upon the CachingQuery.
-
- """
-
- def __init__(self, manager, *args, **kw):
- self.cache_manager = manager
- Query.__init__(self, *args, **kw)
-
- def __iter__(self):
- """override __iter__ to pull results from Beaker
- if particular attributes have been configured.
-
- Note that this approach does *not* detach the loaded objects from
- the current session. If the cache backend is an in-process cache
- (like "memory") and lives beyond the scope of the current session's
- transaction, those objects may be expired. The method here can be
- modified to first expunge() each loaded item from the current
- session before returning the list of items, so that the items
- in the cache are not the same ones in the current Session.
-
- """
- if hasattr(self, '_cache_parameters'):
- return self.get_value(createfunc=lambda:
- list(Query.__iter__(self)))
- else:
- return Query.__iter__(self)
-
- def invalidate(self):
- """Invalidate the value represented by this Query."""
-
- cache, cache_key = _get_cache_parameters(self)
- cache.remove(cache_key)
-
- def get_value(self, merge=True, createfunc=None):
- """Return the value from the cache for this query.
-
- Raise KeyError if no value present and no
- createfunc specified.
-
- """
- cache, cache_key = _get_cache_parameters(self)
- ret = cache.get_value(cache_key, createfunc=createfunc)
- if merge:
- ret = self.merge_result(ret, load=False)
- return ret
-
- def set_value(self, value):
- """Set the value in the cache for this query."""
-
- cache, cache_key = _get_cache_parameters(self)
- cache.put(cache_key, value)
-
-
-def query_callable(manager, query_cls=CachingQuery):
- def query(*arg, **kw):
- return query_cls(manager, *arg, **kw)
- return query
-
-
-def get_cache_region(name, region):
- if region not in beaker.cache.cache_regions:
- raise BeakerException('Cache region `%s` not configured '
- 'Check if proper cache settings are in the .ini files' % region)
- kw = beaker.cache.cache_regions[region]
- return beaker.cache.Cache._get_cache(name, kw)
-
-
-def _get_cache_parameters(query):
- """For a query with cache_region and cache_namespace configured,
- return the corresponding Cache instance and cache key, based
- on this query's current criterion and parameter values.
-
- """
- if not hasattr(query, '_cache_parameters'):
- raise ValueError("This Query does not have caching "
- "parameters configured.")
-
- region, namespace, cache_key = query._cache_parameters
-
- namespace = _namespace_from_query(namespace, query)
-
- if cache_key is None:
- # cache key - the value arguments from this query's parameters.
- args = [safe_str(x) for x in _params_from_query(query)]
- args.extend(filter(lambda k: k not in ['None', None, u'None'],
- [str(query._limit), str(query._offset)]))
-
- cache_key = " ".join(args)
-
- if cache_key is None:
- raise Exception('Cache key cannot be None')
-
- # get cache
- #cache = query.cache_manager.get_cache_region(namespace, region)
- cache = get_cache_region(namespace, region)
- # optional - hash the cache_key too for consistent length
- # import uuid
- # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
-
- return cache, cache_key
-
-
-def _namespace_from_query(namespace, query):
- # cache namespace - the token handed in by the
- # option + class we're querying against
- namespace = " ".join([namespace] + [str(x) for x in query._entities])
-
- # memcached wants this
- namespace = namespace.replace(' ', '_')
-
- return namespace
-
-
-def _set_cache_parameters(query, region, namespace, cache_key):
-
- if hasattr(query, '_cache_parameters'):
- region, namespace, cache_key = query._cache_parameters
- raise ValueError("This query is already configured "
- "for region %r namespace %r" %
- (region, namespace)
- )
- query._cache_parameters = region, safe_str(namespace), cache_key
-
-
-class FromCache(MapperOption):
- """Specifies that a Query should load results from a cache."""
-
- propagate_to_loaders = False
-
- def __init__(self, region, namespace, cache_key=None):
- """Construct a new FromCache.
-
- :param region: the cache region. Should be a
- region configured in the Beaker CacheManager.
-
- :param namespace: the cache namespace. Should
- be a name uniquely describing the target Query's
- lexical structure.
-
- :param cache_key: optional. A string cache key
- that will serve as the key to the query. Use this
- if your query has a huge amount of parameters (such
- as when using in_()) which correspond more simply to
- some other identifier.
-
- """
- self.region = region
- self.namespace = namespace
- self.cache_key = cache_key
-
- def process_query(self, query):
- """Process a Query during normal loading operation."""
-
- _set_cache_parameters(query, self.region, self.namespace,
- self.cache_key)
-
-
-def _params_from_query(query):
- """Pull the bind parameter values from a query.
-
- This takes into account any scalar attribute bindparam set up.
-
- E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
- would return [5, 7].
-
- """
- v = []
-
- def visit_bindparam(bind):
- if bind.key in query._params:
- value = query._params[bind.key]
- elif bind.callable:
- # lazyloader may dig a callable in here, intended
- # to late-evaluate params after autoflush is called.
- # convert to a scalar value.
- value = bind.callable()
- else:
- value = bind.value
-
- v.append(value)
- if query._criterion is not None:
- visitors.traverse(query._criterion, {}, {'bindparam': visit_bindparam})
- for f in query._from_obj:
- visitors.traverse(f, {}, {'bindparam': visit_bindparam})
- return v
diff --git a/kallithea/lib/celerylib/__init__.py b/kallithea/lib/celerylib/__init__.py
--- a/kallithea/lib/celerylib/__init__.py
+++ b/kallithea/lib/celerylib/__init__.py
@@ -33,9 +33,9 @@ from hashlib import md5
from decorator import decorator
from tg import config
-from kallithea import CELERY_EAGER, CELERY_ON
+import kallithea
from kallithea.lib.pidlock import DaemonLock, LockHeld
-from kallithea.lib.utils2 import safe_str
+from kallithea.lib.utils2 import safe_bytes
from kallithea.model import meta
@@ -57,10 +57,10 @@ class FakeTask(object):
def task(f_org):
- """Wrapper of celery.task.task, running async if CELERY_ON
+ """Wrapper of celery.task.task, running async if CELERY_APP
"""
- if CELERY_ON:
+ if kallithea.CELERY_APP:
def f_async(*args, **kwargs):
log.info('executing %s task', f_org.__name__)
try:
@@ -68,8 +68,7 @@ def task(f_org):
finally:
log.info('executed %s task', f_org.__name__)
f_async.__name__ = f_org.__name__
- from kallithea.lib import celerypylons
- runner = celerypylons.task(ignore_result=True)(f_async)
+ runner = kallithea.CELERY_APP.task(ignore_result=True)(f_async)
def f_wrapped(*args, **kwargs):
t = runner.apply_async(args=args, kwargs=kwargs)
@@ -95,7 +94,7 @@ def __get_lockkey(func, *fargs, **fkwarg
func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
lockkey = 'task_%s.lock' % \
- md5(func_name + '-' + '-'.join(map(safe_str, params))).hexdigest()
+ md5(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest()
return lockkey
@@ -128,7 +127,7 @@ def dbsession(func):
ret = func(*fargs, **fkwargs)
return ret
finally:
- if CELERY_ON and not CELERY_EAGER:
+ if kallithea.CELERY_APP and not kallithea.CELERY_EAGER:
meta.Session.remove()
return decorator(__wrapper, func)
diff --git a/kallithea/lib/celerylib/tasks.py b/kallithea/lib/celerylib/tasks.py
--- a/kallithea/lib/celerylib/tasks.py
+++ b/kallithea/lib/celerylib/tasks.py
@@ -26,24 +26,23 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-import logging
+import email.utils
import os
-import rfc822
import traceback
from collections import OrderedDict
from operator import itemgetter
from time import mktime
+import celery.utils.log
from tg import config
-from kallithea import CELERY_ON
-from kallithea.lib import celerylib
-from kallithea.lib.compat import json
+import kallithea
+from kallithea.lib import celerylib, ext_json
from kallithea.lib.helpers import person
from kallithea.lib.hooks import log_create_repository
from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
from kallithea.lib.utils import action_logger
-from kallithea.lib.utils2 import str2bool
+from kallithea.lib.utils2 import ascii_bytes, str2bool
from kallithea.lib.vcs.utils import author_email
from kallithea.model.db import RepoGroup, Repository, Statistics, User
@@ -51,7 +50,7 @@ from kallithea.model.db import RepoGroup
__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
-log = logging.getLogger(__name__)
+log = celery.utils.log.get_task_logger(__name__)
@celerylib.task
@@ -67,6 +66,11 @@ def whoosh_index(repo_location, full_ind
.run(full_index=full_index)
+# for js data compatibility cleans the key for person from '
+def akc(k):
+ return person(k).replace('"', '')
+
+
@celerylib.task
@celerylib.dbsession
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
@@ -80,9 +84,6 @@ def get_commits_stats(repo_name, ts_min_
try:
lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))
- # for js data compatibility cleans the key for person from '
- akc = lambda k: person(k).replace('"', "")
-
co_day_auth_aggr = {}
commits_by_day_aggregate = {}
repo = Repository.get_by_repo_name(repo_name)
@@ -118,22 +119,21 @@ def get_commits_stats(repo_name, ts_min_
return True
if cur_stats:
- commits_by_day_aggregate = OrderedDict(json.loads(
+ commits_by_day_aggregate = OrderedDict(ext_json.loads(
cur_stats.commit_activity_combined))
- co_day_auth_aggr = json.loads(cur_stats.commit_activity)
+ co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
log.debug('starting parsing %s', parse_limit)
- lmktime = mktime
- last_rev = last_rev + 1 if last_rev >= 0 else 0
+ last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
log.debug('Getting revisions from %s to %s',
last_rev, last_rev + parse_limit
)
for cs in repo[last_rev:last_rev + parse_limit]:
log.debug('parsing %s', cs)
last_cs = cs # remember last parsed changeset
- k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
- cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
+ tt = cs.date.timetuple()
+ k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
if akc(cs.author) in co_day_auth_aggr:
try:
@@ -143,8 +143,7 @@ def get_commits_stats(repo_name, ts_min_
except ValueError:
time_pos = None
- if time_pos >= 0 and time_pos is not None:
-
+ if time_pos is not None and time_pos >= 0:
datadict = \
co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
@@ -195,8 +194,8 @@ def get_commits_stats(repo_name, ts_min_
}
stats = cur_stats if cur_stats else Statistics()
- stats.commit_activity = json.dumps(co_day_auth_aggr)
- stats.commit_activity_combined = json.dumps(overview_data)
+ stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
+ stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
log.debug('last revision %s', last_rev)
leftovers = len(repo.revisions[last_rev:])
@@ -204,7 +203,7 @@ def get_commits_stats(repo_name, ts_min_
if last_rev == 0 or leftovers < parse_limit:
log.debug('getting code trending stats')
- stats.languages = json.dumps(__get_codes_stats(repo_name))
+ stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
try:
stats.repository = dbrepo
@@ -221,7 +220,7 @@ def get_commits_stats(repo_name, ts_min_
lock.release()
# execute another task if celery is enabled
- if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
+ if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0:
get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
elif recurse_limit <= 0:
log.debug('Not recursing - limit has been reached')
@@ -234,7 +233,7 @@ def get_commits_stats(repo_name, ts_min_
@celerylib.task
@celerylib.dbsession
-def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
+def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
"""
Sends an email with defined parameters from the .ini files.
@@ -244,7 +243,8 @@ def send_email(recipients, subject, body
:param body: body of the mail
:param html_body: html version of body
:param headers: dictionary of prepopulated e-mail headers
- :param author: User object of the author of this mail, if known and relevant
+ :param from_name: full name to be used as sender of this mail - often a
+ .full_name_or_username value
"""
assert isinstance(recipients, list), recipients
if headers is None:
@@ -276,13 +276,13 @@ def send_email(recipients, subject, body
# SMTP sender
envelope_from = email_config.get('app_email_from', 'Kallithea')
# 'From' header
- if author is not None:
- # set From header based on author but with a generic e-mail address
+ if from_name is not None:
+ # set From header based on from_name but with a generic e-mail address
# In case app_email_from is in "Some Name " format, we first
# extract the e-mail address.
envelope_addr = author_email(envelope_from)
headers['From'] = '"%s" <%s>' % (
- rfc822.quote('%s (no-reply)' % author.full_name_or_username),
+ email.utils.quote('%s (no-reply)' % from_name),
envelope_addr)
user = email_config.get('smtp_username')
@@ -414,7 +414,7 @@ def create_repo_fork(form_data, cur_user
DBS = celerylib.get_session()
- base_path = Repository.base_path()
+ base_path = kallithea.CONFIG['base_path']
cur_user = User.guess_instance(cur_user)
repo_name = form_data['repo_name'] # fork in this case
@@ -489,7 +489,7 @@ def __get_codes_stats(repo_name):
for _topnode, _dirnodes, filenodes in tip.walk('/'):
for filenode in filenodes:
ext = filenode.extension.lower()
- if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not filenode.is_binary:
+ if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary:
if ext in code_stats:
code_stats[ext] += 1
else:
diff --git a/kallithea/lib/celerypylons/__init__.py b/kallithea/lib/celerypylons/__init__.py
--- a/kallithea/lib/celerypylons/__init__.py
+++ b/kallithea/lib/celerypylons/__init__.py
@@ -14,34 +14,64 @@ To make sure that the config really has
mandatory settings.
"""
+import logging
+
import celery
-import celery.result as result
import tg
-from celery.bin import worker
-from celery.task import task
+
+import kallithea
-def celery_config(config):
- """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
+class CeleryConfig(object):
+ imports = ['kallithea.lib.celerylib.tasks']
+ task_always_eager = False
- # Verify .ini file configuration has been loaded
- assert config['celery.imports'] == 'kallithea.lib.celerylib.tasks', 'Kallithea Celery configuration has not been loaded'
+# map from Kallithea .ini Celery 3 config names to Celery 4 config names
+celery3_compat = {
+ 'broker.url': 'broker_url',
+ 'celery.accept.content': 'accept_content',
+ 'celery.always.eager': 'task_always_eager',
+ 'celery.amqp.task.result.expires': 'result_expires',
+ 'celeryd.concurrency': 'worker_concurrency',
+ 'celeryd.max.tasks.per.child': 'worker_max_tasks_per_child',
+ #'celery.imports' ends up unchanged
+ 'celery.result.backend': 'result_backend',
+ 'celery.result.serializer': 'result_serializer',
+ 'celery.task.serializer': 'task_serializer',
+}
- class CeleryConfig(object):
- pass
+list_config_names = """imports accept_content""".split()
+
+
+desupported = set([
+ 'celery.result.dburi',
+ 'celery.result.serialier',
+ 'celery.send.task.error.emails',
+])
+
+
+log = logging.getLogger(__name__)
+
+
+def make_celery_config(config):
+ """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
celery_config = CeleryConfig()
- PREFIXES = """ADMINS BROKER CASSANDRA CELERYBEAT CELERYD CELERYMON CELERY EMAIL SERVER""".split()
- LIST_PARAMS = """CELERY_IMPORTS ADMINS ROUTES CELERY_ACCEPT_CONTENT""".split()
-
for config_key, config_value in sorted(config.items()):
- celery_key = config_key.replace('.', '_').upper()
- if celery_key.split('_', 1)[0] not in PREFIXES:
+ if config_key in desupported and config_value:
+ log.error('Celery configuration setting %r is no longer supported', config_key)
+ celery_key = celery3_compat.get(config_key)
+ parts = config_key.split('.', 1)
+ if celery_key: # explicit Celery 3 backwards compatibility
+ pass
+ elif parts[0] == 'celery' and len(parts) == 2: # Celery 4 config key
+ celery_key = parts[1]
+ else:
continue
- if not isinstance(config_value, basestring):
+ if not isinstance(config_value, str):
continue
- if celery_key in LIST_PARAMS:
+ if celery_key in list_config_names:
celery_value = config_value.split()
elif config_value.isdigit():
celery_value = int(config_value)
@@ -53,6 +83,10 @@ def celery_config(config):
return celery_config
-# Create celery app from the TurboGears configuration file
-app = celery.Celery()
-app.config_from_object(celery_config(tg.config))
+def make_app():
+ """Create celery app from the TurboGears configuration file"""
+ app = celery.Celery()
+ celery_config = make_celery_config(tg.config)
+ kallithea.CELERY_EAGER = celery_config.task_always_eager
+ app.config_from_object(celery_config)
+ return app
diff --git a/kallithea/lib/colored_formatter.py b/kallithea/lib/colored_formatter.py
--- a/kallithea/lib/colored_formatter.py
+++ b/kallithea/lib/colored_formatter.py
@@ -15,7 +15,7 @@
import logging
-BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
+BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
# Sequences
RESET_SEQ = "\033[0m"
diff --git a/kallithea/lib/compat.py b/kallithea/lib/compat.py
--- a/kallithea/lib/compat.py
+++ b/kallithea/lib/compat.py
@@ -29,7 +29,6 @@ Original author and date, and relevant c
import functools
import os
-import sys
#==============================================================================
# Hybrid property/method
@@ -43,15 +42,10 @@ from sqlalchemy.util import OrderedSet
#==============================================================================
# json
#==============================================================================
-from kallithea.lib.ext_json import json
+from kallithea.lib import ext_json
-# alias for formatted json
-formatted_json = functools.partial(json.dumps, indent=4, sort_keys=True)
-
-
-
-
+formatted_json = functools.partial(ext_json.dumps, indent=4, sort_keys=True)
#==============================================================================
@@ -68,3 +62,8 @@ if os.name == 'nt': # Windows
else:
kill = os.kill
+
+
+# mute pyflakes "imported but unused"
+assert hybrid_property
+assert OrderedSet
diff --git a/kallithea/lib/db_manage.py b/kallithea/lib/db_manage.py
--- a/kallithea/lib/db_manage.py
+++ b/kallithea/lib/db_manage.py
@@ -26,8 +26,6 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-from __future__ import print_function
-
import logging
import os
import sys
@@ -56,7 +54,6 @@ class DbManage(object):
self.tests = tests
self.root = root
self.dburi = dbconf
- self.db_exists = False
self.cli_args = cli_args or {}
self.init_db(SESSION=SESSION)
@@ -189,7 +186,7 @@ class DbManage(object):
return password
if username is None:
- username = raw_input('Specify admin username:')
+ username = input('Specify admin username:')
if password is None:
password = get_password()
if not password:
@@ -198,7 +195,7 @@ class DbManage(object):
if not password:
sys.exit()
if email is None:
- email = raw_input('Specify admin email:')
+ email = input('Specify admin email:')
self.create_user(username, password, email, True)
else:
log.info('creating admin and regular test users')
@@ -294,7 +291,7 @@ class DbManage(object):
if _path is not None:
path = _path
elif not self.tests and not test_repo_path:
- path = raw_input(
+ path = input(
'Enter a valid absolute path to store repositories. '
'All repositories in that path will be added automatically:'
)
@@ -385,18 +382,18 @@ class DbManage(object):
def create_user(self, username, password, email='', admin=False):
log.info('creating user %s', username)
UserModel().create_or_update(username, password, email,
- firstname=u'Kallithea', lastname=u'Admin',
+ firstname='Kallithea', lastname='Admin',
active=True, admin=admin,
extern_type=User.DEFAULT_AUTH_TYPE)
def create_default_user(self):
log.info('creating default user')
# create default user for handling default permissions.
- user = UserModel().create_or_update(username=User.DEFAULT_USER,
+ user = UserModel().create_or_update(username=User.DEFAULT_USER_NAME,
password=str(uuid.uuid1())[:20],
email='anonymous@kallithea-scm.org',
- firstname=u'Anonymous',
- lastname=u'User')
+ firstname='Anonymous',
+ lastname='User')
# based on configuration options activate/deactivate this user which
# controls anonymous access
if self.cli_args.get('public_access') is False:
@@ -419,4 +416,4 @@ class DbManage(object):
permissions that are missing, and not alter already defined ones
"""
log.info('creating default user permissions')
- PermissionModel().create_default_permissions(user=User.DEFAULT_USER)
+ PermissionModel().create_default_permissions(user=User.DEFAULT_USER_NAME)
diff --git a/kallithea/lib/diffs.py b/kallithea/lib/diffs.py
--- a/kallithea/lib/diffs.py
+++ b/kallithea/lib/diffs.py
@@ -32,7 +32,7 @@ import re
from tg.i18n import ugettext as _
from kallithea.lib import helpers as h
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import safe_str
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.vcs.exceptions import VCSError
from kallithea.lib.vcs.nodes import FileNode, SubModuleNode
@@ -216,8 +216,7 @@ def wrapped_diff(filenode_old, filenode_
stats = (0, 0)
if not html_diff:
- submodules = filter(lambda o: isinstance(o, SubModuleNode),
- [filenode_new, filenode_old])
+ submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
if submodules:
html_diff = wrap_to_table(h.escape('Submodule %r' % submodules[0]))
else:
@@ -235,10 +234,9 @@ def get_gitdiff(filenode_old, filenode_n
"""
# make sure we pass in default context
context = context or 3
- submodules = filter(lambda o: isinstance(o, SubModuleNode),
- [filenode_new, filenode_old])
+ submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
if submodules:
- return ''
+ return b''
for filenode in (filenode_old, filenode_new):
if not isinstance(filenode, FileNode):
@@ -263,7 +261,7 @@ def get_diff(scm_instance, rev1, rev2, p
ignore_whitespace=ignore_whitespace, context=context)
except MemoryError:
h.flash('MemoryError: Diff is too big', category='error')
- return ''
+ return b''
NEW_FILENODE = 1
@@ -281,7 +279,7 @@ class DiffProcessor(object):
mentioned in the diff together with a dict of meta information that
can be used to render it in a HTML template.
"""
- _diff_git_re = re.compile('^diff --git', re.MULTILINE)
+ _diff_git_re = re.compile(b'^diff --git', re.MULTILINE)
def __init__(self, diff, vcs='hg', diff_limit=None, inline_diff=True):
"""
@@ -291,10 +289,10 @@ class DiffProcessor(object):
based on that parameter cut off will be triggered, set to None
to show full diff
"""
- if not isinstance(diff, basestring):
- raise Exception('Diff must be a basestring got %s instead' % type(diff))
+ if not isinstance(diff, bytes):
+ raise Exception('Diff must be bytes - got %s' % type(diff))
- self._diff = diff
+ self._diff = memoryview(diff)
self.adds = 0
self.removes = 0
self.diff_limit = diff_limit
@@ -317,7 +315,7 @@ class DiffProcessor(object):
self.limited_diff = True
continue
- head, diff_lines = _get_header(self.vcs, buffer(self._diff, start, end - start))
+ head, diff_lines = _get_header(self.vcs, self._diff[start:end])
op = None
stats = {
@@ -399,7 +397,7 @@ class DiffProcessor(object):
'new_lineno': '',
'action': 'context',
'line': msg,
- } for _op, msg in stats['ops'].iteritems()
+ } for _op, msg in stats['ops'].items()
if _op not in [MOD_FILENODE]])
_files.append({
@@ -420,22 +418,22 @@ class DiffProcessor(object):
for chunk in diff_data['chunks']:
lineiter = iter(chunk)
try:
- peekline = lineiter.next()
+ peekline = next(lineiter)
while True:
# find a first del line
while peekline['action'] != 'del':
- peekline = lineiter.next()
+ peekline = next(lineiter)
delline = peekline
- peekline = lineiter.next()
+ peekline = next(lineiter)
# if not followed by add, eat all following del lines
if peekline['action'] != 'add':
while peekline['action'] == 'del':
- peekline = lineiter.next()
+ peekline = next(lineiter)
continue
# found an add - make sure it is the only one
addline = peekline
try:
- peekline = lineiter.next()
+ peekline = next(lineiter)
except StopIteration:
# add was last line - ok
_highlight_inline_diff(delline, addline)
@@ -479,10 +477,10 @@ def _escaper(string):
return ' '
assert False
- return _escape_re.sub(substitute, safe_unicode(string))
+ return _escape_re.sub(substitute, safe_str(string))
-_git_header_re = re.compile(r"""
+_git_header_re = re.compile(br"""
^diff[ ]--git[ ]a/(?P.+?)[ ]b/(?P.+?)\n
(?:^old[ ]mode[ ](?P\d+)\n
^new[ ]mode[ ](?P\d+)(?:\n|$))?
@@ -499,7 +497,7 @@ _git_header_re = re.compile(r"""
""", re.VERBOSE | re.MULTILINE)
-_hg_header_re = re.compile(r"""
+_hg_header_re = re.compile(br"""
^diff[ ]--git[ ]a/(?P.+?)[ ]b/(?P.+?)\n
(?:^old[ ]mode[ ](?P\d+)\n
^new[ ]mode[ ](?P\d+)(?:\n|$))?
@@ -518,6 +516,9 @@ _hg_header_re = re.compile(r"""
""", re.VERBOSE | re.MULTILINE)
+_header_next_check = re.compile(br'''(?!@)(?!literal )(?!delta )''')
+
+
def _get_header(vcs, diff_chunk):
"""
Parses a Git diff for a single file (header and chunks) and returns a tuple with:
@@ -537,11 +538,11 @@ def _get_header(vcs, diff_chunk):
match = _hg_header_re.match(diff_chunk)
if match is None:
raise Exception('diff not recognized as valid %s diff' % vcs)
- meta_info = match.groupdict()
+ meta_info = {k: None if v is None else safe_str(v) for k, v in match.groupdict().items()}
rest = diff_chunk[match.end():]
- if rest and not rest.startswith('@') and not rest.startswith('literal ') and not rest.startswith('delta '):
- raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, diff_chunk[:match.end()], rest[:1000]))
- diff_lines = (_escaper(m.group(0)) for m in re.finditer(r'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
+ if rest and _header_next_check.match(rest):
+ raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, safe_str(bytes(diff_chunk[:match.end()])), safe_str(bytes(rest[:1000]))))
+ diff_lines = (_escaper(m.group(0)) for m in re.finditer(br'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
return meta_info, diff_lines
@@ -559,7 +560,7 @@ def _parse_lines(diff_lines):
chunks = []
try:
- line = diff_lines.next()
+ line = next(diff_lines)
while True:
lines = []
@@ -590,7 +591,7 @@ def _parse_lines(diff_lines):
'line': line,
})
- line = diff_lines.next()
+ line = next(diff_lines)
while old_line < old_end or new_line < new_end:
if not line:
@@ -623,7 +624,7 @@ def _parse_lines(diff_lines):
'line': line[1:],
})
- line = diff_lines.next()
+ line = next(diff_lines)
if _newline_marker.match(line):
# we need to append to lines, since this is not
@@ -634,7 +635,7 @@ def _parse_lines(diff_lines):
'action': 'context',
'line': line,
})
- line = diff_lines.next()
+ line = next(diff_lines)
if old_line > old_end:
raise Exception('error parsing diff - more than %s "-" lines at -%s+%s' % (old_end, old_line, new_line))
if new_line > new_end:
diff --git a/kallithea/lib/exceptions.py b/kallithea/lib/exceptions.py
--- a/kallithea/lib/exceptions.py
+++ b/kallithea/lib/exceptions.py
@@ -74,9 +74,8 @@ class UserCreationError(Exception):
pass
-class RepositoryCreationError(Exception):
+class HgsubversionImportError(Exception):
pass
-
-class HgsubversionImportError(Exception):
+class InvalidCloneUriException(Exception):
pass
diff --git a/kallithea/lib/ext_json.py b/kallithea/lib/ext_json.py
--- a/kallithea/lib/ext_json.py
+++ b/kallithea/lib/ext_json.py
@@ -1,16 +1,16 @@
"""
-Extended JSON encoder for json
+Extended JSON encoder with support for more data types
-json.org does not specify how date time can be represented - monkeypatch it to do something.
+json.org does not specify how date time can be represented - just encode it somehow and ignore decoding ...
"""
import datetime
import decimal
import functools
-import json # is re-exported after monkey patching
+import json
-__all__ = ['json']
+__all__ = ['dumps', 'dump', 'load', 'loads']
def _is_tz_aware(value):
@@ -70,10 +70,12 @@ class ExtendedEncoder(json.JSONEncoder):
try:
return _obj_dump(obj)
except NotImplementedError:
- pass
+ pass # quiet skipping of unsupported types!
raise TypeError("%r is not JSON serializable" % (obj,))
-# monkey-patch and export JSON encoder to use custom encoding method
-json.dumps = functools.partial(json.dumps, cls=ExtendedEncoder)
-json.dump = functools.partial(json.dump, cls=ExtendedEncoder)
+dumps = functools.partial(json.dumps, cls=ExtendedEncoder)
+dump = functools.partial(json.dump, cls=ExtendedEncoder)
+# No special support for loading these types back!!!
+load = json.load
+loads = json.loads
diff --git a/kallithea/lib/feeds.py b/kallithea/lib/feeds.py
new file mode 100644
--- /dev/null
+++ b/kallithea/lib/feeds.py
@@ -0,0 +1,152 @@
+# -*- coding: utf-8 -*-
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+
+"""
+kallithea.lib.feeds
+~~~~~~~~~~~~~~~~~~~
+
+Shared code for providing RSS and ATOM feeds.
+"""
+
+import datetime
+import re
+
+import mako.template
+
+
+language = 'en-us'
+ttl = "5"
+
+
+# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
+def rfc2822_date(date):
+ # We do this ourselves to be timezone aware, email.Utils is not tz aware.
+ if getattr(date, "tzinfo", False):
+ time_str = date.strftime('%a, %d %b %Y %H:%M:%S ')
+ offset = date.tzinfo.utcoffset(date)
+ timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
+ hour, minute = divmod(timezone, 60)
+ return time_str + "%+03d%02d" % (hour, minute)
+ else:
+ return date.strftime('%a, %d %b %Y %H:%M:%S -0000')
+
+# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
+def rfc3339_date(date):
+ if getattr(date, "tzinfo", False):
+ time_str = date.strftime('%Y-%m-%dT%H:%M:%S')
+ offset = date.tzinfo.utcoffset(date)
+ timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
+ hour, minute = divmod(timezone, 60)
+ return time_str + "%+03d:%02d" % (hour, minute)
+ else:
+ return date.strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
+def get_tag_uri(url, date):
+ "Creates a TagURI. See http://diveintomark.org/archives/2004/05/28/howto-atom-id"
+ tag = re.sub('^http://', '', url)
+ if date is not None:
+ tag = re.sub('/', ',%s:/' % date.strftime('%Y-%m-%d'), tag, 1)
+ tag = re.sub('#', '/', tag)
+ return 'tag:' + tag
+
+
+class Attributes(object):
+ """Simple namespace for attribute dict access in mako and elsewhere"""
+ def __init__(self, a_dict):
+ self.__dict__ = a_dict
+
+
+class _Feeder(object):
+
+ content_type = None
+ template = None # subclass must provide a mako.template.Template
+
+ @classmethod
+ def render(cls, header, entries):
+ try:
+ latest_pubdate = max(
+ pubdate for pubdate in (e.get('pubdate') for e in entries)
+ if pubdate
+ )
+ except ValueError: # max() arg is an empty sequence ... or worse
+ latest_pubdate = datetime.datetime.now()
+
+ return cls.template.render(
+ language=language,
+ ttl=ttl, # rss only
+ latest_pubdate=latest_pubdate,
+ rfc2822_date=rfc2822_date, # for RSS
+ rfc3339_date=rfc3339_date, # for Atom
+ get_tag_uri=get_tag_uri,
+ entries=[Attributes(e) for e in entries],
+ **header
+ )
+
+
+class AtomFeed(_Feeder):
+
+ content_type = 'application/atom+xml'
+
+ template = mako.template.Template('''\
+
+
+ ${title}
+
+ ${link}
+ ${rfc3339_date(latest_pubdate)}
+ % for entry in entries:
+
+ ${entry.title}
+
+ ${rfc3339_date(entry.pubdate)}
+ ${rfc3339_date(entry.pubdate)}
+
+ ${entry.author_name}
+ ${entry.author_email}
+
+ ${get_tag_uri(entry.link, entry.pubdate)}
+ ${entry.description}
+
+ % endfor
+
+''', default_filters=['x'], output_encoding='utf-8', encoding_errors='replace')
+
+
+class RssFeed(_Feeder):
+
+ content_type = 'application/rss+xml'
+
+ template = mako.template.Template('''\
+
+
+
+ ${title}
+ ${link}
+ ${description}
+ ${language}
+ ${rfc2822_date(latest_pubdate)}
+ ${ttl}
+ % for entry in entries:
+
+ ${entry.title}
+ ${entry.link}
+ ${entry.description}
+ ${entry.author_email} (${entry.author_name})
+ ${rfc2822_date(entry.pubdate)}
+
+ % endfor
+
+
+''', default_filters=['x'], output_encoding='utf-8', encoding_errors='replace')
diff --git a/kallithea/lib/helpers.py b/kallithea/lib/helpers.py
--- a/kallithea/lib/helpers.py
+++ b/kallithea/lib/helpers.py
@@ -22,9 +22,8 @@ import json
import logging
import random
import re
-import StringIO
import textwrap
-import urlparse
+import urllib.parse
from beaker.cache import cache_region
from pygments import highlight as code_highlight
@@ -49,7 +48,7 @@ from kallithea.lib.markup_renderer impor
from kallithea.lib.pygmentsutils import get_custom_lexer
from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
from kallithea.lib.utils2 import age as _age
-from kallithea.lib.utils2 import credentials_filter, safe_int, safe_str, safe_unicode, str2bool, time_to_datetime
+from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
#==============================================================================
@@ -58,6 +57,25 @@ from kallithea.lib.vcs.exceptions import
from kallithea.lib.vcs.utils import author_email, author_name
+# mute pyflakes "imported but unused"
+assert Option
+assert checkbox
+assert end_form
+assert password
+assert radio
+assert submit
+assert text
+assert textarea
+assert format_byte_size
+assert chop_at
+assert wrap_paragraphs
+assert HasPermissionAny
+assert HasRepoGroupPermissionLevel
+assert HasRepoPermissionLevel
+assert time_to_datetime
+assert EmptyChangeset
+
+
log = logging.getLogger(__name__)
@@ -167,7 +185,7 @@ def select(name, selected_values, option
for x in option_list:
if isinstance(x, tuple) and len(x) == 2:
value, label = x
- elif isinstance(x, basestring):
+ elif isinstance(x, str):
value = label = x
else:
log.error('invalid select option %r', x)
@@ -177,7 +195,7 @@ def select(name, selected_values, option
for x in value:
if isinstance(x, tuple) and len(x) == 2:
group_value, group_label = x
- elif isinstance(x, basestring):
+ elif isinstance(x, str):
group_value = group_label = x
else:
log.error('invalid select option %r', x)
@@ -200,14 +218,12 @@ def FID(raw_id, path):
:param path:
"""
- return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_str(path)).hexdigest()[:12])
+ return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
class _FilesBreadCrumbs(object):
def __call__(self, repo_name, rev, paths):
- if isinstance(paths, str):
- paths = safe_unicode(paths)
url_l = [link_to(repo_name, url('files_home',
repo_name=repo_name,
revision=rev, f_path=''),
@@ -246,12 +262,12 @@ class CodeHtmlFormatter(HtmlFormatter):
yield i, t
def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO.StringIO()
+ inner_lines = []
lncount = 0
for t, line in inner:
if t:
lncount += 1
- dummyoutfile.write(line)
+ inner_lines.append(line)
fl = self.linenostart
mw = len(str(lncount + fl - 1))
@@ -304,7 +320,7 @@ class CodeHtmlFormatter(HtmlFormatter):
'
'
@@ -331,7 +347,48 @@ def pygmentize(filenode, **kwargs):
"""
lexer = get_custom_lexer(filenode.extension) or filenode.lexer
return literal(markup_whitespace(
- code_highlight(filenode.content, lexer, CodeHtmlFormatter(**kwargs))))
+ code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
+
+
+def hsv_to_rgb(h, s, v):
+ if s == 0.0:
+ return v, v, v
+ i = int(h * 6.0) # XXX assume int() truncates!
+ f = (h * 6.0) - i
+ p = v * (1.0 - s)
+ q = v * (1.0 - s * f)
+ t = v * (1.0 - s * (1.0 - f))
+ i = i % 6
+ if i == 0:
+ return v, t, p
+ if i == 1:
+ return q, v, p
+ if i == 2:
+ return p, v, t
+ if i == 3:
+ return p, q, v
+ if i == 4:
+ return t, p, v
+ if i == 5:
+ return v, p, q
+
+
+def gen_color(n=10000):
+ """generator for getting n of evenly distributed colors using
+ hsv color and golden ratio. It always return same order of colors
+
+ :returns: RGB tuple
+ """
+
+ golden_ratio = 0.618033988749895
+ h = 0.22717784590367374
+
+ for _unused in range(n):
+ h += golden_ratio
+ h %= 1
+ HSV_tuple = [h, 0.95, 0.95]
+ RGB_tuple = hsv_to_rgb(*HSV_tuple)
+ yield [str(int(x * 256)) for x in RGB_tuple]
def pygmentize_annotation(repo_name, filenode, **kwargs):
@@ -340,82 +397,38 @@ def pygmentize_annotation(repo_name, fil
:param filenode:
"""
-
+ cgenerator = gen_color()
color_dict = {}
- def gen_color(n=10000):
- """generator for getting n of evenly distributed colors using
- hsv color and golden ratio. It always return same order of colors
-
- :returns: RGB tuple
- """
-
- def hsv_to_rgb(h, s, v):
- if s == 0.0:
- return v, v, v
- i = int(h * 6.0) # XXX assume int() truncates!
- f = (h * 6.0) - i
- p = v * (1.0 - s)
- q = v * (1.0 - s * f)
- t = v * (1.0 - s * (1.0 - f))
- i = i % 6
- if i == 0:
- return v, t, p
- if i == 1:
- return q, v, p
- if i == 2:
- return p, v, t
- if i == 3:
- return p, q, v
- if i == 4:
- return t, p, v
- if i == 5:
- return v, p, q
-
- golden_ratio = 0.618033988749895
- h = 0.22717784590367374
-
- for _unused in xrange(n):
- h += golden_ratio
- h %= 1
- HSV_tuple = [h, 0.95, 0.95]
- RGB_tuple = hsv_to_rgb(*HSV_tuple)
- yield map(lambda x: str(int(x * 256)), RGB_tuple)
-
- cgenerator = gen_color()
-
def get_color_string(cs):
if cs in color_dict:
col = color_dict[cs]
else:
- col = color_dict[cs] = cgenerator.next()
+ col = color_dict[cs] = next(cgenerator)
return "color: rgb(%s)! important;" % (', '.join(col))
- def url_func(repo_name):
-
- def _url_func(changeset):
- author = escape(changeset.author)
- date = changeset.date
- message = escape(changeset.message)
- tooltip_html = ("Author: %s "
- "Date: %s "
- "Message: %s") % (author, date, message)
+ def url_func(changeset):
+ author = escape(changeset.author)
+ date = changeset.date
+ message = escape(changeset.message)
+ tooltip_html = ("Author: %s "
+ "Date: %s "
+ "Message: %s") % (author, date, message)
- lnk_format = show_id(changeset)
- uri = link_to(
- lnk_format,
- url('changeset_home', repo_name=repo_name,
- revision=changeset.raw_id),
- style=get_color_string(changeset.raw_id),
- **{'data-toggle': 'popover',
- 'data-content': tooltip_html}
- )
+ lnk_format = show_id(changeset)
+ uri = link_to(
+ lnk_format,
+ url('changeset_home', repo_name=repo_name,
+ revision=changeset.raw_id),
+ style=get_color_string(changeset.raw_id),
+ **{'data-toggle': 'popover',
+ 'data-content': tooltip_html}
+ )
- uri += '\n'
- return uri
- return _url_func
+ uri += '\n'
+ return uri
- return literal(markup_whitespace(annotate_highlight(filenode, url_func(repo_name), **kwargs)))
+ return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
class _Message(object):
@@ -424,22 +437,14 @@ class _Message(object):
Converting the message to a string returns the message text. Instances
also have the following attributes:
- * ``message``: the message text.
* ``category``: the category specified when the message was created.
+ * ``message``: the html-safe message text.
"""
def __init__(self, category, message):
self.category = category
self.message = message
- def __str__(self):
- return self.message
-
- __unicode__ = __str__
-
- def __html__(self):
- return escape(safe_unicode(self.message))
-
def _session_flash_messages(append=None, clear=False):
"""Manage a message queue in tg.session: return the current message queue
@@ -461,7 +466,7 @@ def _session_flash_messages(append=None,
return flash_messages
-def flash(message, category=None, logf=None):
+def flash(message, category, logf=None):
"""
Show a message to the user _and_ log it through the specified function
@@ -471,14 +476,22 @@ def flash(message, category=None, logf=N
logf defaults to log.info, unless category equals 'success', in which
case logf defaults to log.debug.
"""
+ assert category in ('error', 'success', 'warning'), category
+ if hasattr(message, '__html__'):
+ # render to HTML for storing in cookie
+ safe_message = str(message)
+ else:
+ # Apply str - the message might be an exception with __str__
+ # Escape, so we can trust the result without further escaping, without any risk of injection
+ safe_message = html_escape(str(message))
if logf is None:
logf = log.info
if category == 'success':
logf = log.debug
- logf('Flash %s: %s', category, message)
+ logf('Flash %s: %s', category, safe_message)
- _session_flash_messages(append=(category, message))
+ _session_flash_messages(append=(category, safe_message))
def pop_flash_messages():
@@ -486,14 +499,22 @@ def pop_flash_messages():
The return value is a list of ``Message`` objects.
"""
- return [_Message(*m) for m in _session_flash_messages(clear=True)]
+ return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
-age = lambda x, y=False: _age(x, y)
-capitalize = lambda x: x.capitalize()
+def age(x, y=False):
+ return _age(x, y)
+
+def capitalize(x):
+ return x.capitalize()
+
email = author_email
-short_id = lambda x: x[:12]
-hide_credentials = lambda x: ''.join(credentials_filter(x))
+
+def short_id(x):
+ return x[:12]
+
+def hide_credentials(x):
+ return ''.join(credentials_filter(x))
def show_id(cs):
@@ -516,8 +537,7 @@ def show_id(cs):
def fmt_date(date):
if date:
- return date.strftime("%Y-%m-%d %H:%M:%S").decode('utf-8')
-
+ return date.strftime("%Y-%m-%d %H:%M:%S")
return ""
@@ -548,7 +568,7 @@ def user_attr_or_none(author, show_attr)
email = author_email(author)
if email:
from kallithea.model.db import User
- user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short
+ user = User.get_by_email(email)
if user is not None:
return getattr(user, show_attr)
return None
@@ -590,15 +610,12 @@ def person(author, show_attr="username")
def person_by_id(id_, show_attr="username"):
from kallithea.model.db import User
- # attr to return from fetched user
- person_getter = lambda usr: getattr(usr, show_attr)
-
# maybe it's an ID ?
if str(id_).isdigit() or isinstance(id_, int):
id_ = int(id_)
user = User.get(id_)
if user is not None:
- return person_getter(user)
+ return getattr(user, show_attr)
return id_
@@ -677,7 +694,7 @@ def action_parser(user_log, feed=False,
return _op, _name
revs = []
- if len(filter(lambda v: v != '', revs_ids)) > 0:
+ if len([v for v in revs_ids if v != '']) > 0:
repo = None
for rev in revs_ids[:revs_top_limit]:
_op, _name = _get_op(rev)
@@ -850,10 +867,7 @@ def action_parser(user_log, feed=False,
.replace('[', '') \
.replace(']', '')
- action_params_func = lambda: ""
-
- if callable(action_str[1]):
- action_params_func = action_str[1]
+ action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "")
def action_parser_icon():
action = user_log.action
@@ -937,13 +951,13 @@ def gravatar_url(email_address, size=30,
if email_address == _def:
return default
- parsed_url = urlparse.urlparse(url.current(qualified=True))
+ parsed_url = urllib.parse.urlparse(url.current(qualified=True))
url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
.replace('{email}', email_address) \
- .replace('{md5email}', hashlib.md5(safe_str(email_address).lower()).hexdigest()) \
+ .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
.replace('{netloc}', parsed_url.netloc) \
.replace('{scheme}', parsed_url.scheme) \
- .replace('{size}', safe_str(size))
+ .replace('{size}', str(size))
return url
@@ -959,7 +973,7 @@ def changed_tooltip(nodes):
suf = ''
if len(nodes) > 30:
suf = ' ' + _(' and %s more') % (len(nodes) - 30)
- return literal(pref + ' '.join([safe_unicode(x.path)
+ return literal(pref + ' '.join([x.path
for x in nodes[:30]]) + suf)
else:
return ': ' + _('No files')
@@ -1069,6 +1083,8 @@ def urlify_text(s, repo_name=None, link_
URLs links to what they say.
Issues are linked to given issue-server.
If link_ is provided, all text not already linking somewhere will link there.
+ >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* markup/b>")
+ literal('Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>')
"""
def _replace(match_obj):
@@ -1162,10 +1178,11 @@ def urlify_issues(newtext, repo_name):
assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
# Build chain of urlify functions, starting with not doing any transformation
- tmp_urlify_issues_f = lambda s: s
+ def tmp_urlify_issues_f(s):
+ return s
issue_pat_re = re.compile(r'issue_pat(.*)')
- for k in CONFIG.keys():
+ for k in CONFIG:
# Find all issue_pat* settings that also have corresponding server_link and prefix configuration
m = issue_pat_re.match(k)
if m is None:
@@ -1214,9 +1231,9 @@ def urlify_issues(newtext, repo_name):
'url': issue_url,
'text': issue_text,
}
- tmp_urlify_issues_f = (lambda s,
- issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f:
- issue_re.sub(issues_replace, chain_f(s)))
+
+ def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
+ return issue_re.sub(issues_replace, chain_f(s))
# Set tmp function globally - atomically
_urlify_issues_f = tmp_urlify_issues_f
@@ -1229,7 +1246,7 @@ def render_w_mentions(source, repo_name=
Render plain text with revision hashes and issue references urlified
and with @mention highlighting.
"""
- s = safe_unicode(source)
+ s = safe_str(source)
s = urlify_text(s, repo_name=repo_name)
return literal('
%s
' % s)
diff --git a/kallithea/lib/hooks.py b/kallithea/lib/hooks.py
--- a/kallithea/lib/hooks.py
+++ b/kallithea/lib/hooks.py
@@ -25,17 +25,17 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-import binascii
import os
import sys
import time
+import mercurial.scmutil
+
from kallithea.lib import helpers as h
from kallithea.lib.exceptions import UserCreationError
-from kallithea.lib.utils import action_logger, make_ui, setup_cache_regions
-from kallithea.lib.utils2 import HookEnvironmentError, get_hook_environment, safe_str, safe_unicode
+from kallithea.lib.utils import action_logger, make_ui
+from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str
from kallithea.lib.vcs.backends.base import EmptyChangeset
-from kallithea.lib.vcs.utils.hgcompat import revrange
from kallithea.model.db import Repository, User
@@ -44,7 +44,7 @@ def _get_scm_size(alias, root_path):
alias += '.'
size_scm, size_root = 0, 0
- for path, dirs, files in os.walk(safe_str(root_path)):
+ for path, dirs, files in os.walk(root_path):
if path.find(alias) != -1:
for f in files:
try:
@@ -66,16 +66,16 @@ def _get_scm_size(alias, root_path):
def repo_size(ui, repo, hooktype=None, **kwargs):
- """Presents size of repository after push"""
- size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
+ """Show size of Mercurial repository, to be called after push."""
+ size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', safe_str(repo.root))
last_cs = repo[len(repo) - 1]
msg = ('Repository size .hg: %s Checkout: %s Total: %s\n'
'Last revision is now r%s:%s\n') % (
- size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
+ size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12]
)
- ui.status(msg)
+ ui.status(safe_bytes(msg))
def log_pull_action(ui, repo, **kwargs):
@@ -110,8 +110,7 @@ def log_push_action(ui, repo, node, node
Note: This hook is not only logging, but also the side effect invalidating
cahes! The function should perhaps be renamed.
"""
- _h = binascii.hexlify
- revs = [_h(repo[r].node()) for r in revrange(repo, [node + ':' + node_last])]
+ revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])]
process_pushed_raw_ids(revs)
return 0
@@ -303,31 +302,23 @@ def _hook_environment(repo_path):
they thus need enough info to be able to create an app environment and
connect to the database.
"""
- from paste.deploy import appconfig
- from sqlalchemy import engine_from_config
- from kallithea.config.environment import load_environment
- from kallithea.model.base import init_model
+ import paste.deploy
+ import kallithea.config.middleware
extras = get_hook_environment()
- ini_file_path = extras['config']
- #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
- app_conf = appconfig('config:%s' % ini_file_path)
- conf = load_environment(app_conf.global_conf, app_conf.local_conf)
- setup_cache_regions(conf)
+ path_to_ini_file = extras['config']
+ kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
+ #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
+ kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
- engine = engine_from_config(conf, 'sqlalchemy.')
- init_model(engine)
-
- repo_path = safe_unicode(repo_path)
# fix if it's not a bare repo
if repo_path.endswith(os.sep + '.git'):
repo_path = repo_path[:-5]
repo = Repository.get_by_full_path(repo_path)
if not repo:
- raise OSError('Repository %s not found in database'
- % (safe_str(repo_path)))
+ raise OSError('Repository %s not found in database' % repo_path)
baseui = make_ui()
return baseui, repo
@@ -368,19 +359,20 @@ def handle_git_post_receive(repo_path, g
if push_ref['old_rev'] == EmptyChangeset().raw_id:
# update the symbolic ref if we push new repo
if scm_repo.is_empty():
- scm_repo._repo.refs.set_symbolic_ref('HEAD',
- 'refs/heads/%s' % push_ref['name'])
+ scm_repo._repo.refs.set_symbolic_ref(
+ b'HEAD',
+ b'refs/heads/%s' % safe_bytes(push_ref['name']))
# build exclude list without the ref
cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*']
- stdout, stderr = scm_repo.run_git_command(cmd)
+ stdout = scm_repo.run_git_command(cmd)
ref = push_ref['ref']
heads = [head for head in stdout.splitlines() if head != ref]
# now list the git revs while excluding from the list
cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H']
cmd.append('--not')
cmd.extend(heads) # empty list is ok
- stdout, stderr = scm_repo.run_git_command(cmd)
+ stdout = scm_repo.run_git_command(cmd)
git_revs += stdout.splitlines()
elif push_ref['new_rev'] == EmptyChangeset().raw_id:
@@ -389,7 +381,7 @@ def handle_git_post_receive(repo_path, g
else:
cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
'--reverse', '--pretty=format:%H']
- stdout, stderr = scm_repo.run_git_command(cmd)
+ stdout = scm_repo.run_git_command(cmd)
git_revs += stdout.splitlines()
elif _type == 'tags':
@@ -404,5 +396,5 @@ def handle_git_post_receive(repo_path, g
def rejectpush(ui, **kwargs):
"""Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos"""
ex = get_hook_environment()
- ui.warn((b"Push access to %r denied\n") % safe_str(ex.repository))
+ ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository))
return 1
diff --git a/kallithea/lib/indexers/__init__.py b/kallithea/lib/indexers/__init__.py
--- a/kallithea/lib/indexers/__init__.py
+++ b/kallithea/lib/indexers/__init__.py
@@ -146,7 +146,7 @@ class WhooshResultWrapper(object):
docnum = self.matcher.id()
chunks = [offsets for offsets in self.get_chunks()]
docs_id.append([docnum, chunks])
- self.matcher.next()
+ self.matcher.next() # this looks like a py2 iterator ... but it isn't
return docs_id
def __str__(self):
@@ -203,7 +203,7 @@ class WhooshResultWrapper(object):
return res
def get_short_content(self, res, chunks):
- return u''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
+ return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
def get_chunks(self):
"""
diff --git a/kallithea/lib/indexers/daemon.py b/kallithea/lib/indexers/daemon.py
--- a/kallithea/lib/indexers/daemon.py
+++ b/kallithea/lib/indexers/daemon.py
@@ -39,8 +39,8 @@ from whoosh.qparser import QueryParser
from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES
from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA
-from kallithea.lib.utils2 import safe_str, safe_unicode
-from kallithea.lib.vcs.exceptions import ChangesetError, NodeDoesNotExistError, RepositoryError
+from kallithea.lib.utils2 import safe_str
+from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, NodeDoesNotExistError, RepositoryError
from kallithea.model.db import Repository
from kallithea.model.scm import ScmModel
@@ -77,8 +77,7 @@ class WhooshIndexingDaemon(object):
# filter repo list
if repo_list:
- # Fix non-ascii repo names to unicode
- repo_list = map(safe_unicode, repo_list)
+ repo_list = set(repo_list)
self.filtered_repo_paths = {}
for repo_name, repo in self.repo_paths.items():
if repo_name in repo_list:
@@ -110,7 +109,7 @@ class WhooshIndexingDaemon(object):
self.initial = False
def _get_index_revision(self, repo):
- db_repo = Repository.get_by_repo_name(repo.name_unicode)
+ db_repo = Repository.get_by_repo_name(repo.name)
landing_rev = 'tip'
if db_repo:
_rev_type, _rev = db_repo.landing_rev
@@ -133,7 +132,7 @@ class WhooshIndexingDaemon(object):
cs = self._get_index_changeset(repo)
for _topnode, _dirs, files in cs.walk('/'):
for f in files:
- index_paths_.add(os.path.join(safe_str(repo.path), safe_str(f.path)))
+ index_paths_.add(os.path.join(repo.path, f.path))
except RepositoryError:
log.debug(traceback.format_exc())
@@ -142,19 +141,16 @@ class WhooshIndexingDaemon(object):
def get_node(self, repo, path, index_rev=None):
"""
- gets a filenode based on given full path. It operates on string for
- hg git compatibility.
+ gets a filenode based on given full path.
:param repo: scm repo instance
:param path: full path including root location
:return: FileNode
"""
# FIXME: paths should be normalized ... or even better: don't include repo.path
- path = safe_str(path)
- repo_path = safe_str(repo.path)
- assert path.startswith(repo_path)
- assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
- node_path = path[len(repo_path) + 1:]
+ assert path.startswith(repo.path)
+ assert path[len(repo.path)] in (os.path.sep, os.path.altsep)
+ node_path = path[len(repo.path) + 1:]
cs = self._get_index_changeset(repo, index_rev=index_rev)
node = cs.get_node(node_path)
return node
@@ -182,27 +178,27 @@ class WhooshIndexingDaemon(object):
indexed = indexed_w_content = 0
if self.is_indexable_node(node):
- u_content = node.content
- if not isinstance(u_content, unicode):
+ bytes_content = node.content
+ if b'\0' in bytes_content:
log.warning(' >> %s - no text content', path)
- u_content = u''
+ u_content = ''
else:
log.debug(' >> %s', path)
+ u_content = safe_str(bytes_content)
indexed_w_content += 1
else:
log.debug(' >> %s - not indexable', path)
# just index file name without it's content
- u_content = u''
+ u_content = ''
indexed += 1
- p = safe_unicode(path)
writer.add_document(
- fileid=p,
- owner=unicode(repo.contact),
- repository_rawname=safe_unicode(repo_name),
- repository=safe_unicode(repo_name),
- path=p,
+ fileid=path,
+ owner=repo.contact,
+ repository_rawname=repo_name,
+ repository=repo_name,
+ path=path,
content=u_content,
modtime=self.get_node_mtime(node),
extension=node.extension
@@ -237,18 +233,18 @@ class WhooshIndexingDaemon(object):
indexed += 1
log.debug(' >> %s %s/%s', cs, indexed, total)
writer.add_document(
- raw_id=unicode(cs.raw_id),
- owner=unicode(repo.contact),
+ raw_id=cs.raw_id,
+ owner=repo.contact,
date=cs._timestamp,
- repository_rawname=safe_unicode(repo_name),
- repository=safe_unicode(repo_name),
+ repository_rawname=repo_name,
+ repository=repo_name,
author=cs.author,
message=cs.message,
last=cs.last,
- added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
- removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
- changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
- parents=u' '.join([cs.raw_id for cs in cs.parents]),
+ added=' '.join(node.path for node in cs.added).lower(),
+ removed=' '.join(node.path for node in cs.removed).lower(),
+ changed=' '.join(node.path for node in cs.changed).lower(),
+ parents=' '.join(cs.raw_id for cs in cs.parents),
)
return indexed
@@ -291,7 +287,7 @@ class WhooshIndexingDaemon(object):
continue
qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
- q = qp.parse(u"last:t AND %s" % repo_name)
+ q = qp.parse("last:t AND %s" % repo_name)
results = searcher.search(q)
@@ -303,14 +299,18 @@ class WhooshIndexingDaemon(object):
# assuming that there is only one result, if not this
# may require a full re-index.
start_id = results[0]['raw_id']
- last_rev = repo.get_changeset(revision=start_id).revision
+ try:
+ last_rev = repo.get_changeset(revision=start_id).revision
+ except ChangesetDoesNotExistError:
+ log.error('previous last revision %s not found - indexing from scratch', start_id)
+ start_id = None
# there are new changesets to index or a new repo to index
if last_rev == 0 or num_of_revs > last_rev + 1:
# delete the docs in the index for the previous
# last changeset(s)
for hit in results:
- q = qp.parse(u"last:t AND %s AND raw_id:%s" %
+ q = qp.parse("last:t AND %s AND raw_id:%s" %
(repo_name, hit['raw_id']))
writer.delete_by_query(q)
@@ -330,8 +330,8 @@ class WhooshIndexingDaemon(object):
log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX<<')
def update_file_index(self):
- log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
- 'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys()))
+ log.debug('STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
+ 'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
idx = open_dir(self.index_location, indexname=self.indexname)
# The set of all paths in the index
@@ -390,9 +390,7 @@ class WhooshIndexingDaemon(object):
ri_cnt = 0 # indexed
riwc_cnt = 0 # indexed with content
for path in self.get_paths(repo):
- path = safe_unicode(path)
if path in to_index or path not in indexed_paths:
-
# This is either a file that's changed, or a new file
# that wasn't indexed before. So index it!
i, iwc = self.add_doc(writer, path, repo, repo_name)
@@ -431,7 +429,7 @@ class WhooshIndexingDaemon(object):
file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
file_idx_writer = file_idx.writer()
log.debug('BUILDING INDEX FOR EXTENSIONS %s '
- 'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys()))
+ 'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
for repo_name, repo in sorted(self.repo_paths.items()):
log.debug('Updating indices for repo %s', repo_name)
diff --git a/kallithea/lib/inifile.py b/kallithea/lib/inifile.py
--- a/kallithea/lib/inifile.py
+++ b/kallithea/lib/inifile.py
@@ -42,6 +42,10 @@ default_variables = {
'uuid': lambda: 'VERY-SECRET',
}
+variable_options = {
+ 'database_engine': ['sqlite', 'postgres', 'mysql'],
+ 'http_server': ['waitress', 'gearbox', 'gevent', 'gunicorn', 'uwsgi'],
+}
def expand(template, mako_variable_values, settings):
"""Expand mako template and tweak it.
@@ -63,16 +67,27 @@ def expand(template, mako_variable_value
... %elif conditional_options == 'option-b':
... some_variable = "never mind - option-b will not be used anyway ..."
... %endif
+ ...
+ ... [comment-section]
+ ... #variable3 = 3.0
+ ... #variable4 = 4.0
+ ... #variable5 = 5.0
+ ... variable5 = 5.1
+ ... #variable6 = 6.0
+ ... #variable6 = 6.1
+ ... #variable7 = 7.0
+ ... variable7 = 7.1
... '''
- >>> selected_mako_conditionals = []
>>> mako_variable_values = {'mako_variable': 'VALUE', 'mako_function': (lambda: 'FUNCTION RESULT'),
- ... 'conditional_options': 'option-a'}
+ ... 'conditional_options': 'option-a', 'http_server': 'nc'}
>>> settings = { # only partially used
... '[first-section]': {'variable2': 'VAL2', 'first_extra': 'EXTRA'},
+ ... '[comment-section]': {'variable3': '3.0', 'variable4': '4.1', 'variable5': '5.2', 'variable6': '6.2', 'variable7': '7.0'},
... '[third-section]': {'third_extra': ' 3'},
... '[fourth-section]': {'fourth_extra': '4', 'fourth': '"four"'},
... }
- >>> print expand(template, mako_variable_values, settings)
+ >>> print(expand(template, mako_variable_values, settings))
+ ERROR: http_server is 'nc' - it should be one of 'waitress', 'gearbox', 'gevent', 'gunicorn', 'uwsgi'
[first-section]
@@ -87,6 +102,19 @@ def expand(template, mako_variable_value
[second-section]
# option a was chosen
+ [comment-section]
+ variable3 = 3.0
+ #variable4 = 4.0
+ variable4 = 4.1
+ #variable5 = 5.0
+ #variable5 = 5.1
+ variable5 = 5.2
+ #variable6 = 6.0
+ #variable6 = 6.1
+ variable6 = 6.2
+ variable7 = 7.0
+ #variable7 = 7.1
+
[fourth-section]
fourth = "four"
fourth_extra = 4
@@ -99,6 +127,12 @@ def expand(template, mako_variable_value
mako_variables.update(mako_variable_values or {})
settings = dict((k, dict(v)) for k, v in settings.items()) # deep copy before mutating
+ for key, value in mako_variables.items():
+ if key in variable_options:
+ if value not in variable_options[key]:
+ print('ERROR: %s is %r - it should be one of %s' %
+ (key, value, ', '.join(repr(x) for x in variable_options[key])))
+
ini_lines = mako.template.Template(template).render(**mako_variables)
def process_section(m):
@@ -106,20 +140,44 @@ def expand(template, mako_variable_value
sectionname, lines = m.groups()
if sectionname in settings:
section_settings = settings.pop(sectionname)
+ add_after_key_value = {} # map key to value it should be added after
- def process_line(m):
- """process a section line and update value if necessary"""
- key, value = m.groups()
+ # 1st pass:
+ # comment out lines with keys that have new values
+ # find the best line to keep or un-comment (because it already has the right value), or to add after (because it is the last line with a different value)
+ def comment_out(m):
+ """process a section line if in section_settings and comment out and track in add_after_key_value"""
line = m.group(0)
- if key in section_settings:
- new_line = '%s = %s' % (key, section_settings.pop(key))
- if new_line != line:
- # keep old entry as example - comments might refer to it
- line = '#%s\n%s' % (line, new_line)
- return line.rstrip()
+ comment, key, line_value = m.groups()
+ if key not in section_settings:
+ return line
+ new_value = section_settings[key]
+ if line_value == new_value or add_after_key_value.get(key) != new_value:
+ add_after_key_value[key] = line_value
+ if comment:
+ return line
+ return '#' + line
+
+ lines = re.sub(r'^(#)?([^#\n\s]*)[ \t]*=[ \t]*(.*)$', comment_out, lines, flags=re.MULTILINE)
- # process lines that not are comments or empty and look like name=value
- lines = re.sub(r'^([^#\n\s]*)[ \t]*=[ \t]*(.*)$', process_line, lines, flags=re.MULTILINE)
+ def add_after_comment(m):
+ """process a section comment line and add new value"""
+ line = m.group(0)
+ key, line_value = m.groups()
+ if key not in section_settings:
+ return line
+ if line_value != add_after_key_value.get(key):
+ return line
+ new_value = section_settings[key]
+ if new_value == line_value:
+ line = line.lstrip('#')
+ else:
+ line += '\n%s = %s' % (key, new_value)
+ section_settings.pop(key)
+ return line
+
+ lines = re.sub(r'^#([^#\n\s]*)[ \t]*=[ \t]*(.*)$', add_after_comment, lines, flags=re.MULTILINE)
+
# add unused section settings
if section_settings:
lines += '\n' + ''.join('%s = %s\n' % (key, value) for key, value in sorted(section_settings.items()))
diff --git a/kallithea/lib/locale.py b/kallithea/lib/locale.py
--- a/kallithea/lib/locale.py
+++ b/kallithea/lib/locale.py
@@ -24,7 +24,7 @@ def current_locale_is_valid():
Note: UTF-8 is preferred, but for example ISO-8859-1 or mbcs should also
work under the right circumstances."""
try:
- u'\xe9'.encode(sys.getfilesystemencoding()) # Test using é (é)
+ '\xe9'.encode(sys.getfilesystemencoding()) # Test using é (é)
except UnicodeEncodeError:
log.error("Cannot encode Unicode paths to file system encoding %r", sys.getfilesystemencoding())
for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
diff --git a/kallithea/lib/markup_renderer.py b/kallithea/lib/markup_renderer.py
--- a/kallithea/lib/markup_renderer.py
+++ b/kallithea/lib/markup_renderer.py
@@ -33,7 +33,7 @@ import traceback
import bleach
import markdown as markdown_mod
-from kallithea.lib.utils2 import MENTIONS_REGEX, safe_unicode
+from kallithea.lib.utils2 import MENTIONS_REGEX, safe_str
log = logging.getLogger(__name__)
@@ -119,17 +119,17 @@ class MarkupRenderer(object):
At last it will just do a simple html replacing new lines with
>>> MarkupRenderer.render('''''', '.md')
- u'
"""
- source = safe_unicode(source)
+ source = safe_str(source)
try:
if flavored:
source = cls._flavored_markdown(source)
@@ -209,7 +213,7 @@ class MarkupRenderer(object):
@classmethod
def rst(cls, source, safe=True):
- source = safe_unicode(source)
+ source = safe_str(source)
try:
from docutils.core import publish_parts
from docutils.parsers.rst import directives
@@ -219,7 +223,7 @@ class MarkupRenderer(object):
docutils_settings.update({'input_encoding': 'unicode',
'report_level': 4})
- for k, v in docutils_settings.iteritems():
+ for k, v in docutils_settings.items():
directives.register_directive(k, v)
parts = publish_parts(source=source,
diff --git a/kallithea/lib/middleware/permanent_repo_url.py b/kallithea/lib/middleware/permanent_repo_url.py
--- a/kallithea/lib/middleware/permanent_repo_url.py
+++ b/kallithea/lib/middleware/permanent_repo_url.py
@@ -20,7 +20,8 @@ middleware to handle permanent repo URLs
"""
-from kallithea.lib.utils import fix_repo_id_name, safe_str
+from kallithea.lib.utils import fix_repo_id_name
+from kallithea.lib.utils2 import safe_bytes, safe_str
class PermanentRepoUrl(object):
@@ -30,9 +31,11 @@ class PermanentRepoUrl(object):
self.config = config
def __call__(self, environ, start_response):
- path_info = environ['PATH_INFO']
+ # Extract path_info as get_path_info does, but do it explicitly because
+ # we also have to do the reverse operation when patching it back in
+ path_info = safe_str(environ['PATH_INFO'].encode('latin1'))
if path_info.startswith('/'): # it must
- path_info = '/' + safe_str(fix_repo_id_name(path_info[1:]))
- environ['PATH_INFO'] = path_info
+ path_info = '/' + fix_repo_id_name(path_info[1:])
+ environ['PATH_INFO'] = safe_bytes(path_info).decode('latin1')
return self.application(environ, start_response)
diff --git a/kallithea/lib/middleware/pygrack.py b/kallithea/lib/middleware/pygrack.py
--- a/kallithea/lib/middleware/pygrack.py
+++ b/kallithea/lib/middleware/pygrack.py
@@ -33,7 +33,7 @@ import traceback
from webob import Request, Response, exc
import kallithea
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.utils2 import ascii_bytes
from kallithea.lib.vcs import subprocessio
@@ -87,7 +87,6 @@ class GitRepository(object):
:param path:
"""
- path = safe_unicode(path)
assert path.startswith('/' + self.repo_name + '/')
return path[len(self.repo_name) + 2:].strip('/')
@@ -113,14 +112,14 @@ class GitRepository(object):
# ref_list
# "0000"
server_advert = '# service=%s\n' % git_command
- packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
+ packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower()
_git_path = kallithea.CONFIG.get('git_path', 'git')
cmd = [_git_path, git_command[4:],
'--stateless-rpc', '--advertise-refs', self.content_path]
log.debug('handling cmd %s', cmd)
try:
out = subprocessio.SubprocessIOChunker(cmd,
- starting_values=[packet_len + server_advert + '0000']
+ starting_values=[ascii_bytes(packet_len + server_advert + '0000')]
)
except EnvironmentError as e:
log.error(traceback.format_exc())
@@ -166,7 +165,7 @@ class GitRepository(object):
log.error(traceback.format_exc())
raise exc.HTTPExpectationFailed()
- if git_command in [u'git-receive-pack']:
+ if git_command in ['git-receive-pack']:
# updating refs manually after each push.
# Needed for pre-1.7.0.4 git clients using regular HTTP mode.
from kallithea.lib.vcs import get_repo
@@ -186,7 +185,7 @@ class GitRepository(object):
_path = self._get_fixedpath(req.path_info)
if _path.startswith('info/refs'):
app = self.inforefs
- elif [a for a in self.valid_accepts if a in req.accept]:
+ elif req.accept.acceptable_offers(self.valid_accepts):
app = self.backend
try:
resp = app(req, environ)
diff --git a/kallithea/lib/middleware/sessionmiddleware.py b/kallithea/lib/middleware/sessionmiddleware.py
deleted file mode 100644
--- a/kallithea/lib/middleware/sessionmiddleware.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see .
-"""
-kallithea.lib.middleware.sessionmiddleware
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-session management middleware
-
-This file overrides Beaker's built-in SessionMiddleware
-class to automagically use secure cookies over HTTPS.
-
-Original Beaker SessionMiddleware class written by Ben Bangert
-"""
-
-from beaker.middleware import SessionMiddleware
-from beaker.session import SessionObject
-
-
-class SecureSessionMiddleware(SessionMiddleware):
- def __call__(self, environ, start_response):
- """
- This function's implementation is taken directly from Beaker,
- with HTTPS detection added. When accessed over HTTPS, force
- setting cookie's secure flag.
-
- The only difference from that original code is that we switch
- the secure option on and off depending on the URL scheme (first
- two lines). To avoid concurrency issues, we use a local options
- variable.
- """
- options = dict(self.options)
- options["secure"] = environ['wsgi.url_scheme'] == 'https'
-
- session = SessionObject(environ, **options)
- if environ.get('paste.registry'):
- if environ['paste.registry'].reglist:
- environ['paste.registry'].register(self.session, session)
- environ[self.environ_key] = session
- environ['beaker.get_session'] = self._get_session
-
- if 'paste.testing_variables' in environ and 'webtest_varname' in options:
- environ['paste.testing_variables'][options['webtest_varname']] = session
-
- def session_start_response(status, headers, exc_info=None):
- if session.accessed():
- session.persist()
- if session.__dict__['_headers']['set_cookie']:
- cookie = session.__dict__['_headers']['cookie_out']
- if cookie:
- headers.append(('Set-cookie', cookie))
- return start_response(status, headers, exc_info)
- return self.wrap_app(environ, session_start_response)
diff --git a/kallithea/lib/middleware/simplegit.py b/kallithea/lib/middleware/simplegit.py
--- a/kallithea/lib/middleware/simplegit.py
+++ b/kallithea/lib/middleware/simplegit.py
@@ -31,11 +31,10 @@ Original author and date, and relevant c
import logging
import re
-from kallithea.lib.base import BaseVCSController
+from kallithea.lib.base import BaseVCSController, get_path_info
from kallithea.lib.hooks import log_pull_action
from kallithea.lib.middleware.pygrack import make_wsgi_app
from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_unicode
from kallithea.model.db import Repository
@@ -57,14 +56,14 @@ class SimpleGit(BaseVCSController):
@classmethod
def parse_request(cls, environ):
- path_info = environ.get('PATH_INFO', '')
+ path_info = get_path_info(environ)
m = GIT_PROTO_PAT.match(path_info)
if m is None:
return None
class parsed_request(object):
# See https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols#_the_smart_protocol
- repo_name = safe_unicode(m.group(1).rstrip('/'))
+ repo_name = m.group(1).rstrip('/')
cmd = m.group(2)
query_string = environ['QUERY_STRING']
diff --git a/kallithea/lib/middleware/simplehg.py b/kallithea/lib/middleware/simplehg.py
--- a/kallithea/lib/middleware/simplehg.py
+++ b/kallithea/lib/middleware/simplehg.py
@@ -30,12 +30,13 @@ Original author and date, and relevant c
import logging
import os
-import urllib
+import urllib.parse
+
+import mercurial.hgweb
-from kallithea.lib.base import BaseVCSController
+from kallithea.lib.base import BaseVCSController, get_path_info
from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import hgweb_mod
+from kallithea.lib.utils2 import safe_bytes
log = logging.getLogger(__name__)
@@ -99,12 +100,12 @@ class SimpleHg(BaseVCSController):
http_accept = environ.get('HTTP_ACCEPT', '')
if not http_accept.startswith('application/mercurial'):
return None
- path_info = environ.get('PATH_INFO', '')
+ path_info = get_path_info(environ)
if not path_info.startswith('/'): # it must!
return None
class parsed_request(object):
- repo_name = safe_unicode(path_info[1:].rstrip('/'))
+ repo_name = path_info[1:].rstrip('/')
query_string = environ['QUERY_STRING']
@@ -120,7 +121,7 @@ class SimpleHg(BaseVCSController):
break
action = 'pull'
for cmd_arg in hgarg[5:].split(';'):
- cmd, _args = urllib.unquote_plus(cmd_arg).split(' ', 1)
+ cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1)
op = cmd_mapping.get(cmd, 'push')
if op != 'pull':
assert op == 'push'
@@ -136,13 +137,13 @@ class SimpleHg(BaseVCSController):
"""
Make an hgweb wsgi application.
"""
- str_repo_name = safe_str(parsed_request.repo_name)
- repo_path = os.path.join(safe_str(self.basepath), str_repo_name)
+ repo_name = parsed_request.repo_name
+ repo_path = os.path.join(self.basepath, repo_name)
baseui = make_ui(repo_path=repo_path)
- hgweb_app = hgweb_mod.hgweb(repo_path, name=str_repo_name, baseui=baseui)
+ hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
def wrapper_app(environ, start_response):
- environ['REPO_NAME'] = str_repo_name # used by hgweb_mod.hgweb
+ environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb
return hgweb_app(environ, start_response)
return wrapper_app
diff --git a/kallithea/lib/middleware/wrapper.py b/kallithea/lib/middleware/wrapper.py
--- a/kallithea/lib/middleware/wrapper.py
+++ b/kallithea/lib/middleware/wrapper.py
@@ -29,8 +29,7 @@ Original author and date, and relevant c
import logging
import time
-from kallithea.lib.base import _get_access_path, _get_ip_addr
-from kallithea.lib.utils2 import safe_unicode
+from kallithea.lib.base import _get_ip_addr, get_path_info
log = logging.getLogger(__name__)
@@ -41,12 +40,14 @@ class Meter:
def __init__(self, start_response):
self._start_response = start_response
self._start = time.time()
+ self.status = None
self._size = 0
def duration(self):
return time.time() - self._start
def start_response(self, status, response_headers, exc_info=None):
+ self.status = status
write = self._start_response(status, response_headers, exc_info)
def metered_write(s):
self.measure(s)
@@ -64,21 +65,21 @@ class ResultIter:
def __init__(self, result, meter, description):
self._result_close = getattr(result, 'close', None) or (lambda: None)
- self._next = iter(result).next
+ self._next = iter(result).__next__
self._meter = meter
self._description = description
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
chunk = self._next()
self._meter.measure(chunk)
return chunk
def close(self):
self._result_close()
- log.info("%s responded after %.3fs with %s bytes", self._description, self._meter.duration(), self._meter.size())
+ log.info("%s responded %r after %.3fs with %s bytes", self._description, self._meter.status, self._meter.duration(), self._meter.size())
class RequestWrapper(object):
@@ -91,10 +92,11 @@ class RequestWrapper(object):
meter = Meter(start_response)
description = "Request from %s for %s" % (
_get_ip_addr(environ),
- safe_unicode(_get_access_path(environ)),
+ get_path_info(environ),
)
+ log.info("%s received", description)
try:
result = self.application(environ, meter.start_response)
finally:
- log.info("%s responding after %.3fs", description, meter.duration())
+ log.info("%s responding %r after %.3fs", description, meter.status, meter.duration())
return ResultIter(result, meter, description)
diff --git a/kallithea/lib/page.py b/kallithea/lib/page.py
--- a/kallithea/lib/page.py
+++ b/kallithea/lib/page.py
@@ -15,11 +15,11 @@
Custom paging classes
"""
import logging
-import math
-import re
-from webhelpers2.html import HTML, literal
-from webhelpers.paginate import Page as _Page
+import paginate
+import paginate_sqlalchemy
+import sqlalchemy.orm
+from webhelpers2.html import literal
from kallithea.config.routing import url
@@ -27,229 +27,36 @@ from kallithea.config.routing import url
log = logging.getLogger(__name__)
-class Page(_Page):
- """
- Custom pager emitting Bootstrap paginators
- """
-
- def __init__(self, *args, **kwargs):
- kwargs.setdefault('url', url.current)
- _Page.__init__(self, *args, **kwargs)
-
- def _get_pos(self, cur_page, max_page, items):
- edge = (items / 2) + 1
- if (cur_page <= edge):
- radius = max(items / 2, items - cur_page)
- elif (max_page - cur_page) < edge:
- radius = (items - 1) - (max_page - cur_page)
- else:
- radius = items / 2
-
- left = max(1, (cur_page - (radius)))
- right = min(max_page, cur_page + (radius))
- return left, cur_page, right
-
- def _range(self, regexp_match):
- """
- Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
-
- Arguments:
-
- regexp_match
- A "re" (regular expressions) match object containing the
- radius of linked pages around the current page in
- regexp_match.group(1) as a string
-
- This function is supposed to be called as a callable in
- re.sub.
-
- """
- radius = int(regexp_match.group(1))
-
- # Compute the first and last page number within the radius
- # e.g. '1 .. 5 6 [7] 8 9 .. 12'
- # -> leftmost_page = 5
- # -> rightmost_page = 9
- leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
- self.last_page,
- (radius * 2) + 1)
- nav_items = []
-
- # Create a link to the first page (unless we are on the first page
- # or there would be no need to insert '..' spacers)
- if self.page != self.first_page and self.first_page < leftmost_page:
- nav_items.append(HTML.li(self._pagerlink(self.first_page, self.first_page)))
+class Page(paginate.Page):
- # Insert dots if there are pages between the first page
- # and the currently displayed page range
- if leftmost_page - self.first_page > 1:
- # Wrap in a SPAN tag if nolink_attr is set
- text_ = '..'
- if self.dotdot_attr:
- text_ = HTML.span(c=text_, **self.dotdot_attr)
- nav_items.append(HTML.li(text_))
-
- for thispage in xrange(leftmost_page, rightmost_page + 1):
- # Highlight the current page number and do not use a link
- text_ = str(thispage)
- if thispage == self.page:
- # Wrap in a SPAN tag if nolink_attr is set
- if self.curpage_attr:
- text_ = HTML.li(HTML.span(c=text_), **self.curpage_attr)
- nav_items.append(text_)
- # Otherwise create just a link to that page
- else:
- nav_items.append(HTML.li(self._pagerlink(thispage, text_)))
-
- # Insert dots if there are pages between the displayed
- # page numbers and the end of the page range
- if self.last_page - rightmost_page > 1:
- text_ = '..'
- # Wrap in a SPAN tag if nolink_attr is set
- if self.dotdot_attr:
- text_ = HTML.span(c=text_, **self.dotdot_attr)
- nav_items.append(HTML.li(text_))
-
- # Create a link to the very last page (unless we are on the last
- # page or there would be no need to insert '..' spacers)
- if self.page != self.last_page and rightmost_page < self.last_page:
- nav_items.append(HTML.li(self._pagerlink(self.last_page, self.last_page)))
-
- #_page_link = url.current()
- #nav_items.append(literal('' % (_page_link, str(int(self.page)+1))))
- #nav_items.append(literal('' % (_page_link, str(int(self.page)+1))))
- return self.separator.join(nav_items)
-
- def pager(self, format='<ul class="pagination">$link_previous ~2~ $link_next</ul>', page_param='page', partial_param='partial',
- show_if_single_page=False, separator=' ', onclick=None,
- symbol_first='<<', symbol_last='>>',
- symbol_previous='<', symbol_next='>',
- link_attr=None,
- curpage_attr=None,
- dotdot_attr=None, **kwargs
- ):
- self.curpage_attr = curpage_attr or {'class': 'active'}
- self.separator = separator
- self.pager_kwargs = kwargs
- self.page_param = page_param
- self.partial_param = partial_param
- self.onclick = onclick
- self.link_attr = link_attr or {'class': 'pager_link', 'rel': 'prerender'}
- self.dotdot_attr = dotdot_attr or {'class': 'pager_dotdot'}
+ def __init__(self, collection,
+ page=1, items_per_page=20, item_count=None,
+ **kwargs):
+ if isinstance(collection, sqlalchemy.orm.query.Query):
+ collection = paginate_sqlalchemy.SqlalchemyOrmWrapper(collection)
+ paginate.Page.__init__(self, collection, page=page, items_per_page=items_per_page, item_count=item_count,
+ url_maker=lambda page: url.current(page=page, **kwargs))
- # Don't show navigator if there is no more than one page
- if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
- return ''
-
- from string import Template
- # Replace ~...~ in token format by range of pages
- result = re.sub(r'~(\d+)~', self._range, format)
-
- # Interpolate '%' variables
- result = Template(result).safe_substitute({
- 'first_page': self.first_page,
- 'last_page': self.last_page,
- 'page': self.page,
- 'page_count': self.page_count,
- 'items_per_page': self.items_per_page,
- 'first_item': self.first_item,
- 'last_item': self.last_item,
- 'item_count': self.item_count,
- 'link_first': self.page > self.first_page and
- self._pagerlink(self.first_page, symbol_first) or '',
- 'link_last': self.page < self.last_page and
- self._pagerlink(self.last_page, symbol_last) or '',
- 'link_previous': HTML.li(self.previous_page and
- self._pagerlink(self.previous_page, symbol_previous)
- or HTML.a(symbol_previous)),
- 'link_next': HTML.li(self.next_page and
- self._pagerlink(self.next_page, symbol_next)
- or HTML.a(symbol_next)),
- })
-
- return literal(result)
-
-
-class RepoPage(Page):
-
- def __init__(self, collection, page=1, items_per_page=20,
- item_count=None, **kwargs):
-
- """Create a "RepoPage" instance. special pager for paging
- repository
- """
- # TODO: call baseclass __init__
- self._url_generator = kwargs.pop('url', url.current)
-
- # Safe the kwargs class-wide so they can be used in the pager() method
- self.kwargs = kwargs
-
- # Save a reference to the collection
- self.original_collection = collection
-
- self.collection = collection
+ def pager(self):
+ return literal(
+ paginate.Page.pager(self,
+ format='<ul class="pagination">$link_previous\n~4~$link_next</ul>',
+ link_attr={'class': 'pager_link'},
+ dotdot_attr={'class': 'pager_dotdot'},
+ separator='\n',
+ ))
- # The self.page is the number of the current page.
- # The first page has the number 1!
- try:
- self.page = int(page) # make it int() if we get it as a string
- except (ValueError, TypeError):
- log.error("Invalid page value: %r", page)
- self.page = 1
-
- self.items_per_page = items_per_page
-
- # Unless the user tells us how many items the collections has
- # we calculate that ourselves.
- if item_count is not None:
- self.item_count = item_count
- else:
- self.item_count = len(self.collection)
-
- # Compute the number of the first and last available page
- if self.item_count > 0:
- self.first_page = 1
- self.page_count = int(math.ceil(float(self.item_count) /
- self.items_per_page))
- self.last_page = self.first_page + self.page_count - 1
-
- # Make sure that the requested page number is the range of
- # valid pages
- if self.page > self.last_page:
- self.page = self.last_page
- elif self.page < self.first_page:
- self.page = self.first_page
+ @staticmethod
+ def default_link_tag(item):
+ # based on the base class implementation, but wrapping results in <li>, and with different handling of current_page
+ text = item['value']
+ if item['type'] == 'current_page': # we need active on the li and can thus not use curpage_attr
+ return '''<li class="active"><span>%s</span></li>''' % text
- # Note: the number of items on this page can be less than
- # items_per_page if the last page is not full
- self.first_item = max(0, (self.item_count) - (self.page *
- items_per_page))
- self.last_item = ((self.item_count - 1) - items_per_page *
- (self.page - 1))
-
- self.items = list(self.collection[self.first_item:self.last_item + 1])
-
- # Links to previous and next page
- if self.page > self.first_page:
- self.previous_page = self.page - 1
- else:
- self.previous_page = None
-
- if self.page < self.last_page:
- self.next_page = self.page + 1
- else:
- self.next_page = None
-
- # No items available
+ if not item['href'] or item['type'] == 'span':
+ if item['attrs']:
+ text = paginate.make_html_tag('span', **item['attrs']) + text + '</span>'
else:
- self.first_page = None
- self.page_count = 0
- self.last_page = None
- self.first_item = None
- self.last_item = None
- self.previous_page = None
- self.next_page = None
- self.items = []
-
- # This is a subclass of the 'list' type. Initialise the list now.
- list.__init__(self, reversed(self.items))
+ target_url = item['href']
+ text = paginate.make_html_tag('a', text=text, href=target_url, **item['attrs'])
+ return '''<li>%s</li>''' % text
diff --git a/kallithea/lib/paster_commands/template.ini.mako b/kallithea/lib/paster_commands/template.ini.mako
--- a/kallithea/lib/paster_commands/template.ini.mako
+++ b/kallithea/lib/paster_commands/template.ini.mako
@@ -1,11 +1,11 @@
## -*- coding: utf-8 -*-
-<%text>################################################################################%text>
-<%text>################################################################################%text>
-# Kallithea - config file generated with kallithea-config #
-# #
-# The %(here)s variable will be replaced with the parent directory of this file#
-<%text>################################################################################%text>
-<%text>################################################################################%text>
+<%text>###################################################################################%text>
+<%text>###################################################################################%text>
+<%text>## Kallithea config file generated with kallithea-config ##%text>
+<%text>## ##%text>
+<%text>## The %(here)s variable will be replaced with the parent directory of this file ##%text>
+<%text>###################################################################################%text>
+<%text>###################################################################################%text>
[DEFAULT]
@@ -111,64 +111,33 @@ timeout = 3600
%endif
%else:
<%text>## UWSGI ##%text>
-<%text>## run with uwsgi --ini-paste-logged %text>
[uwsgi]
-socket = /tmp/uwsgi.sock
-master = true
-http = ${host}:${port}
-
-<%text>## set as daemon and redirect all output to file%text>
-#daemonize = ./uwsgi_kallithea.log
-
-<%text>## master process PID%text>
-pidfile = ./uwsgi_kallithea.pid
+<%text>## Note: this section is parsed by the uWSGI .ini parser when run as:%text>
+<%text>## uwsgi --venv /srv/kallithea/venv --ini-paste-logged my.ini%text>
+<%text>## Note: in uWSGI 2.0.18 or older, pastescript needs to be installed to%text>
+<%text>## get correct application logging. In later versions this is not necessary.%text>
+<%text>## pip install pastescript%text>
-<%text>## stats server with workers statistics, use uwsgitop%text>
-<%text>## for monitoring, `uwsgitop 127.0.0.1:1717`%text>
-stats = 127.0.0.1:1717
-memory-report = true
-
-<%text>## log 5XX errors%text>
-log-5xx = true
-
-<%text>## Set the socket listen queue size.%text>
-listen = 128
-
-<%text>## Gracefully Reload workers after the specified amount of managed requests%text>
-<%text>## (avoid memory leaks).%text>
-max-requests = 1000
+<%text>## HTTP Basics:%text>
+http-socket = ${host}:${port}
+buffer-size = 65535 ; Mercurial will use huge GET headers for discovery
-<%text>## enable large buffers%text>
-buffer-size = 65535
-
-<%text>## socket and http timeouts ##%text>
-http-timeout = 3600
-socket-timeout = 3600
-
-<%text>## Log requests slower than the specified number of milliseconds.%text>
-log-slow = 10
-
-<%text>## Exit if no app can be loaded.%text>
-need-app = true
-
-<%text>## Set lazy mode (load apps in workers instead of master).%text>
-lazy = true
+<%text>## Scaling:%text>
+master = true ; Use separate master and worker processes
+auto-procname = true ; Name worker processes accordingly
+lazy = true ; App *must* be loaded in workers - db connections can't be shared
+workers = 4 ; On demand scaling up to this many worker processes
+cheaper = 1 ; Initial and on demand scaling down to this many worker processes
+max-requests = 1000 ; Graceful reload of worker processes to avoid leaks
-<%text>## scaling ##%text>
-<%text>## set cheaper algorithm to use, if not set default will be used%text>
-cheaper-algo = spare
-
-<%text>## minimum number of workers to keep at all times%text>
-cheaper = 1
-
-<%text>## number of workers to spawn at startup%text>
-cheaper-initial = 1
-
-<%text>## maximum number of workers that can be spawned%text>
-workers = 4
-
-<%text>## how many workers should be spawned at a time%text>
-cheaper-step = 1
+<%text>## Tweak defaults:%text>
+strict = true ; Fail on unknown config directives
+enable-threads = true ; Enable Python threads (not threaded workers)
+vacuum = true ; Delete sockets during shutdown
+single-interpreter = true
+die-on-term = true ; Shutdown when receiving SIGTERM (default is respawn)
+need-app = true ; Exit early if no app can be loaded.
+reload-on-exception = true ; Don't assume that the application worker can process more requests after a severe error
%endif
<%text>## middleware for hosting the WSGI application under a URL prefix%text>
@@ -220,7 +189,7 @@ commit_parse_limit = 25
<%text>## used, which is correct in many cases but for example not when using uwsgi.%text>
<%text>## If you change this setting, you should reinstall the Git hooks via%text>
<%text>## Admin > Settings > Remap and Rescan.%text>
-# git_hook_interpreter = /srv/kallithea/venv/bin/python2
+#git_hook_interpreter = /srv/kallithea/venv/bin/python3
%if git_hook_interpreter:
git_hook_interpreter = ${git_hook_interpreter}
%endif
@@ -295,7 +264,7 @@ issue_sub =
<%text>## issue_pat, issue_server_link and issue_sub can have suffixes to specify%text>
<%text>## multiple patterns, to other issues server, wiki or others%text>
<%text>## below an example how to create a wiki pattern%text>
-# wiki-some-id -> https://wiki.example.com/some-id
+<%text>## wiki-some-id -> https://wiki.example.com/some-id%text>
#issue_pat_wiki = wiki-(\S+)
#issue_server_link_wiki = https://wiki.example.com/\1
@@ -313,12 +282,12 @@ allow_repo_location_change = True
allow_custom_hooks_settings = True
<%text>## extra extensions for indexing, space separated and without the leading '.'.%text>
-# index.extensions =
+#index.extensions =
# gemfile
# lock
<%text>## extra filenames for indexing, space separated%text>
-# index.filenames =
+#index.filenames =
# .dockerignore
# .editorconfig
# INSTALL
@@ -356,25 +325,23 @@ ssh_locale = ${ssh_locale}
<%text>### CELERY CONFIG ####%text>
<%text>####################################%text>
+<%text>## Note: Celery doesn't support Windows.%text>
use_celery = false
-<%text>## Example: connect to the virtual host 'rabbitmqhost' on localhost as rabbitmq:%text>
-broker.url = amqp://rabbitmq:qewqew@localhost:5672/rabbitmqhost
+<%text>## Celery config settings from https://docs.celeryproject.org/en/4.4.0/userguide/configuration.html prefixed with 'celery.'.%text>
-celery.imports = kallithea.lib.celerylib.tasks
-celery.accept.content = pickle
-celery.result.backend = amqp
-celery.result.dburi = amqp://
-celery.result.serializer = json
+<%text>## Example: use the message queue on the local virtual host 'kallitheavhost' as the RabbitMQ user 'kallithea':%text>
+celery.broker_url = amqp://kallithea:thepassword@localhost:5672/kallitheavhost
-#celery.send.task.error.emails = true
+celery.result.backend = db+sqlite:///celery-results.db
+
#celery.amqp.task.result.expires = 18000
-celeryd.concurrency = 2
-celeryd.max.tasks.per.child = 1
+celery.worker_concurrency = 2
+celery.worker_max_tasks_per_child = 1
<%text>## If true, tasks will never be sent to the queue, but executed locally instead.%text>
-celery.always.eager = false
+celery.task_always_eager = false
<%text>####################################%text>
<%text>### BEAKER CACHE ####%text>
@@ -383,19 +350,15 @@ celery.always.eager = false
beaker.cache.data_dir = %(here)s/data/cache/data
beaker.cache.lock_dir = %(here)s/data/cache/lock
-beaker.cache.regions = short_term,long_term,sql_cache_short
-
-beaker.cache.short_term.type = memory
-beaker.cache.short_term.expire = 60
-beaker.cache.short_term.key_length = 256
+beaker.cache.regions = long_term,long_term_file
beaker.cache.long_term.type = memory
beaker.cache.long_term.expire = 36000
beaker.cache.long_term.key_length = 256
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
+beaker.cache.long_term_file.type = file
+beaker.cache.long_term_file.expire = 604800
+beaker.cache.long_term_file.key_length = 256
<%text>####################################%text>
<%text>### BEAKER SESSION ####%text>
@@ -429,12 +392,24 @@ session.secret = ${uuid()}
#session.sa.url = postgresql://postgres:qwe@localhost/kallithea
#session.table_name = db_session
-<%text>############################%text>
-<%text>## ERROR HANDLING SYSTEMS ##%text>
-<%text>############################%text>
+<%text>####################################%text>
+<%text>### ERROR HANDLING ####%text>
+<%text>####################################%text>
+
+<%text>## Show a nice error page for application HTTP errors and exceptions (default true)%text>
+#errorpage.enabled = true
-# Propagate email settings to ErrorReporter of TurboGears2
-# You do not normally need to change these lines
+<%text>## Enable Backlash client-side interactive debugger (default false)%text>
+<%text>## WARNING: *THIS MUST BE false IN PRODUCTION ENVIRONMENTS!!!*%text>
+<%text>## This debug mode will allow all visitors to execute malicious code.%text>
+#debug = false
+
+<%text>## Enable Backlash server-side error reporting (unless debug mode handles it client-side) (default true)%text>
+#trace_errors.enable = true
+<%text>## Errors will be reported by mail if trace_errors.error_email is set.%text>
+
+<%text>## Propagate email settings to ErrorReporter of TurboGears2%text>
+<%text>## You do not normally need to change these lines%text>
get trace_errors.smtp_server = smtp_server
get trace_errors.smtp_port = smtp_port
get trace_errors.from_address = error_email_from
@@ -443,63 +418,7 @@ get trace_errors.smtp_username = smtp_us
get trace_errors.smtp_password = smtp_password
get trace_errors.smtp_use_tls = smtp_use_tls
-%if error_aggregation_service == 'appenlight':
-<%text>####################%text>
-<%text>### [appenlight] ###%text>
-<%text>####################%text>
-
-<%text>## AppEnlight is tailored to work with Kallithea, see%text>
-<%text>## http://appenlight.com for details how to obtain an account%text>
-<%text>## you must install python package `appenlight_client` to make it work%text>
-
-<%text>## appenlight enabled%text>
-appenlight = false
-
-appenlight.server_url = https://api.appenlight.com
-appenlight.api_key = YOUR_API_KEY
-
-<%text>## TWEAK AMOUNT OF INFO SENT HERE%text>
-
-<%text>## enables 404 error logging (default False)%text>
-appenlight.report_404 = false
-
-<%text>## time in seconds after request is considered being slow (default 1)%text>
-appenlight.slow_request_time = 1
-
-<%text>## record slow requests in application%text>
-<%text>## (needs to be enabled for slow datastore recording and time tracking)%text>
-appenlight.slow_requests = true
-
-<%text>## enable hooking to application loggers%text>
-#appenlight.logging = true
-
-<%text>## minimum log level for log capture%text>
-#appenlight.logging.level = WARNING
-
-<%text>## send logs only from erroneous/slow requests%text>
-<%text>## (saves API quota for intensive logging)%text>
-appenlight.logging_on_error = false
-
-<%text>## list of additional keywords that should be grabbed from environ object%text>
-<%text>## can be string with comma separated list of words in lowercase%text>
-<%text>## (by default client will always send following info:%text>
-<%text>## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that%text>
-<%text>## start with HTTP* this list be extended with additional keywords here%text>
-appenlight.environ_keys_whitelist =
-
-<%text>## list of keywords that should be blanked from request object%text>
-<%text>## can be string with comma separated list of words in lowercase%text>
-<%text>## (by default client will always blank keys that contain following words%text>
-<%text>## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'%text>
-<%text>## this list be extended with additional keywords set here%text>
-appenlight.request_keys_blacklist =
-
-<%text>## list of namespaces that should be ignores when gathering log entries%text>
-<%text>## can be string with comma separated list of namespaces%text>
-<%text>## (by default the client ignores own entries: appenlight_client.client)%text>
-appenlight.log_namespace_blacklist =
-
-%elif error_aggregation_service == 'sentry':
+%if error_aggregation_service == 'sentry':
<%text>################%text>
<%text>### [sentry] ###%text>
<%text>################%text>
@@ -519,12 +438,6 @@ sentry.include_paths =
sentry.exclude_paths =
%endif
-<%text>################################################################################%text>
-<%text>## WARNING: *DEBUG MODE MUST BE OFF IN A PRODUCTION ENVIRONMENT* ##%text>
-<%text>## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##%text>
-<%text>## execute malicious code after an exception is raised. ##%text>
-<%text>################################################################################%text>
-debug = false
<%text>##################################%text>
<%text>### LOGVIEW CONFIG ###%text>
@@ -539,19 +452,19 @@ logview.pylons.util = #eee
<%text>#########################################################%text>
%if database_engine == 'sqlite':
-# SQLITE [default]
+<%text>## SQLITE [default]%text>
sqlalchemy.url = sqlite:///%(here)s/kallithea.db?timeout=60
%elif database_engine == 'postgres':
-# POSTGRESQL
+<%text>## POSTGRESQL%text>
sqlalchemy.url = postgresql://user:pass@localhost/kallithea
%elif database_engine == 'mysql':
-# MySQL
+<%text>## MySQL%text>
sqlalchemy.url = mysql://user:pass@localhost/kallithea?charset=utf8
%endif
-# see sqlalchemy docs for others
+<%text>## see sqlalchemy docs for other backends%text>
sqlalchemy.pool_recycle = 3600
@@ -582,8 +495,8 @@ keys = generic, color_formatter, color_f
[logger_root]
level = NOTSET
handlers = console
-# For coloring based on log level:
-# handlers = console_color
+<%text>## For coloring based on log level:%text>
+#handlers = console_color
[logger_routes]
level = WARN
@@ -620,10 +533,10 @@ qualname = gearbox
level = WARN
handlers =
qualname = sqlalchemy.engine
-# For coloring based on log level and pretty printing of SQL:
-# level = INFO
-# handlers = console_color_sql
-# propagate = 0
+<%text>## For coloring based on log level and pretty printing of SQL:%text>
+#level = INFO
+#handlers = console_color_sql
+#propagate = 0
[logger_whoosh_indexer]
level = WARN
@@ -650,13 +563,13 @@ args = (sys.stderr,)
formatter = generic
[handler_console_color]
-# ANSI color coding based on log level
+<%text>## ANSI color coding based on log level%text>
class = StreamHandler
args = (sys.stderr,)
formatter = color_formatter
[handler_console_color_sql]
-# ANSI color coding and pretty printing of SQL statements
+<%text>## ANSI color coding and pretty printing of SQL statements%text>
class = StreamHandler
args = (sys.stderr,)
formatter = color_formatter_sql
@@ -687,16 +600,16 @@ datefmt = %Y-%m-%d %H:%M:%S
<%text>## SSH LOGGING ##%text>
<%text>#################%text>
-# The default loggers use 'handler_console' that uses StreamHandler with
-# destination 'sys.stderr'. In the context of the SSH server process, these log
-# messages would be sent to the client, which is normally not what you want.
-# By default, when running ssh-serve, just use NullHandler and disable logging
-# completely. For other logging options, see:
-# https://docs.python.org/2/library/logging.handlers.html
+<%text>## The default loggers use 'handler_console' that uses StreamHandler with%text>
+<%text>## destination 'sys.stderr'. In the context of the SSH server process, these log%text>
+<%text>## messages would be sent to the client, which is normally not what you want.%text>
+<%text>## By default, when running ssh-serve, just use NullHandler and disable logging%text>
+<%text>## completely. For other logging options, see:%text>
+<%text>## https://docs.python.org/2/library/logging.handlers.html%text>
[ssh_serve:logger_root]
level = CRITICAL
handlers = null
-# Note: If logging is configured with other handlers, they might need similar
-# muting for ssh-serve too.
+<%text>## Note: If logging is configured with other handlers, they might need similar%text>
+<%text>## muting for ssh-serve too.%text>
diff --git a/kallithea/lib/pidlock.py b/kallithea/lib/pidlock.py
--- a/kallithea/lib/pidlock.py
+++ b/kallithea/lib/pidlock.py
@@ -12,8 +12,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-from __future__ import print_function
-
import errno
import os
from multiprocessing.util import Finalize
@@ -137,6 +135,6 @@ class DaemonLock(object):
dir_, file_ = os.path.split(pidfile)
if not os.path.isdir(dir_):
os.makedirs(dir_)
- with open(self.pidfile, 'wb') as f:
+ with open(self.pidfile, 'w') as f:
f.write(lockname)
self.held = True
diff --git a/kallithea/lib/pygmentsutils.py b/kallithea/lib/pygmentsutils.py
--- a/kallithea/lib/pygmentsutils.py
+++ b/kallithea/lib/pygmentsutils.py
@@ -26,7 +26,6 @@ Original author and date, and relevant c
"""
from collections import defaultdict
-from itertools import ifilter
from pygments import lexers
@@ -59,15 +58,11 @@ def get_index_filenames():
"""
Get list of known indexable filenames from pygment lexer internals
"""
-
filenames = []
-
- def likely_filename(s):
- return s.find('*') == -1 and s.find('[') == -1
-
for lx, t in sorted(lexers.LEXERS.items()):
- for f in ifilter(likely_filename, t[-2]):
- filenames.append(f)
+ for f in t[-2]:
+ if '*' not in f and '[' not in f:
+ filenames.append(f)
return filenames
diff --git a/kallithea/lib/rcmail/message.py b/kallithea/lib/rcmail/message.py
--- a/kallithea/lib/rcmail/message.py
+++ b/kallithea/lib/rcmail/message.py
@@ -2,35 +2,6 @@ from kallithea.lib.rcmail.exceptions imp
from kallithea.lib.rcmail.response import MailResponse
-class Attachment(object):
- """
- Encapsulates file attachment information.
-
- :param filename: filename of attachment
- :param content_type: file mimetype
- :param data: the raw file data, either as string or file obj
- :param disposition: content-disposition (if any)
- """
-
- def __init__(self,
- filename=None,
- content_type=None,
- data=None,
- disposition=None):
-
- self.filename = filename
- self.content_type = content_type
- self.disposition = disposition or 'attachment'
- self._data = data
-
- @property
- def data(self):
- if isinstance(self._data, basestring):
- return self._data
- self._data = self._data.read()
- return self._data
-
-
class Message(object):
"""
Encapsulates an email message.
diff --git a/kallithea/lib/rcmail/response.py b/kallithea/lib/rcmail/response.py
--- a/kallithea/lib/rcmail/response.py
+++ b/kallithea/lib/rcmail/response.py
@@ -44,7 +44,9 @@ from email.utils import parseaddr
ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc']
DEFAULT_ENCODING = "utf-8"
-VALUE_IS_EMAIL_ADDRESS = lambda v: '@' in v
+
+def VALUE_IS_EMAIL_ADDRESS(v):
+ return '@' in v
def normalize_header(header):
@@ -87,7 +89,7 @@ class MailBase(object):
def __delitem__(self, key):
del self.headers[normalize_header(key)]
- def __nonzero__(self):
+ def __bool__(self):
return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0
def keys(self):
@@ -339,20 +341,20 @@ def to_message(mail, separator="; "):
try:
out = MIMEPart(ctype, **params)
- except TypeError as exc: # pragma: no cover
+ except TypeError as e: # pragma: no cover
raise EncodingError("Content-Type malformed, not allowed: %r; "
- "%r (Python ERROR: %s" %
- (ctype, params, exc.message))
+ "%r (Python ERROR: %s)" %
+ (ctype, params, e.args[0]))
for k in mail.keys():
if k in ADDRESS_HEADERS_WHITELIST:
- out[k.encode('ascii')] = header_to_mime_encoding(
+ out[k] = header_to_mime_encoding(
mail[k],
not_email=False,
separator=separator
)
else:
- out[k.encode('ascii')] = header_to_mime_encoding(
+ out[k] = header_to_mime_encoding(
mail[k],
not_email=True
)
@@ -392,7 +394,7 @@ class MIMEPart(MIMEBase):
if mail.body is None:
return # only None, '' is still ok
- ctype, ctype_params = mail.content_encoding['Content-Type']
+ ctype, _ctype_params = mail.content_encoding['Content-Type']
cdisp, cdisp_params = mail.content_encoding['Content-Disposition']
assert ctype, ("Extract payload requires that mail.content_encoding "
@@ -422,7 +424,7 @@ def header_to_mime_encoding(value, not_e
return ""
encoder = Charset(DEFAULT_ENCODING)
- if type(value) == list:
+ if isinstance(value, list):
return separator.join(properly_encode_header(
v, encoder, not_email) for v in value)
else:
@@ -443,12 +445,12 @@ def properly_encode_header(value, encode
check different, then change this.
"""
try:
- return value.encode("ascii")
- except UnicodeEncodeError:
+ value.encode("ascii")
+ return value
+ except UnicodeError:
if not not_email and VALUE_IS_EMAIL_ADDRESS(value):
# this could have an email address, make sure we don't screw it up
name, address = parseaddr(value)
- return '"%s" <%s>' % (
- encoder.header_encode(name.encode("utf-8")), address)
+ return '"%s" <%s>' % (encoder.header_encode(name), address)
- return encoder.header_encode(value.encode("utf-8"))
+ return encoder.header_encode(value)
diff --git a/kallithea/lib/rcmail/smtp_mailer.py b/kallithea/lib/rcmail/smtp_mailer.py
--- a/kallithea/lib/rcmail/smtp_mailer.py
+++ b/kallithea/lib/rcmail/smtp_mailer.py
@@ -64,7 +64,7 @@ class SmtpMailer(object):
def send(self, recipients=None, subject='', body='', html='',
attachment_files=None, headers=None):
recipients = recipients or []
- if isinstance(recipients, basestring):
+ if isinstance(recipients, str):
recipients = [recipients]
if headers is None:
headers = {}
diff --git a/kallithea/lib/recaptcha.py b/kallithea/lib/recaptcha.py
--- a/kallithea/lib/recaptcha.py
+++ b/kallithea/lib/recaptcha.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import json
-import urllib
-import urllib2
+import urllib.parse
+import urllib.request
class RecaptchaResponse(object):
@@ -26,17 +26,17 @@ def submit(g_recaptcha_response, private
return RecaptchaResponse(is_valid=False, error_code='incorrect-captcha-sol')
def encode_if_necessary(s):
- if isinstance(s, unicode):
+ if isinstance(s, str):
return s.encode('utf-8')
return s
- params = urllib.urlencode({
+ params = urllib.parse.urlencode({
'secret': encode_if_necessary(private_key),
'remoteip': encode_if_necessary(remoteip),
'response': encode_if_necessary(g_recaptcha_response),
- })
+ }).encode('ascii')
- req = urllib2.Request(
+ req = urllib.request.Request(
url="https://www.google.com/recaptcha/api/siteverify",
data=params,
headers={
@@ -45,7 +45,7 @@ def submit(g_recaptcha_response, private
}
)
- httpresp = urllib2.urlopen(req)
+ httpresp = urllib.request.urlopen(req)
return_values = json.loads(httpresp.read())
httpresp.close()
diff --git a/kallithea/lib/ssh.py b/kallithea/lib/ssh.py
--- a/kallithea/lib/ssh.py
+++ b/kallithea/lib/ssh.py
@@ -21,12 +21,14 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-import binascii
+import base64
import logging
import re
from tg.i18n import ugettext as _
+from kallithea.lib.utils2 import ascii_bytes, ascii_str
+
log = logging.getLogger(__name__)
@@ -42,32 +44,32 @@ def parse_pub_key(ssh_key):
>>> parse_pub_key('')
Traceback (most recent call last):
...
- SshKeyParseError: SSH key is missing
+ kallithea.lib.ssh.SshKeyParseError: SSH key is missing
>>> parse_pub_key('''AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
Traceback (most recent call last):
...
- SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
+ kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must have both a key type and a base64 part, like 'ssh-rsa ASRNeaZu4FA...xlJp='
>>> parse_pub_key('''abc AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
Traceback (most recent call last):
...
- SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'
+ kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - it must start with 'ssh-(rsa|dss|ed25519)'
>>> parse_pub_key('''ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ''')
Traceback (most recent call last):
...
- SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ'
+ kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - failed to decode base64 part 'AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ'
>>> parse_pub_key('''ssh-rsa AAAAB2NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==''')
Traceback (most recent call last):
...
- SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa'
+ kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - base64 part is not 'ssh-rsa' as claimed but 'csh-rsa'
>>> parse_pub_key('''ssh-rsa AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ''')
Traceback (most recent call last):
...
- SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
+ kallithea.lib.ssh.SshKeyParseError: Incorrect SSH key - unexpected characters in base64 part "AAAAB3NzaC1yc2EAAAA'LVGhpcyBpcyBmYWtlIQ"
>>> parse_pub_key(''' ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ== and a comment
... ''')
- ('ssh-rsa', '\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n')
+ ('ssh-rsa', b'\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x0bThis is fake!', 'and a comment\n')
>>> parse_pub_key('''ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIP1NA2kBQIKe74afUXmIWD9ByDYQJqUwW44Y4gJOBRuo''')
- ('ssh-ed25519', '\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '')
+ ('ssh-ed25519', b'\x00\x00\x00\x0bssh-ed25519\x00\x00\x00 \xfdM\x03i\x01@\x82\x9e\xef\x86\x9fQy\x88X?A\xc86\x10&\xa50[\x8e\x18\xe2\x02N\x05\x1b\xa8', '')
"""
if not ssh_key:
raise SshKeyParseError(_("SSH key is missing"))
@@ -84,14 +86,14 @@ def parse_pub_key(ssh_key):
raise SshKeyParseError(_("Incorrect SSH key - unexpected characters in base64 part %r") % keyvalue)
try:
- decoded = keyvalue.decode('base64')
- except binascii.Error:
+ key_bytes = base64.b64decode(keyvalue)
+ except base64.binascii.Error:
raise SshKeyParseError(_("Incorrect SSH key - failed to decode base64 part %r") % keyvalue)
- if not decoded.startswith('\x00\x00\x00' + chr(len(keytype)) + str(keytype) + '\x00'):
- raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r") % (str(keytype), str(decoded[4:].split('\0', 1)[0])))
+ if not key_bytes.startswith(b'\x00\x00\x00%c%s\x00' % (len(keytype), ascii_bytes(keytype))):
+ raise SshKeyParseError(_("Incorrect SSH key - base64 part is not %r as claimed but %r") % (keytype, ascii_str(key_bytes[4:].split(b'\0', 1)[0])))
- return keytype, decoded, comment
+ return keytype, key_bytes, comment
SSH_OPTIONS = 'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding'
@@ -121,13 +123,14 @@ def authorized_keys_line(kallithea_cli_p
'no-pty,no-port-forwarding,no-X11-forwarding,no-agent-forwarding,command="/srv/kallithea/venv/bin/kallithea-cli ssh-serve -c /srv/kallithea/my.ini 7 17" ssh-rsa AAAAB3NzaC1yc2EAAAALVGhpcyBpcyBmYWtlIQ==\\n'
"""
try:
- keytype, decoded, comment = parse_pub_key(key.public_key)
+ keytype, key_bytes, comment = parse_pub_key(key.public_key)
except SshKeyParseError:
return '# Invalid Kallithea SSH key: %s %s\n' % (key.user.user_id, key.user_ssh_key_id)
- mimekey = decoded.encode('base64').replace('\n', '')
- if not _safe_check(mimekey):
+ base64_key = ascii_str(base64.b64encode(key_bytes))
+ assert '\n' not in base64_key
+ if not _safe_check(base64_key):
return '# Invalid Kallithea SSH key - bad base64 encoding: %s %s\n' % (key.user.user_id, key.user_ssh_key_id)
return '%s,command="%s ssh-serve -c %s %s %s" %s %s\n' % (
SSH_OPTIONS, kallithea_cli_path, config_file,
key.user.user_id, key.user_ssh_key_id,
- keytype, mimekey)
+ keytype, base64_key)
diff --git a/kallithea/lib/timerproxy.py b/kallithea/lib/timerproxy.py
--- a/kallithea/lib/timerproxy.py
+++ b/kallithea/lib/timerproxy.py
@@ -20,7 +20,7 @@ from sqlalchemy.interfaces import Connec
log = logging.getLogger('timerproxy')
-BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
+BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(30, 38)
def color_sql(sql):
diff --git a/kallithea/lib/utils.py b/kallithea/lib/utils.py
--- a/kallithea/lib/utils.py
+++ b/kallithea/lib/utils.py
@@ -31,21 +31,24 @@ import os
import re
import sys
import traceback
+import urllib.error
from distutils.version import StrictVersion
-import beaker
-from beaker.cache import _cache_decorate
-from tg.i18n import ugettext as _
+import mercurial.config
+import mercurial.error
+import mercurial.ui
-from kallithea.lib.exceptions import HgsubversionImportError
-from kallithea.lib.utils2 import get_current_authuser, safe_str, safe_unicode
-from kallithea.lib.vcs.exceptions import VCSError
+import kallithea.config.conf
+from kallithea.lib.exceptions import InvalidCloneUriException
+from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str
+from kallithea.lib.vcs.backends.git.repository import GitRepository
+from kallithea.lib.vcs.backends.hg.repository import MercurialRepository
+from kallithea.lib.vcs.conf import settings
+from kallithea.lib.vcs.exceptions import RepositoryError, VCSError
from kallithea.lib.vcs.utils.fakemod import create_module
from kallithea.lib.vcs.utils.helpers import get_scm
-from kallithea.lib.vcs.utils.hgcompat import config, ui
-from kallithea.model import meta
+from kallithea.model import db, meta
from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog
-from kallithea.model.repo_group import RepoGroupModel
log = logging.getLogger(__name__)
@@ -102,7 +105,6 @@ def fix_repo_id_name(path):
rest = '/' + rest_
repo_id = _get_permanent_id(first)
if repo_id is not None:
- from kallithea.model.db import Repository
repo = Repository.get(repo_id)
if repo is not None:
return repo.repo_name + rest
@@ -130,7 +132,7 @@ def action_logger(user, action, repo, ip
if getattr(user, 'user_id', None):
user_obj = User.get(user.user_id)
- elif isinstance(user, basestring):
+ elif isinstance(user, str):
user_obj = User.get_by_username(user)
else:
raise Exception('You have to provide a user object or a username')
@@ -138,17 +140,17 @@ def action_logger(user, action, repo, ip
if getattr(repo, 'repo_id', None):
repo_obj = Repository.get(repo.repo_id)
repo_name = repo_obj.repo_name
- elif isinstance(repo, basestring):
+ elif isinstance(repo, str):
repo_name = repo.lstrip('/')
repo_obj = Repository.get_by_repo_name(repo_name)
else:
repo_obj = None
- repo_name = u''
+ repo_name = ''
user_log = UserLog()
user_log.user_id = user_obj.user_id
user_log.username = user_obj.username
- user_log.action = safe_unicode(action)
+ user_log.action = action
user_log.repository = repo_obj
user_log.repository_name = repo_name
@@ -158,7 +160,7 @@ def action_logger(user, action, repo, ip
meta.Session().add(user_log)
log.info('Logging action:%s on %s by user:%s ip:%s',
- action, safe_unicode(repo), user_obj, ipaddr)
+ action, repo, user_obj, ipaddr)
if commit:
meta.Session().commit()
@@ -172,7 +174,7 @@ def get_filesystem_repos(path):
"""
# remove ending slash for better results
- path = safe_str(path.rstrip(os.sep))
+ path = path.rstrip(os.sep)
log.debug('now scanning in %s', path)
def isdir(*n):
@@ -223,37 +225,43 @@ def get_filesystem_repos(path):
def is_valid_repo_uri(repo_type, url, ui):
- """Check if the url seems like a valid remote repo location - raise an Exception if any problems"""
+ """Check if the url seems like a valid remote repo location
+ Raise InvalidCloneUriException if any problems"""
if repo_type == 'hg':
- from kallithea.lib.vcs.backends.hg.repository import MercurialRepository
if url.startswith('http') or url.startswith('ssh'):
# initially check if it's at least the proper URL
# or does it pass basic auth
- MercurialRepository._check_url(url, ui)
+ try:
+ MercurialRepository._check_url(url, ui)
+ except urllib.error.URLError as e:
+ raise InvalidCloneUriException('URI %s URLError: %s' % (url, e))
+ except mercurial.error.RepoError as e:
+ raise InvalidCloneUriException('Mercurial %s: %s' % (type(e).__name__, safe_str(bytes(e))))
elif url.startswith('svn+http'):
try:
from hgsubversion.svnrepo import svnremoterepo
except ImportError:
- raise HgsubversionImportError(_('Unable to activate hgsubversion support. '
- 'The "hgsubversion" library is missing'))
+ raise InvalidCloneUriException('URI type %s not supported - hgsubversion is not available' % (url,))
svnremoterepo(ui, url).svn.uuid
elif url.startswith('git+http'):
- raise NotImplementedError()
+ raise InvalidCloneUriException('URI type %s not implemented' % (url,))
else:
- raise Exception('URI %s not allowed' % (url,))
+ raise InvalidCloneUriException('URI %s not allowed' % (url,))
elif repo_type == 'git':
- from kallithea.lib.vcs.backends.git.repository import GitRepository
if url.startswith('http') or url.startswith('git'):
# initially check if it's at least the proper URL
# or does it pass basic auth
- GitRepository._check_url(url)
+ try:
+ GitRepository._check_url(url)
+ except urllib.error.URLError as e:
+ raise InvalidCloneUriException('URI %s URLError: %s' % (url, e))
elif url.startswith('svn+http'):
- raise NotImplementedError()
+ raise InvalidCloneUriException('URI type %s not implemented' % (url,))
elif url.startswith('hg+http'):
- raise NotImplementedError()
+ raise InvalidCloneUriException('URI type %s not implemented' % (url,))
else:
- raise Exception('URI %s not allowed' % (url))
+ raise InvalidCloneUriException('URI %s not allowed' % (url))
def is_valid_repo(repo_name, base_path, scm=None):
@@ -269,7 +277,7 @@ def is_valid_repo(repo_name, base_path,
:return True: if given path is a valid repository
"""
# TODO: paranoid security checks?
- full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
+ full_path = os.path.join(base_path, repo_name)
try:
scm_ = get_scm(full_path)
@@ -287,7 +295,7 @@ def is_valid_repo_group(repo_group_name,
:param repo_name:
:param base_path:
"""
- full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
+ full_path = os.path.join(base_path, repo_group_name)
# check if it's not a repo
if is_valid_repo(repo_group_name, base_path):
@@ -309,65 +317,41 @@ def is_valid_repo_group(repo_group_name,
return False
-# propagated from mercurial documentation
-ui_sections = ['alias', 'auth',
- 'decode/encode', 'defaults',
- 'diff', 'email',
- 'extensions', 'format',
- 'merge-patterns', 'merge-tools',
- 'hooks', 'http_proxy',
- 'smtp', 'patch',
- 'paths', 'profiling',
- 'server', 'trusted',
- 'ui', 'web', ]
-
-
-def make_ui(repo_path=None, clear_session=True):
+def make_ui(repo_path=None):
"""
Create an Mercurial 'ui' object based on database Ui settings, possibly
augmenting with content from a hgrc file.
"""
- baseui = ui.ui()
+ baseui = mercurial.ui.ui()
# clean the baseui object
- baseui._ocfg = config.config()
- baseui._ucfg = config.config()
- baseui._tcfg = config.config()
+ baseui._ocfg = mercurial.config.config()
+ baseui._ucfg = mercurial.config.config()
+ baseui._tcfg = mercurial.config.config()
sa = meta.Session()
- for ui_ in sa.query(Ui).all():
+ for ui_ in sa.query(Ui).order_by(Ui.ui_section, Ui.ui_key):
if ui_.ui_active:
- ui_val = '' if ui_.ui_value is None else safe_str(ui_.ui_value)
log.debug('config from db: [%s] %s=%r', ui_.ui_section,
- ui_.ui_key, ui_val)
- baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
- ui_val)
- if clear_session:
- meta.Session.remove()
+ ui_.ui_key, ui_.ui_value)
+ baseui.setconfig(ascii_bytes(ui_.ui_section), ascii_bytes(ui_.ui_key),
+ b'' if ui_.ui_value is None else safe_bytes(ui_.ui_value))
# force set push_ssl requirement to False, Kallithea handles that
- baseui.setconfig('web', 'push_ssl', False)
- baseui.setconfig('web', 'allow_push', '*')
+ baseui.setconfig(b'web', b'push_ssl', False)
+ baseui.setconfig(b'web', b'allow_push', b'*')
# prevent interactive questions for ssh password / passphrase
- ssh = baseui.config('ui', 'ssh', default='ssh')
- baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
+ ssh = baseui.config(b'ui', b'ssh', default=b'ssh')
+ baseui.setconfig(b'ui', b'ssh', b'%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)
# push / pull hooks
- baseui.setconfig('hooks', 'changegroup.kallithea_log_push_action', 'python:kallithea.lib.hooks.log_push_action')
- baseui.setconfig('hooks', 'outgoing.kallithea_log_pull_action', 'python:kallithea.lib.hooks.log_pull_action')
+ baseui.setconfig(b'hooks', b'changegroup.kallithea_log_push_action', b'python:kallithea.lib.hooks.log_push_action')
+ baseui.setconfig(b'hooks', b'outgoing.kallithea_log_pull_action', b'python:kallithea.lib.hooks.log_pull_action')
if repo_path is not None:
- hgrc_path = os.path.join(repo_path, '.hg', 'hgrc')
- if os.path.isfile(hgrc_path):
- log.debug('reading hgrc from %s', hgrc_path)
- cfg = config.config()
- cfg.read(hgrc_path)
- for section in ui_sections:
- for k, v in cfg.items(section):
- log.debug('config from file: [%s] %s=%s', section, k, v)
- baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
- else:
- log.debug('hgrc file is not present at %s, skipping...', hgrc_path)
+ # Note: MercurialRepository / mercurial.localrepo.instance will do this too, so it will always be possible to override db settings or what is hardcoded above
+ baseui.readconfig(repo_path)
+ assert baseui.plain() # set by hgcompat.monkey_do (invoked from import of vcs.backends.hg) to minimize potential impact of loading config files
return baseui
@@ -377,12 +361,10 @@ def set_app_settings(config):
:param config:
"""
- try:
- hgsettings = Setting.get_app_settings()
- for k, v in hgsettings.items():
- config[k] = v
- finally:
- meta.Session.remove()
+ hgsettings = Setting.get_app_settings()
+ for k, v in hgsettings.items():
+ config[k] = v
+ config['base_path'] = Ui.get_repos_location()
def set_vcs_config(config):
@@ -391,16 +373,14 @@ def set_vcs_config(config):
:param config: kallithea.CONFIG
"""
- from kallithea.lib.vcs import conf
- from kallithea.lib.utils2 import aslist
- conf.settings.BACKENDS = {
+ settings.BACKENDS = {
'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
'git': 'kallithea.lib.vcs.backends.git.GitRepository',
}
- conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
- conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
- conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
+ settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
+ settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
+ settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
'utf-8'), sep=',')
@@ -410,13 +390,11 @@ def set_indexer_config(config):
:param config: kallithea.CONFIG
"""
- from kallithea.config import conf
-
log.debug('adding extra into INDEX_EXTENSIONS')
- conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
+ kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
log.debug('adding extra into INDEX_FILENAMES')
- conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
+ kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
def map_groups(path):
@@ -427,8 +405,9 @@ def map_groups(path):
:param paths: full path to repository
"""
+ from kallithea.model.repo_group import RepoGroupModel
sa = meta.Session()
- groups = path.split(Repository.url_sep())
+ groups = path.split(db.URL_SEP)
parent = None
group = None
@@ -437,7 +416,7 @@ def map_groups(path):
rgm = RepoGroupModel()
owner = User.get_first_admin()
for lvl, group_name in enumerate(groups):
- group_name = u'/'.join(groups[:lvl] + [group_name])
+ group_name = '/'.join(groups[:lvl] + [group_name])
group = RepoGroup.get_by_group_name(group_name)
desc = '%s group' % group_name
@@ -459,14 +438,14 @@ def map_groups(path):
return group
-def repo2db_mapper(initial_repo_list, remove_obsolete=False,
+def repo2db_mapper(initial_repo_dict, remove_obsolete=False,
install_git_hooks=False, user=None, overwrite_git_hooks=False):
"""
- maps all repos given in initial_repo_list, non existing repositories
+ maps all repos given in initial_repo_dict, non existing repositories
are created, if remove_obsolete is True it also check for db entries
- that are not in initial_repo_list and removes them.
+ that are not in initial_repo_dict and removes them.
- :param initial_repo_list: list of repositories found by scanning methods
+ :param initial_repo_dict: mapping with repositories found by scanning methods
:param remove_obsolete: check for obsolete entries in database
:param install_git_hooks: if this is True, also check and install git hook
for a repo if missing
@@ -487,10 +466,9 @@ def repo2db_mapper(initial_repo_list, re
enable_downloads = defs.get('repo_enable_downloads')
private = defs.get('repo_private')
- for name, repo in initial_repo_list.items():
+ for name, repo in initial_repo_dict.items():
group = map_groups(name)
- unicode_name = safe_unicode(name)
- db_repo = repo_model.get_by_repo_name(unicode_name)
+ db_repo = repo_model.get_by_repo_name(name)
# found repo that is on filesystem not in Kallithea database
if not db_repo:
log.info('repository %s not found, creating now', name)
@@ -526,9 +504,8 @@ def repo2db_mapper(initial_repo_list, re
removed = []
# remove from database those repositories that are not in the filesystem
- unicode_initial_repo_list = set(safe_unicode(name) for name in initial_repo_list)
for repo in sa.query(Repository).all():
- if repo.repo_name not in unicode_initial_repo_list:
+ if repo.repo_name not in initial_repo_dict:
if remove_obsolete:
log.debug("Removing non-existing repository found in db `%s`",
repo.repo_name)
@@ -544,9 +521,6 @@ def repo2db_mapper(initial_repo_list, re
def load_rcextensions(root_path):
- import kallithea
- from kallithea.config import conf
-
path = os.path.join(root_path, 'rcextensions', '__init__.py')
if os.path.isfile(path):
rcext = create_module('rc', path)
@@ -554,17 +528,17 @@ def load_rcextensions(root_path):
log.debug('Found rcextensions now loading %s...', rcext)
# Additional mappings that are not present in the pygments lexers
- conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
+ kallithea.config.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
# OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
if getattr(EXT, 'INDEX_EXTENSIONS', []):
log.debug('settings custom INDEX_EXTENSIONS')
- conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
+ kallithea.config.conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
# ADDITIONAL MAPPINGS
log.debug('adding extra into INDEX_EXTENSIONS')
- conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
+ kallithea.config.conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
# auto check if the module is not missing any data, set to default if is
# this will help autoupdate new feature of rcext module
@@ -585,28 +559,33 @@ def check_git_version():
Checks what version of git is installed on the system, and raise a system exit
if it's too old for Kallithea to work properly.
"""
- from kallithea import BACKENDS
- from kallithea.lib.vcs.backends.git.repository import GitRepository
- from kallithea.lib.vcs.conf import settings
-
- if 'git' not in BACKENDS:
+ if 'git' not in kallithea.BACKENDS:
return None
if not settings.GIT_EXECUTABLE_PATH:
log.warning('No git executable configured - check "git_path" in the ini file.')
return None
- stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True,
- _safe=True)
+ try:
+ stdout, stderr = GitRepository._run_git_command(['--version'])
+ except RepositoryError as e:
+ # message will already have been logged as error
+ log.warning('No working git executable found - check "git_path" in the ini file.')
+ return None
if stderr:
- log.warning('Error/stderr from "%s --version": %r', settings.GIT_EXECUTABLE_PATH, stderr)
+ log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, safe_str(stderr))
- m = re.search(r"\d+.\d+.\d+", stdout)
+ if not stdout:
+ log.warning('No working git executable found - check "git_path" in the ini file.')
+ return None
+
+ output = safe_str(stdout).strip()
+ m = re.search(r"\d+.\d+.\d+", output)
if m:
ver = StrictVersion(m.group(0))
log.debug('Git executable: "%s", version %s (parsed from: "%s")',
- settings.GIT_EXECUTABLE_PATH, ver, stdout.strip())
+ settings.GIT_EXECUTABLE_PATH, ver, output)
if ver < git_req_ver:
log.error('Kallithea detected %s version %s, which is too old '
'for the system to function properly. '
@@ -618,68 +597,7 @@ def check_git_version():
sys.exit(1)
else:
ver = StrictVersion('0.0.0')
- log.warning('Error finding version number in "%s --version" stdout: %r',
- settings.GIT_EXECUTABLE_PATH, stdout.strip())
+ log.warning('Error finding version number in "%s --version" stdout:\n%s',
+ settings.GIT_EXECUTABLE_PATH, output)
return ver
-
-
-#===============================================================================
-# CACHE RELATED METHODS
-#===============================================================================
-
-# set cache regions for beaker so celery can utilise it
-def setup_cache_regions(settings):
- # Create dict with just beaker cache configs with prefix stripped
- cache_settings = {'regions': None}
- prefix = 'beaker.cache.'
- for key in settings:
- if key.startswith(prefix):
- name = key[len(prefix):]
- cache_settings[name] = settings[key]
- # Find all regions, apply defaults, and apply to beaker
- if cache_settings['regions']:
- for region in cache_settings['regions'].split(','):
- region = region.strip()
- prefix = region + '.'
- region_settings = {}
- for key in cache_settings:
- if key.startswith(prefix):
- name = key[len(prefix):]
- region_settings[name] = cache_settings[key]
- region_settings.setdefault('expire',
- cache_settings.get('expire', '60'))
- region_settings.setdefault('lock_dir',
- cache_settings.get('lock_dir'))
- region_settings.setdefault('data_dir',
- cache_settings.get('data_dir'))
- region_settings.setdefault('type',
- cache_settings.get('type', 'memory'))
- beaker.cache.cache_regions[region] = region_settings
-
-
-def conditional_cache(region, prefix, condition, func):
- """
-
- Conditional caching function use like::
- def _c(arg):
- #heavy computation function
- return data
-
- # depending from condition the compute is wrapped in cache or not
- compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func)
- return compute(arg)
-
- :param region: name of cache region
- :param prefix: cache region prefix
- :param condition: condition for cache to be triggered, and return data cached
- :param func: wrapped heavy function to compute
-
- """
- wrapped = func
- if condition:
- log.debug('conditional_cache: True, wrapping call of '
- 'func: %s into %s region cache' % (region, func))
- wrapped = _cache_decorate((prefix,), None, None, region)(func)
-
- return wrapped
diff --git a/kallithea/lib/utils2.py b/kallithea/lib/utils2.py
--- a/kallithea/lib/utils2.py
+++ b/kallithea/lib/utils2.py
@@ -27,25 +27,37 @@ Original author and date, and relevant c
:license: GPLv3, see LICENSE.md for more details.
"""
-from __future__ import print_function
-
import binascii
import datetime
+import json
import os
-import pwd
import re
import time
-import urllib
+import urllib.parse
import urlobject
from tg.i18n import ugettext as _
from tg.i18n import ungettext
from webhelpers2.text import collapse, remove_formatting, strip_tags
-from kallithea.lib.compat import json
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export
from kallithea.lib.vcs.utils.lazy import LazyProperty
+try:
+ import pwd
+except ImportError:
+ pass
+
+
+# mute pyflakes "imported but unused"
+assert ascii_bytes
+assert ascii_str
+assert safe_bytes
+assert safe_str
+assert LazyProperty
+
+
def str2bool(_str):
"""
returns True/False value from given string, it tries to translate the
@@ -71,7 +83,7 @@ def aslist(obj, sep=None, strip=True):
:param sep:
:param strip:
"""
- if isinstance(obj, (basestring)):
+ if isinstance(obj, (str)):
lst = obj.split(sep)
if strip:
lst = [v.strip() for v in lst]
@@ -98,14 +110,12 @@ def convert_line_endings(line, mode):
:rtype: str
:return: converted line according to mode
"""
- from string import replace
-
if mode == 0:
- line = replace(line, '\r\n', '\n')
- line = replace(line, '\r', '\n')
+ line = line.replace('\r\n', '\n')
+ line = line.replace('\r', '\n')
elif mode == 1:
- line = replace(line, '\r\n', '\r')
- line = replace(line, '\n', '\r')
+ line = line.replace('\r\n', '\r')
+ line = line.replace('\n', '\r')
elif mode == 2:
        line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
[... chunk truncated: remainder of the kallithea/lib/utils2.py diff and the
 "diff --git a/kallithea/lib/vcs/backends/git/changeset.py" header are missing here ...]
        # -l     ==> outputs long shas (and we need all 40 characters)
# --root ==> doesn't put '^' character for boundaries
# -r sha ==> blames for the given revision
- so, se = self.repository.run_git_command(cmd)
+ so = self.repository.run_git_command(cmd)
for i, blame_line in enumerate(so.split('\n')[:-1]):
- ln_no = i + 1
sha, line = re.split(r' ', blame_line, 1)
- yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
+ yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
def fill_archive(self, stream=None, kind='tgz', prefix=None,
subrepos=False):
@@ -353,12 +334,15 @@ class GitChangeset(BaseChangeset):
:raise ImproperArchiveTypeError: If given kind is wrong.
:raise VcsError: If given stream is None
-
"""
- allowed_kinds = settings.ARCHIVE_SPECS.keys()
+ allowed_kinds = settings.ARCHIVE_SPECS
if kind not in allowed_kinds:
raise ImproperArchiveTypeError('Archive kind not supported use one'
- 'of %s' % allowed_kinds)
+ 'of %s' % ' '.join(allowed_kinds))
+
+ if stream is None:
+ raise VCSError('You need to pass in a valid stream for filling'
+ ' with archival data')
if prefix is None:
prefix = '%s-%s' % (self.repository.name, self.short_id)
@@ -394,25 +378,30 @@ class GitChangeset(BaseChangeset):
popen.communicate()
def get_nodes(self, path):
+ """
+ Returns combined ``DirNode`` and ``FileNode`` objects list representing
+ state of changeset at the given ``path``. If node at the given ``path``
+ is not instance of ``DirNode``, ChangesetError would be raised.
+ """
+
if self._get_kind(path) != NodeKind.DIR:
raise ChangesetError("Directory does not exist for revision %s at "
" '%s'" % (self.revision, path))
- path = self._fix_path(path)
+ path = path.rstrip('/')
id = self._get_id_for_path(path)
tree = self.repository._repo[id]
dirnodes = []
filenodes = []
als = self.repository.alias
- for name, stat, id in tree.iteritems():
+ for name, stat, id in tree.items():
+ obj_path = safe_str(name)
if path != '':
- obj_path = '/'.join((path, name))
- else:
- obj_path = name
+ obj_path = '/'.join((path, obj_path))
if objects.S_ISGITLINK(stat):
root_tree = self.repository._repo[self._tree_id]
- cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(root_tree['.gitmodules'][1]).data))
- url = cf.get(('submodule', obj_path), 'url')
- dirnodes.append(SubModuleNode(obj_path, url=url, changeset=id,
+ cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(root_tree[b'.gitmodules'][1]).data))
+ url = ascii_str(cf.get(('submodule', obj_path), 'url'))
+ dirnodes.append(SubModuleNode(obj_path, url=url, changeset=ascii_str(id),
alias=als))
continue
@@ -434,9 +423,11 @@ class GitChangeset(BaseChangeset):
return nodes
def get_node(self, path):
- if isinstance(path, unicode):
- path = path.encode('utf-8')
- path = self._fix_path(path)
+ """
+ Returns ``Node`` object from the given ``path``. If there is no node at
+ the given ``path``, ``ChangesetError`` would be raised.
+ """
+ path = path.rstrip('/')
if path not in self.nodes:
try:
id_ = self._get_id_for_path(path)
@@ -444,12 +435,12 @@ class GitChangeset(BaseChangeset):
raise NodeDoesNotExistError("Cannot find one of parents' "
"directories for a given path: %s" % path)
- _GL = lambda m: m and objects.S_ISGITLINK(m)
- if _GL(self._stat_modes.get(path)):
+ stat = self._stat_modes.get(path)
+ if stat and objects.S_ISGITLINK(stat):
tree = self.repository._repo[self._tree_id]
- cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(tree['.gitmodules'][1]).data))
- url = cf.get(('submodule', path), 'url')
- node = SubModuleNode(path, url=url, changeset=id_,
+ cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(tree[b'.gitmodules'][1]).data))
+ url = ascii_str(cf.get(('submodule', path), 'url'))
+ node = SubModuleNode(path, url=url, changeset=ascii_str(id_),
alias=self.repository.alias)
else:
obj = self.repository._repo.get_object(id_)
@@ -465,7 +456,7 @@ class GitChangeset(BaseChangeset):
node._blob = obj
else:
raise NodeDoesNotExistError("There is no file nor directory "
- "at the given path '%s' at revision %s"
+ "at the given path: '%s' at revision %s"
% (path, self.short_id))
# cache node
self.nodes[path] = node
@@ -480,16 +471,6 @@ class GitChangeset(BaseChangeset):
return list(added.union(modified).union(deleted))
@LazyProperty
- def _diff_name_status(self):
- output = []
- for parent in self.parents:
- cmd = ['diff', '--name-status', parent.raw_id, self.raw_id,
- '--encoding=utf8']
- so, se = self.repository.run_git_command(cmd)
- output.append(so.strip())
- return '\n'.join(output)
-
- @LazyProperty
def _changes_cache(self):
added = set()
modified = set()
@@ -503,15 +484,15 @@ class GitChangeset(BaseChangeset):
if isinstance(parent, EmptyChangeset):
oid = None
else:
- oid = _r[parent.raw_id].tree
- changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
+ oid = _r[parent._commit.id].tree
+ changes = _r.object_store.tree_changes(oid, _r[self._commit.id].tree)
for (oldpath, newpath), (_, _), (_, _) in changes:
if newpath and oldpath:
- modified.add(newpath)
+ modified.add(safe_str(newpath))
elif newpath and not oldpath:
- added.add(newpath)
+ added.add(safe_str(newpath))
elif not newpath and oldpath:
- deleted.add(oldpath)
+ deleted.add(safe_str(oldpath))
return added, modified, deleted
def _get_paths_for_status(self, status):
diff --git a/kallithea/lib/vcs/backends/git/inmemory.py b/kallithea/lib/vcs/backends/git/inmemory.py
--- a/kallithea/lib/vcs/backends/git/inmemory.py
+++ b/kallithea/lib/vcs/backends/git/inmemory.py
@@ -7,7 +7,7 @@ from dulwich import objects
from kallithea.lib.vcs.backends.base import BaseInMemoryChangeset
from kallithea.lib.vcs.exceptions import RepositoryError
-from kallithea.lib.vcs.utils import safe_str
+from kallithea.lib.vcs.utils import ascii_str, safe_bytes
class GitInMemoryChangeset(BaseInMemoryChangeset):
@@ -39,7 +39,7 @@ class GitInMemoryChangeset(BaseInMemoryC
repo = self.repository._repo
object_store = repo.object_store
- ENCODING = "UTF-8"
+ ENCODING = b"UTF-8" # TODO: should probably be kept in sync with safe_str/safe_bytes and vcs/conf/settings.py DEFAULT_ENCODINGS
# Create tree and populates it with blobs
commit_tree = self.parents[0] and repo[self.parents[0]._commit.tree] or \
@@ -47,7 +47,7 @@ class GitInMemoryChangeset(BaseInMemoryC
for node in self.added + self.changed:
# Compute subdirs if needed
dirpath, nodename = posixpath.split(node.path)
- dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
+ dirnames = safe_bytes(dirpath).split(b'/') if dirpath else []
parent = commit_tree
ancestors = [('', parent)]
@@ -68,13 +68,9 @@ class GitInMemoryChangeset(BaseInMemoryC
# for dirnames (in reverse order) [this only applies for nodes from added]
new_trees = []
- if not node.is_binary:
- content = node.content.encode(ENCODING)
- else:
- content = node.content
- blob = objects.Blob.from_string(content)
+ blob = objects.Blob.from_string(node.content)
- node_path = node.name.encode(ENCODING)
+ node_path = safe_bytes(node.name)
if dirnames:
# If there are trees which should be created we need to build
# them now (in reverse order)
@@ -104,7 +100,7 @@ class GitInMemoryChangeset(BaseInMemoryC
for tree in new_trees:
object_store.add_object(tree)
for node in self.removed:
- paths = node.path.split('/')
+ paths = safe_bytes(node.path).split(b'/')
tree = commit_tree
trees = [tree]
# Traverse deep into the forest...
@@ -117,7 +113,7 @@ class GitInMemoryChangeset(BaseInMemoryC
except KeyError:
break
# Cut down the blob and all rotten trees on the way back...
- for path, tree in reversed(zip(paths, trees)):
+ for path, tree in reversed(list(zip(paths, trees))):
del tree[path]
if tree:
# This tree still has elements - don't remove it or any
@@ -130,9 +126,9 @@ class GitInMemoryChangeset(BaseInMemoryC
commit = objects.Commit()
commit.tree = commit_tree.id
commit.parents = [p._commit.id for p in self.parents if p]
- commit.author = commit.committer = safe_str(author)
+ commit.author = commit.committer = safe_bytes(author)
commit.encoding = ENCODING
- commit.message = safe_str(message)
+ commit.message = safe_bytes(message)
# Compute date
if date is None:
@@ -150,11 +146,10 @@ class GitInMemoryChangeset(BaseInMemoryC
object_store.add_object(commit)
- ref = 'refs/heads/%s' % branch
+ # Update vcs repository object & recreate dulwich repo
+ ref = b'refs/heads/%s' % safe_bytes(branch)
repo.refs[ref] = commit.id
-
- # Update vcs repository object & recreate dulwich repo
- self.repository.revisions.append(commit.id)
+ self.repository.revisions.append(ascii_str(commit.id))
# invalidate parsed refs after commit
self.repository._parsed_refs = self.repository._get_parsed_refs()
tip = self.repository.get_changeset()
@@ -177,15 +172,15 @@ class GitInMemoryChangeset(BaseInMemoryC
return []
def get_tree_for_dir(tree, dirname):
- for name, mode, id in tree.iteritems():
+ for name, mode, id in tree.items():
if name == dirname:
obj = self.repository._repo[id]
if isinstance(obj, objects.Tree):
return obj
else:
raise RepositoryError("Cannot create directory %s "
- "at tree %s as path is occupied and is not a "
- "Tree" % (dirname, tree))
+ "at tree %s as path is occupied and is not a "
+ "Tree" % (dirname, tree))
return None
trees = []
diff --git a/kallithea/lib/vcs/backends/git/repository.py b/kallithea/lib/vcs/backends/git/repository.py
--- a/kallithea/lib/vcs/backends/git/repository.py
+++ b/kallithea/lib/vcs/backends/git/repository.py
@@ -12,13 +12,15 @@
import errno
import logging
import os
-import posixpath
import re
import time
-import urllib
-import urllib2
+import urllib.error
+import urllib.parse
+import urllib.request
from collections import OrderedDict
+import mercurial.url # import httpbasicauthhandler, httpdigestauthhandler
+import mercurial.util # import url as hg_url
from dulwich.config import ConfigFile
from dulwich.objects import Tag
from dulwich.repo import NotGitRepository, Repo
@@ -26,10 +28,9 @@ from dulwich.repo import NotGitRepositor
from kallithea.lib.vcs import subprocessio
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
from kallithea.lib.vcs.conf import settings
-from kallithea.lib.vcs.exceptions import (
- BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError)
-from kallithea.lib.vcs.utils import date_fromtimestamp, makedate, safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import hg_url, httpbasicauthhandler, httpdigestauthhandler
+from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
+ TagDoesNotExistError)
+from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
from kallithea.lib.vcs.utils.lazy import LazyProperty
from kallithea.lib.vcs.utils.paths import abspath, get_user_home
@@ -53,7 +54,7 @@ class GitRepository(BaseRepository):
def __init__(self, repo_path, create=False, src_url=None,
update_after_clone=False, bare=False):
- self.path = safe_unicode(abspath(repo_path))
+ self.path = abspath(repo_path)
self.repo = self._get_repo(create, src_url, update_after_clone, bare)
self.bare = self.repo.bare
@@ -97,63 +98,54 @@ class GitRepository(BaseRepository):
return self._get_all_revisions()
@classmethod
- def _run_git_command(cls, cmd, **opts):
+ def _run_git_command(cls, cmd, cwd=None):
"""
- Runs given ``cmd`` as git command and returns tuple
- (stdout, stderr).
+ Runs given ``cmd`` as git command and returns output bytes in a tuple
+ (stdout, stderr) ... or raise RepositoryError.
:param cmd: git command to be executed
- :param opts: env options to pass into Subprocess command
+ :param cwd: passed directly to subprocess
"""
-
- if '_bare' in opts:
- _copts = []
- del opts['_bare']
- else:
- _copts = ['-c', 'core.quotepath=false', ]
- safe_call = False
- if '_safe' in opts:
- # no exc on failure
- del opts['_safe']
- safe_call = True
-
- assert isinstance(cmd, list), cmd
-
- gitenv = os.environ
# need to clean fix GIT_DIR !
- if 'GIT_DIR' in gitenv:
- del gitenv['GIT_DIR']
+ gitenv = dict(os.environ)
+ gitenv.pop('GIT_DIR', None)
gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
- _git_path = settings.GIT_EXECUTABLE_PATH
- cmd = [_git_path] + _copts + cmd
+ assert isinstance(cmd, list), cmd
+ cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd
+ try:
+ p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False)
+ except (EnvironmentError, OSError) as err:
+ # output from the failing process is in str(EnvironmentError)
+ msg = ("Couldn't run git command %s.\n"
+ "Subprocess failed with '%s': %s\n" %
+ (cmd, type(err).__name__, err)
+ ).strip()
+ log.error(msg)
+ raise RepositoryError(msg)
try:
- _opts = dict(
- env=gitenv,
- shell=False,
- )
- _opts.update(opts)
- p = subprocessio.SubprocessIOChunker(cmd, **_opts)
- except (EnvironmentError, OSError) as err:
- tb_err = ("Couldn't run git command (%s).\n"
- "Original error was:%s\n" % (cmd, err))
- log.error(tb_err)
- if safe_call:
- return '', err
- else:
- raise RepositoryError(tb_err)
-
- try:
- return ''.join(p.output), ''.join(p.error)
+ stdout = b''.join(p.output)
+ stderr = b''.join(p.error)
finally:
p.close()
+ # TODO: introduce option to make commands fail if they have any stderr output?
+ if stderr:
+ log.debug('stderr from %s:\n%s', cmd, stderr)
+ else:
+ log.debug('stderr from %s: None', cmd)
+ return stdout, stderr
def run_git_command(self, cmd):
- opts = {}
+ """
+ Runs given ``cmd`` as git command with cwd set to current repo.
+ Returns stdout as unicode str ... or raise RepositoryError.
+ """
+ cwd = None
if os.path.isdir(self.path):
- opts['cwd'] = self.path
- return self._run_git_command(cmd, **opts)
+ cwd = self.path
+ stdout, _stderr = self._run_git_command(cmd, cwd=cwd)
+ return safe_str(stdout)
@classmethod
def _check_url(cls, url):
@@ -166,7 +158,6 @@ class GitRepository(BaseRepository):
On failures it'll raise urllib2.HTTPError, exception is also thrown
when the return code is non 200
"""
-
# check first if it's not an local url
if os.path.isdir(url) or url.startswith('file:'):
return True
@@ -178,29 +169,30 @@ class GitRepository(BaseRepository):
url = url[url.find('+') + 1:]
handlers = []
- url_obj = hg_url(url)
+ url_obj = mercurial.util.url(safe_bytes(url))
test_uri, authinfo = url_obj.authinfo()
- url_obj.passwd = '*****'
+ if not test_uri.endswith(b'info/refs'):
+ test_uri = test_uri.rstrip(b'/') + b'/info/refs'
+
+ url_obj.passwd = b'*****'
cleaned_uri = str(url_obj)
- if not test_uri.endswith('info/refs'):
- test_uri = test_uri.rstrip('/') + '/info/refs'
-
if authinfo:
# create a password manager
- passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
passmgr.add_password(*authinfo)
- handlers.extend((httpbasicauthhandler(passmgr),
- httpdigestauthhandler(passmgr)))
+ handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
+ mercurial.url.httpdigestauthhandler(passmgr)))
- o = urllib2.build_opener(*handlers)
+ o = urllib.request.build_opener(*handlers)
o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
- q = {"service": 'git-upload-pack'}
- qs = '?%s' % urllib.urlencode(q)
- cu = "%s%s" % (test_uri, qs)
- req = urllib2.Request(cu, None, {})
+ req = urllib.request.Request(
+ "%s?%s" % (
+ safe_str(test_uri),
+ urllib.parse.urlencode({"service": 'git-upload-pack'})
+ ))
try:
resp = o.open(req)
@@ -208,13 +200,13 @@ class GitRepository(BaseRepository):
raise Exception('Return Code is not 200')
except Exception as e:
# means it cannot be cloned
- raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
+ raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
# now detect if it's proper git repo
gitdata = resp.read()
- if 'service=git-upload-pack' not in gitdata:
- raise urllib2.URLError(
- "url [%s] does not look like an git" % (cleaned_uri))
+ if b'service=git-upload-pack' not in gitdata:
+ raise urllib.error.URLError(
+ "url [%s] does not look like an git" % cleaned_uri)
return True
@@ -253,7 +245,7 @@ class GitRepository(BaseRepository):
rev_filter = settings.GIT_REV_FILTER
cmd = ['rev-list', rev_filter, '--reverse', '--date-order']
try:
- so, se = self.run_git_command(cmd)
+ so = self.run_git_command(cmd)
except RepositoryError:
# Can be raised for empty repositories
return []
@@ -261,58 +253,56 @@ class GitRepository(BaseRepository):
def _get_all_revisions2(self):
# alternate implementation using dulwich
- includes = [x[1][0] for x in self._parsed_refs.iteritems()
- if x[1][1] != 'T']
+ includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items()
+ if type_ != b'T']
return [c.commit.id for c in self._repo.get_walker(include=includes)]
def _get_revision(self, revision):
"""
- For git backend we always return integer here. This way we ensure
- that changeset's revision attribute would become integer.
+ Given any revision identifier, returns a 40 char string with revision hash.
"""
-
- is_null = lambda o: len(o) == revision.count('0')
-
if self._empty:
raise EmptyRepositoryError("There are no changesets yet")
if revision in (None, '', 'tip', 'HEAD', 'head', -1):
- return self.revisions[-1]
+ revision = -1
- is_bstr = isinstance(revision, (str, unicode))
- if ((is_bstr and revision.isdigit() and len(revision) < 12)
- or isinstance(revision, int) or is_null(revision)
- ):
+ if isinstance(revision, int):
try:
- revision = self.revisions[int(revision)]
+ return self.revisions[revision]
except IndexError:
- msg = ("Revision %s does not exist for %s" % (revision, self))
+ msg = "Revision %r does not exist for %s" % (revision, self.name)
raise ChangesetDoesNotExistError(msg)
- elif is_bstr:
- # get by branch/tag name
- _ref_revision = self._parsed_refs.get(revision)
- if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
- return _ref_revision[0]
+ if isinstance(revision, str):
+ if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')):
+ try:
+ return self.revisions[int(revision)]
+ except IndexError:
+ msg = "Revision %r does not exist for %s" % (revision, self)
+ raise ChangesetDoesNotExistError(msg)
- _tags_shas = self.tags.values()
+ # get by branch/tag name
+ _ref_revision = self._parsed_refs.get(safe_bytes(revision))
+ if _ref_revision: # and _ref_revision[1] in [b'H', b'RH', b'T']:
+ return ascii_str(_ref_revision[0])
+
+ if revision in self.revisions:
+ return revision
+
# maybe it's a tag ? we don't have them in self.revisions
- if revision in _tags_shas:
- return _tags_shas[_tags_shas.index(revision)]
+ if revision in self.tags.values():
+ return revision
- elif not SHA_PATTERN.match(revision) or revision not in self.revisions:
- msg = ("Revision %s does not exist for %s" % (revision, self))
+ if SHA_PATTERN.match(revision):
+ msg = "Revision %r does not exist for %s" % (revision, self.name)
raise ChangesetDoesNotExistError(msg)
- # Ensure we return full id
- if not SHA_PATTERN.match(str(revision)):
- raise ChangesetDoesNotExistError("Given revision %s not recognized"
- % revision)
- return revision
+ raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision)
def get_ref_revision(self, ref_type, ref_name):
"""
- Returns ``MercurialChangeset`` object representing repository's
+ Returns ``GitChangeset`` object representing repository's
changeset at the given ``revision``.
"""
return self._get_revision(ref_name)
@@ -327,20 +317,10 @@ class GitRepository(BaseRepository):
Returns normalized url. If schema is not given, would fall to
filesystem (``file:///``) schema.
"""
- url = safe_str(url)
if url != 'default' and '://' not in url:
url = ':///'.join(('file', url))
return url
- def get_hook_location(self):
- """
- returns absolute path to location where hooks are stored
- """
- loc = os.path.join(self.path, 'hooks')
- if not self.bare:
- loc = os.path.join(self.path, '.git', 'hooks')
- return loc
-
@LazyProperty
def name(self):
return os.path.basename(self.path)
@@ -367,23 +347,20 @@ class GitRepository(BaseRepository):
@LazyProperty
def description(self):
- undefined_description = u'unknown'
- _desc = self._repo.get_description()
- return safe_unicode(_desc or undefined_description)
+ return safe_str(self._repo.get_description() or b'unknown')
@LazyProperty
def contact(self):
- undefined_contact = u'Unknown'
+ undefined_contact = 'Unknown'
return undefined_contact
@property
def branches(self):
if not self.revisions:
return {}
- sortkey = lambda ctx: ctx[0]
- _branches = [(x[0], x[1][0])
- for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
- return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
+ _branches = [(safe_str(key), ascii_str(sha))
+ for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H']
+ return OrderedDict(sorted(_branches, key=(lambda ctx: ctx[0]), reverse=False))
@LazyProperty
def closed_branches(self):
@@ -396,11 +373,9 @@ class GitRepository(BaseRepository):
def _get_tags(self):
if not self.revisions:
return {}
-
- sortkey = lambda ctx: ctx[0]
- _tags = [(x[0], x[1][0])
- for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
- return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
+ _tags = [(safe_str(key), ascii_str(sha))
+ for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T']
+ return OrderedDict(sorted(_tags, key=(lambda ctx: ctx[0]), reverse=True))
def tag(self, name, user, revision=None, message=None, date=None,
**kwargs):
@@ -420,7 +395,7 @@ class GitRepository(BaseRepository):
changeset = self.get_changeset(revision)
message = message or "Added tag %s for commit %s" % (name,
changeset.raw_id)
- self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
+ self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id
self._parsed_refs = self._get_parsed_refs()
self.tags = self._get_tags()
@@ -439,7 +414,8 @@ class GitRepository(BaseRepository):
"""
if name not in self.tags:
raise TagDoesNotExistError("Tag %s does not exist" % name)
- tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
+ # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git'
+ tagpath = os.path.join(safe_str(self._repo.refs.path), 'refs', 'tags', name)
try:
os.remove(tagpath)
self._parsed_refs = self._get_parsed_refs()
@@ -459,18 +435,20 @@ class GitRepository(BaseRepository):
return self._get_parsed_refs()
def _get_parsed_refs(self):
- # cache the property
+ """Return refs as a dict, like:
+ { b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'] }
+ """
_repo = self._repo
refs = _repo.get_refs()
- keys = [('refs/heads/', 'H'),
- ('refs/remotes/origin/', 'RH'),
- ('refs/tags/', 'T')]
+ keys = [(b'refs/heads/', b'H'),
+ (b'refs/remotes/origin/', b'RH'),
+ (b'refs/tags/', b'T')]
_refs = {}
- for ref, sha in refs.iteritems():
+ for ref, sha in refs.items():
for k, type_ in keys:
if ref.startswith(k):
_key = ref[len(k):]
- if type_ == 'T':
+ if type_ == b'T':
obj = _repo.get_object(sha)
if isinstance(obj, Tag):
sha = _repo.get_object(sha).object[1]
@@ -483,13 +461,13 @@ class GitRepository(BaseRepository):
heads = {}
for key, val in refs.items():
- for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
+ for ref_key in [b'refs/heads/', b'refs/remotes/origin/']:
if key.startswith(ref_key):
n = key[len(ref_key):]
- if n not in ['HEAD']:
+ if n not in [b'HEAD']:
heads[n] = val
- return heads if reverse else dict((y, x) for x, y in heads.iteritems())
+ return heads if reverse else dict((y, x) for x, y in heads.items())
def get_changeset(self, revision=None):
"""
@@ -498,9 +476,7 @@ class GitRepository(BaseRepository):
"""
if isinstance(revision, GitChangeset):
return revision
- revision = self._get_revision(revision)
- changeset = GitChangeset(repository=self, revision=revision)
- return changeset
+ return GitChangeset(repository=self, revision=self._get_revision(revision))
def get_changesets(self, start=None, end=None, start_date=None,
end_date=None, branch_name=None, reverse=False, max_revisions=None):
@@ -547,7 +523,7 @@ class GitRepository(BaseRepository):
else:
cmd.append(settings.GIT_REV_FILTER)
- revs = self.run_git_command(cmd)[0].splitlines()
+ revs = self.run_git_command(cmd).splitlines()
start_pos = 0
end_pos = len(revs)
if start:
@@ -572,14 +548,15 @@ class GitRepository(BaseRepository):
revs = revs[start_pos:end_pos]
if reverse:
- revs = reversed(revs)
+ revs.reverse()
+
return CollectionGenerator(self, revs)
def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
context=3):
"""
- Returns (git like) *diff*, as plain text. Shows changes introduced by
- ``rev2`` since ``rev1``.
+ Returns (git like) *diff*, as plain bytes text. Shows changes
+ introduced by ``rev2`` since ``rev1``.
:param rev1: Entry point from which diff is shown. Can be
``self.EMPTY_CHANGESET`` - in this case, patch showing all
@@ -633,14 +610,13 @@ class GitRepository(BaseRepository):
if path:
cmd += ['--', path]
- stdout, stderr = self.run_git_command(cmd)
- # TODO: don't ignore stderr
+ stdout, stderr = self._run_git_command(cmd, cwd=self.path)
# If we used 'show' command, strip first few lines (until actual diff
# starts)
if rev1 == self.EMPTY_CHANGESET:
- parts = stdout.split('\ndiff ', 1)
+ parts = stdout.split(b'\ndiff ', 1)
if len(parts) > 1:
- stdout = 'diff ' + parts[1]
+ stdout = b'diff ' + parts[1]
return stdout
@LazyProperty
@@ -683,7 +659,7 @@ class GitRepository(BaseRepository):
Tries to pull changes from external location.
"""
url = self._get_url(url)
- so, se = self.run_git_command(['ls-remote', '-h', url])
+ so = self.run_git_command(['ls-remote', '-h', url])
cmd = ['fetch', url, '--']
for line in (x for x in so.splitlines()):
sha, ref = line.split('\t')
@@ -721,7 +697,7 @@ class GitRepository(BaseRepository):
"""
if config_file is None:
config_file = []
- elif isinstance(config_file, basestring):
+ elif isinstance(config_file, str):
config_file = [config_file]
def gen_configs():
@@ -733,9 +709,10 @@ class GitRepository(BaseRepository):
for config in gen_configs():
try:
- return config.get(section, name)
+ value = config.get(section, name)
except KeyError:
continue
+ return None if value is None else safe_str(value)
return None
def get_user_name(self, config_file=None):
diff --git a/kallithea/lib/vcs/backends/git/ssh.py b/kallithea/lib/vcs/backends/git/ssh.py
--- a/kallithea/lib/vcs/backends/git/ssh.py
+++ b/kallithea/lib/vcs/backends/git/ssh.py
@@ -17,7 +17,6 @@ import os
from kallithea.lib.hooks import log_pull_action
from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode
from kallithea.lib.vcs.backends.ssh import BaseSshHandler
@@ -33,15 +32,15 @@ class GitSshHandler(BaseSshHandler):
>>> import shlex
>>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).repo_name
- u'foo bar'
+ 'foo bar'
>>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).verb
'git-upload-pack'
>>> GitSshHandler.make(shlex.split(" git-upload-pack /blåbærgrød ")).repo_name # might not be necessary to support no quoting ... but we can
- u'bl\xe5b\xe6rgr\xf8d'
+ 'bl\xe5b\xe6rgr\xf8d'
>>> GitSshHandler.make(shlex.split('''git-upload-pack "/foo'bar"''')).repo_name
- u"foo'bar"
+ "foo'bar"
>>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).repo_name
- u'foo'
+ 'foo'
>>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).verb
'git-receive-pack'
@@ -56,7 +55,7 @@ class GitSshHandler(BaseSshHandler):
ssh_command_parts[0] in ['git-upload-pack', 'git-receive-pack'] and
ssh_command_parts[1].startswith('/')
):
- return cls(safe_unicode(ssh_command_parts[1][1:]), ssh_command_parts[0])
+ return cls(ssh_command_parts[1][1:], ssh_command_parts[0])
return None
@@ -70,7 +69,7 @@ class GitSshHandler(BaseSshHandler):
log_pull_action(ui=make_ui(), repo=self.db_repo.scm_instance._repo)
else: # probably verb 'git-receive-pack', action 'push'
if not self.allow_push:
- self.exit('Push access to %r denied' % safe_str(self.repo_name))
+ self.exit('Push access to %r denied' % self.repo_name)
# Note: push logging is handled by Git post-receive hook
# git shell is not a real shell but use shell inspired quoting *inside* the argument.
diff --git a/kallithea/lib/vcs/backends/git/workdir.py b/kallithea/lib/vcs/backends/git/workdir.py
--- a/kallithea/lib/vcs/backends/git/workdir.py
+++ b/kallithea/lib/vcs/backends/git/workdir.py
@@ -1,5 +1,6 @@
import re
+from kallithea.lib.utils2 import ascii_str, safe_str
from kallithea.lib.vcs.backends.base import BaseWorkdir
from kallithea.lib.vcs.exceptions import BranchDoesNotExistError, RepositoryError
@@ -7,9 +8,9 @@ from kallithea.lib.vcs.exceptions import
class GitWorkdir(BaseWorkdir):
def get_branch(self):
- headpath = self.repository._repo.refs.refpath('HEAD')
+ headpath = self.repository._repo.refs.refpath(b'HEAD')
try:
- content = open(headpath).read()
+ content = safe_str(open(headpath, 'rb').read())
match = re.match(r'^ref: refs/heads/(?P.+)\n$', content)
if match:
return match.groupdict()['branch']
@@ -20,7 +21,7 @@ class GitWorkdir(BaseWorkdir):
raise RepositoryError("Couldn't compute workdir's branch")
def get_changeset(self):
- wk_dir_id = self.repository._repo.refs.as_dict().get('HEAD')
+ wk_dir_id = ascii_str(self.repository._repo.refs.as_dict().get(b'HEAD'))
return self.repository.get_changeset(wk_dir_id)
def checkout_branch(self, branch=None):
diff --git a/kallithea/lib/vcs/backends/hg/__init__.py b/kallithea/lib/vcs/backends/hg/__init__.py
--- a/kallithea/lib/vcs/backends/hg/__init__.py
+++ b/kallithea/lib/vcs/backends/hg/__init__.py
@@ -9,6 +9,8 @@
:copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
"""
+from kallithea.lib.vcs.utils import hgcompat
+
from .changeset import MercurialChangeset
from .inmemory import MercurialInMemoryChangeset
from .repository import MercurialRepository
@@ -19,3 +21,5 @@ __all__ = [
'MercurialRepository', 'MercurialChangeset',
'MercurialInMemoryChangeset', 'MercurialWorkdir',
]
+
+hgcompat.monkey_do()
diff --git a/kallithea/lib/vcs/backends/hg/changeset.py b/kallithea/lib/vcs/backends/hg/changeset.py
--- a/kallithea/lib/vcs/backends/hg/changeset.py
+++ b/kallithea/lib/vcs/backends/hg/changeset.py
@@ -1,41 +1,44 @@
import os
import posixpath
+import mercurial.archival
+import mercurial.node
+import mercurial.obsutil
+
from kallithea.lib.vcs.backends.base import BaseChangeset
from kallithea.lib.vcs.conf import settings
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError
-from kallithea.lib.vcs.nodes import (
- AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
-from kallithea.lib.vcs.utils import date_fromtimestamp, safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import archival, hex, obsutil
+from kallithea.lib.vcs.nodes import (AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode,
+ SubModuleNode)
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_bytes, safe_str
from kallithea.lib.vcs.utils.lazy import LazyProperty
from kallithea.lib.vcs.utils.paths import get_dirs_for_path
class MercurialChangeset(BaseChangeset):
"""
- Represents state of the repository at the single revision.
+ Represents state of the repository at a revision.
"""
def __init__(self, repository, revision):
self.repository = repository
- assert isinstance(revision, basestring), repr(revision)
- self.raw_id = revision
- self._ctx = repository._repo[revision]
+ assert isinstance(revision, str), repr(revision)
+ self._ctx = repository._repo[ascii_bytes(revision)]
+ self.raw_id = ascii_str(self._ctx.hex())
self.revision = self._ctx._rev
self.nodes = {}
@LazyProperty
def tags(self):
- return map(safe_unicode, self._ctx.tags())
+ return [safe_str(tag) for tag in self._ctx.tags()]
@LazyProperty
def branch(self):
- return safe_unicode(self._ctx.branch())
+ return safe_str(self._ctx.branch())
@LazyProperty
def branches(self):
- return [safe_unicode(self._ctx.branch())]
+ return [safe_str(self._ctx.branch())]
@LazyProperty
def closesbranch(self):
@@ -47,17 +50,11 @@ class MercurialChangeset(BaseChangeset):
@LazyProperty
def bumped(self):
- try:
- return self._ctx.phasedivergent()
- except AttributeError: # renamed in Mercurial 4.6 (9fa874fb34e1)
- return self._ctx.bumped()
+ return self._ctx.phasedivergent()
@LazyProperty
def divergent(self):
- try:
- return self._ctx.contentdivergent()
- except AttributeError: # renamed in Mercurial 4.6 (8b2d7684407b)
- return self._ctx.divergent()
+ return self._ctx.contentdivergent()
@LazyProperty
def extinct(self):
@@ -65,10 +62,7 @@ class MercurialChangeset(BaseChangeset):
@LazyProperty
def unstable(self):
- try:
- return self._ctx.orphan()
- except AttributeError: # renamed in Mercurial 4.6 (03039ff3082b)
- return self._ctx.unstable()
+ return self._ctx.orphan()
@LazyProperty
def phase(self):
@@ -81,33 +75,30 @@ class MercurialChangeset(BaseChangeset):
@LazyProperty
def successors(self):
- successors = obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True)
- if successors:
- # flatten the list here handles both divergent (len > 1)
- # and the usual case (len = 1)
- successors = [hex(n)[:12] for sub in successors for n in sub if n != self._ctx.node()]
-
- return successors
+ successors = mercurial.obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True)
+ # flatten the list here handles both divergent (len > 1)
+ # and the usual case (len = 1)
+ return [safe_str(mercurial.node.hex(n)[:12]) for sub in successors for n in sub if n != self._ctx.node()]
@LazyProperty
def predecessors(self):
- return [hex(n)[:12] for n in obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())]
+ return [safe_str(mercurial.node.hex(n)[:12]) for n in mercurial.obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())]
@LazyProperty
def bookmarks(self):
- return map(safe_unicode, self._ctx.bookmarks())
+ return [safe_str(bookmark) for bookmark in self._ctx.bookmarks()]
@LazyProperty
def message(self):
- return safe_unicode(self._ctx.description())
+ return safe_str(self._ctx.description())
@LazyProperty
def committer(self):
- return safe_unicode(self.author)
+ return safe_str(self.author)
@LazyProperty
def author(self):
- return safe_unicode(self._ctx.user())
+ return safe_str(self._ctx.user())
@LazyProperty
def date(self):
@@ -127,7 +118,7 @@ class MercurialChangeset(BaseChangeset):
@LazyProperty
def _file_paths(self):
- return list(self._ctx)
+ return list(safe_str(f) for f in self._ctx)
@LazyProperty
def _dir_paths(self):
@@ -140,12 +131,6 @@ class MercurialChangeset(BaseChangeset):
return self._dir_paths + self._file_paths
@LazyProperty
- def id(self):
- if self.last:
- return u'tip'
- return self.short_id
-
- @LazyProperty
def short_id(self):
return self.raw_id[:12]
@@ -202,22 +187,11 @@ class MercurialChangeset(BaseChangeset):
return cs
def diff(self):
- # Only used for feed diffstat
- return ''.join(self._ctx.diff())
-
- def _fix_path(self, path):
- """
- Paths are stored without trailing slash so we need to get rid off it if
- needed. Also mercurial keeps filenodes as str so we need to decode
- from unicode to str
- """
- if path.endswith('/'):
- path = path.rstrip('/')
-
- return safe_str(path)
+ # Only used to feed diffstat
+ return b''.join(self._ctx.diff())
def _get_kind(self, path):
- path = self._fix_path(path)
+ path = path.rstrip('/')
if path in self._file_paths:
return NodeKind.FILE
elif path in self._dir_paths:
@@ -227,11 +201,11 @@ class MercurialChangeset(BaseChangeset):
% (path))
def _get_filectx(self, path):
- path = self._fix_path(path)
+ path = path.rstrip('/')
if self._get_kind(path) != NodeKind.FILE:
raise ChangesetError("File does not exist for revision %s at "
" '%s'" % (self.raw_id, path))
- return self._ctx.filectx(path)
+ return self._ctx.filectx(safe_bytes(path))
def _extract_submodules(self):
"""
@@ -245,10 +219,10 @@ class MercurialChangeset(BaseChangeset):
Returns stat mode of the file at the given ``path``.
"""
fctx = self._get_filectx(path)
- if 'x' in fctx.flags():
- return 0100755
+ if b'x' in fctx.flags():
+ return 0o100755
else:
- return 0100644
+ return 0o100644
def get_file_content(self, path):
"""
@@ -280,7 +254,7 @@ class MercurialChangeset(BaseChangeset):
cnt = 0
for cs in reversed([x for x in fctx.filelog()]):
cnt += 1
- hist.append(hex(fctx.filectx(cs).node()))
+ hist.append(mercurial.node.hex(fctx.filectx(cs).node()))
if limit is not None and cnt == limit:
break
@@ -292,13 +266,10 @@ class MercurialChangeset(BaseChangeset):
lineno, sha, changeset lazy loader and line
"""
annotations = self._get_filectx(path).annotate()
- try:
- annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations]
- except AttributeError: # annotateline was introduced in Mercurial 4.6 (b33b91ca2ec2)
- annotation_lines = [(aline.fctx, l) for aline, l in annotations]
- for i, (fctx, l) in enumerate(annotation_lines):
- sha = fctx.hex()
- yield (i + 1, sha, lambda sha=sha, l=l: self.repository.get_changeset(sha), l)
+ annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations]
+ for i, (fctx, line) in enumerate(annotation_lines):
+ sha = ascii_str(fctx.hex())
+ yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
def fill_archive(self, stream=None, kind='tgz', prefix=None,
subrepos=False):
@@ -316,11 +287,10 @@ class MercurialChangeset(BaseChangeset):
:raise ImproperArchiveTypeError: If given kind is wrong.
:raise VcsError: If given stream is None
"""
-
- allowed_kinds = settings.ARCHIVE_SPECS.keys()
+ allowed_kinds = settings.ARCHIVE_SPECS
if kind not in allowed_kinds:
raise ImproperArchiveTypeError('Archive kind not supported use one'
- 'of %s' % allowed_kinds)
+ 'of %s' % ' '.join(allowed_kinds))
if stream is None:
raise VCSError('You need to pass in a valid stream for filling'
@@ -333,8 +303,8 @@ class MercurialChangeset(BaseChangeset):
elif prefix.strip() == '':
raise VCSError("Prefix cannot be empty")
- archival.archive(self.repository._repo, stream, self.raw_id,
- kind, prefix=prefix, subrepos=subrepos)
+ mercurial.archival.archive(self.repository._repo, stream, ascii_bytes(self.raw_id),
+ safe_bytes(kind), prefix=safe_bytes(prefix), subrepos=subrepos)
def get_nodes(self, path):
"""
@@ -346,8 +316,7 @@ class MercurialChangeset(BaseChangeset):
if self._get_kind(path) != NodeKind.DIR:
raise ChangesetError("Directory does not exist for revision %s at "
" '%s'" % (self.revision, path))
- path = self._fix_path(path)
-
+ path = path.rstrip('/')
filenodes = [FileNode(f, changeset=self) for f in self._file_paths
if os.path.dirname(f) == path]
dirs = path == '' and '' or [d for d in self._dir_paths
@@ -356,18 +325,16 @@ class MercurialChangeset(BaseChangeset):
if os.path.dirname(d) == path]
als = self.repository.alias
- for k, vals in self._extract_submodules().iteritems():
+ for k, vals in self._extract_submodules().items():
#vals = url,rev,type
loc = vals[0]
cs = vals[1]
dirnodes.append(SubModuleNode(k, url=loc, changeset=cs,
alias=als))
nodes = dirnodes + filenodes
- # cache nodes
for node in nodes:
self.nodes[node.path] = node
nodes.sort()
-
return nodes
def get_node(self, path):
@@ -375,9 +342,7 @@ class MercurialChangeset(BaseChangeset):
Returns ``Node`` object from the given ``path``. If there is no node at
the given ``path``, ``ChangesetError`` would be raised.
"""
-
- path = self._fix_path(path)
-
+ path = path.rstrip('/')
if path not in self.nodes:
if path in self._file_paths:
node = FileNode(path, changeset=self)
@@ -406,21 +371,21 @@ class MercurialChangeset(BaseChangeset):
"""
Returns list of added ``FileNode`` objects.
"""
- return AddedFileNodesGenerator([n for n in self.status[1]], self)
+ return AddedFileNodesGenerator([safe_str(n) for n in self.status.added], self)
@property
def changed(self):
"""
Returns list of modified ``FileNode`` objects.
"""
- return ChangedFileNodesGenerator([n for n in self.status[0]], self)
+ return ChangedFileNodesGenerator([safe_str(n) for n in self.status.modified], self)
@property
def removed(self):
"""
Returns list of removed ``FileNode`` objects.
"""
- return RemovedFileNodesGenerator([n for n in self.status[2]], self)
+ return RemovedFileNodesGenerator([safe_str(n) for n in self.status.removed], self)
@LazyProperty
def extra(self):
diff --git a/kallithea/lib/vcs/backends/hg/inmemory.py b/kallithea/lib/vcs/backends/hg/inmemory.py
--- a/kallithea/lib/vcs/backends/hg/inmemory.py
+++ b/kallithea/lib/vcs/backends/hg/inmemory.py
@@ -1,14 +1,17 @@
import datetime
+import mercurial.context
+import mercurial.node
+
from kallithea.lib.vcs.backends.base import BaseInMemoryChangeset
from kallithea.lib.vcs.exceptions import RepositoryError
-from kallithea.lib.vcs.utils.hgcompat import hex, memctx, memfilectx, tolocal
+from kallithea.lib.vcs.utils import ascii_str, safe_bytes, safe_str
class MercurialInMemoryChangeset(BaseInMemoryChangeset):
def commit(self, message, author, parents=None, branch=None, date=None,
- **kwargs):
+ **kwargs):
"""
Performs in-memory commit (doesn't check workdir in any way) and
returns newly created ``Changeset``. Updates repository's
@@ -27,21 +30,22 @@ class MercurialInMemoryChangeset(BaseInM
"""
self.check_integrity(parents)
+ if not isinstance(message, str):
+ raise RepositoryError('message must be a str - got %r' % type(message))
+ if not isinstance(author, str):
+ raise RepositoryError('author must be a str - got %r' % type(author))
+
from .repository import MercurialRepository
- if not isinstance(message, unicode) or not isinstance(author, unicode):
- raise RepositoryError('Given message and author needs to be '
- 'an instance got %r & %r instead'
- % (type(message), type(author)))
-
if branch is None:
branch = MercurialRepository.DEFAULT_BRANCH_NAME
- kwargs['branch'] = branch
+ kwargs[b'branch'] = safe_bytes(branch)
- def filectxfn(_repo, memctx, path):
+ def filectxfn(_repo, memctx, bytes_path):
"""
- Marks given path as added/changed/removed in a given _repo. This is
- for internal mercurial commit function.
+ Callback from Mercurial, returning ctx to commit for the given
+ path.
"""
+ path = safe_str(bytes_path)
# check if this path is removed
if path in (node.path for node in self.removed):
@@ -50,9 +54,8 @@ class MercurialInMemoryChangeset(BaseInM
# check if this path is added
for node in self.added:
if node.path == path:
- return memfilectx(_repo, memctx, path=node.path,
- data=(node.content.encode('utf-8')
- if not node.is_binary else node.content),
+ return mercurial.context.memfilectx(_repo, memctx, path=bytes_path,
+ data=node.content,
islink=False,
isexec=node.is_executable,
copysource=False)
@@ -60,14 +63,13 @@ class MercurialInMemoryChangeset(BaseInM
# or changed
for node in self.changed:
if node.path == path:
- return memfilectx(_repo, memctx, path=node.path,
- data=(node.content.encode('utf-8')
- if not node.is_binary else node.content),
+ return mercurial.context.memfilectx(_repo, memctx, path=bytes_path,
+ data=node.content,
islink=False,
isexec=node.is_executable,
copysource=False)
- raise RepositoryError("Given path haven't been marked as added,"
+ raise RepositoryError("Given path haven't been marked as added, "
"changed or removed (%s)" % path)
parents = [None, None]
@@ -76,22 +78,21 @@ class MercurialInMemoryChangeset(BaseInM
parents[i] = parent._ctx.node()
if date and isinstance(date, datetime.datetime):
- date = date.strftime('%a, %d %b %Y %H:%M:%S')
+ date = safe_bytes(date.strftime('%a, %d %b %Y %H:%M:%S'))
- commit_ctx = memctx(repo=self.repository._repo,
+ commit_ctx = mercurial.context.memctx(
+ repo=self.repository._repo,
parents=parents,
- text='',
- files=self.get_paths(),
+ text=b'',
+ files=[safe_bytes(x) for x in self.get_paths()],
filectxfn=filectxfn,
- user=author,
+ user=safe_bytes(author),
date=date,
extra=kwargs)
- loc = lambda u: tolocal(u.encode('utf-8'))
-
# injecting given _repo params
- commit_ctx._text = loc(message)
- commit_ctx._user = loc(author)
+ commit_ctx._text = safe_bytes(message)
+ commit_ctx._user = safe_bytes(author)
commit_ctx._date = date
# TODO: Catch exceptions!
@@ -100,9 +101,8 @@ class MercurialInMemoryChangeset(BaseInM
self._commit_ctx = commit_ctx # For reference
# Update vcs repository object & recreate mercurial _repo
# new_ctx = self.repository._repo[node]
- # new_tip = self.repository.get_changeset(new_ctx.hex())
- new_id = hex(n)
- self.repository.revisions.append(new_id)
+ # new_tip = ascii_str(self.repository.get_changeset(new_ctx.hex()))
+ self.repository.revisions.append(ascii_str(mercurial.node.hex(n)))
self._repo = self.repository._get_repo(create=False)
self.repository.branches = self.repository._get_branches()
tip = self.repository.get_changeset()
diff --git a/kallithea/lib/vcs/backends/hg/repository.py b/kallithea/lib/vcs/backends/hg/repository.py
--- a/kallithea/lib/vcs/backends/hg/repository.py
+++ b/kallithea/lib/vcs/backends/hg/repository.py
@@ -13,16 +13,33 @@ import datetime
import logging
import os
import time
-import urllib
-import urllib2
+import urllib.error
+import urllib.parse
+import urllib.request
from collections import OrderedDict
+import mercurial.commands
+import mercurial.error
+import mercurial.exchange
+import mercurial.hg
+import mercurial.hgweb
+import mercurial.httppeer
+import mercurial.localrepo
+import mercurial.match
+import mercurial.mdiff
+import mercurial.node
+import mercurial.patch
+import mercurial.scmutil
+import mercurial.sshpeer
+import mercurial.tags
+import mercurial.ui
+import mercurial.url
+import mercurial.util
+
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
-from kallithea.lib.vcs.exceptions import (
- BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
-from kallithea.lib.vcs.utils import author_email, author_name, date_fromtimestamp, makedate, safe_str, safe_unicode
-from kallithea.lib.vcs.utils.hgcompat import (
- Abort, RepoError, RepoLookupError, clone, diffopts, get_contact, hex, hg_url, httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepo, match_exact, nullid, patch, peer, scmutil, sshpeer, tag, ui)
+from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
+ TagDoesNotExistError, VCSError)
+from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
from kallithea.lib.vcs.utils.lazy import LazyProperty
from kallithea.lib.vcs.utils.paths import abspath
@@ -60,9 +77,8 @@ class MercurialRepository(BaseRepository
raise VCSError('Mercurial backend requires repository path to '
'be instance of got %s instead' %
type(repo_path))
-
self.path = abspath(repo_path)
- self.baseui = baseui or ui.ui()
+ self.baseui = baseui or mercurial.ui.ui()
# We've set path and ui, now we can set _repo itself
self._repo = self._get_repo(create, src_url, update_after_clone)
@@ -115,14 +131,13 @@ class MercurialRepository(BaseRepository
return {}
bt = OrderedDict()
- for bn, _heads, tip, isclosed in sorted(self._repo.branchmap().iterbranches()):
+ for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()):
if isclosed:
if closed:
- bt[safe_unicode(bn)] = hex(tip)
+ bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
else:
if normal:
- bt[safe_unicode(bn)] = hex(tip)
-
+ bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
return bt
@LazyProperty
@@ -136,11 +151,11 @@ class MercurialRepository(BaseRepository
if self._empty:
return {}
- sortkey = lambda ctx: ctx[0] # sort by name
- _tags = [(safe_unicode(n), hex(h),) for n, h in
- self._repo.tags().items()]
-
- return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
+ return OrderedDict(sorted(
+ ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()),
+ reverse=True,
+ key=lambda x: x[0], # sort by name
+ ))
def tag(self, name, user, revision=None, message=None, date=None,
**kwargs):
@@ -165,12 +180,12 @@ class MercurialRepository(BaseRepository
changeset.short_id)
if date is None:
- date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')
+ date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
try:
- tag(self._repo, name, changeset._ctx.node(), message, local, user, date)
- except Abort as e:
- raise RepositoryError(e.message)
+ mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date)
+ except mercurial.error.Abort as e:
+ raise RepositoryError(e.args[0])
# Reinitialize tags
self.tags = self._get_tags()
@@ -194,14 +209,14 @@ class MercurialRepository(BaseRepository
if message is None:
message = "Removed tag %s" % name
if date is None:
- date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S')
+ date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
local = False
try:
- tag(self._repo, name, nullid, message, local, user, date)
+ mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.commands.nullid, safe_bytes(message), local, safe_bytes(user), date)
self.tags = self._get_tags()
- except Abort as e:
- raise RepositoryError(e.message)
+ except mercurial.error.Abort as e:
+ raise RepositoryError(e.args[0])
@LazyProperty
def bookmarks(self):
@@ -214,14 +229,14 @@ class MercurialRepository(BaseRepository
if self._empty:
return {}
- sortkey = lambda ctx: ctx[0] # sort by name
- _bookmarks = [(safe_unicode(n), hex(h),) for n, h in
- self._repo._bookmarks.items()]
- return OrderedDict(sorted(_bookmarks, key=sortkey, reverse=True))
+ return OrderedDict(sorted(
+ ((safe_str(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()),
+ reverse=True,
+ key=lambda x: x[0], # sort by name
+ ))
def _get_all_revisions(self):
-
- return [self._repo[x].hex() for x in self._repo.filtered('visible').changelog.revs()]
+ return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()]
def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
context=3):
@@ -257,12 +272,12 @@ class MercurialRepository(BaseRepository
self.get_changeset(rev1)
self.get_changeset(rev2)
if path:
- file_filter = match_exact(path)
+ file_filter = mercurial.match.exact(path)
else:
file_filter = None
- return ''.join(patch.diff(self._repo, rev1, rev2, match=file_filter,
- opts=diffopts(git=True,
+ return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter,
+ opts=mercurial.mdiff.diffopts(git=True,
showfunc=True,
ignorews=ignore_whitespace,
context=context)))
@@ -279,42 +294,46 @@ class MercurialRepository(BaseRepository
when the return code is non 200
"""
# check first if it's not an local url
- if os.path.isdir(url) or url.startswith('file:'):
+ url = safe_bytes(url)
+ if os.path.isdir(url) or url.startswith(b'file:'):
return True
- if url.startswith('ssh:'):
+ if url.startswith(b'ssh:'):
# in case of invalid uri or authentication issues, sshpeer will
# throw an exception.
- sshpeer.instance(repoui or ui.ui(), url, False).lookup('tip')
+ mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
return True
url_prefix = None
- if '+' in url[:url.find('://')]:
- url_prefix, url = url.split('+', 1)
+ if b'+' in url[:url.find(b'://')]:
+ url_prefix, url = url.split(b'+', 1)
handlers = []
- url_obj = hg_url(url)
+ url_obj = mercurial.util.url(url)
test_uri, authinfo = url_obj.authinfo()
- url_obj.passwd = '*****'
+ url_obj.passwd = b'*****'
cleaned_uri = str(url_obj)
if authinfo:
# create a password manager
- passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
passmgr.add_password(*authinfo)
- handlers.extend((httpbasicauthhandler(passmgr),
- httpdigestauthhandler(passmgr)))
+ handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
+ mercurial.url.httpdigestauthhandler(passmgr)))
- o = urllib2.build_opener(*handlers)
+ o = urllib.request.build_opener(*handlers)
o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
('Accept', 'application/mercurial-0.1')]
- q = {"cmd": 'between'}
- q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
- qs = '?%s' % urllib.urlencode(q)
- cu = "%s%s" % (test_uri, qs)
- req = urllib2.Request(cu, None, {})
+ req = urllib.request.Request(
+ "%s?%s" % (
+ test_uri,
+ urllib.parse.urlencode({
+ 'cmd': 'between',
+ 'pairs': "%s-%s" % ('0' * 40, '0' * 40),
+ })
+ ))
try:
resp = o.open(req)
@@ -322,14 +341,14 @@ class MercurialRepository(BaseRepository
raise Exception('Return Code is not 200')
except Exception as e:
# means it cannot be cloned
- raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
+ raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
if not url_prefix: # skip svn+http://... (and git+... too)
# now check if it's a proper hg repo
try:
- httppeer.instance(repoui or ui.ui(), url, False).lookup('tip')
+ mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
except Exception as e:
- raise urllib2.URLError(
+ raise urllib.error.URLError(
"url [%s] does not look like an hg repo org_exc: %s"
% (cleaned_uri, e))
@@ -345,26 +364,25 @@ class MercurialRepository(BaseRepository
location at given clone_point. Additionally it'll make update to
working copy accordingly to ``update_after_clone`` flag
"""
-
try:
if src_url:
- url = safe_str(self._get_url(src_url))
+ url = safe_bytes(self._get_url(src_url))
opts = {}
if not update_after_clone:
opts.update({'noupdate': True})
MercurialRepository._check_url(url, self.baseui)
- clone(self.baseui, url, self.path, **opts)
+ mercurial.commands.clone(self.baseui, url, safe_bytes(self.path), **opts)
# Don't try to create if we've already cloned repo
create = False
- return localrepo.instance(self.baseui, self.path, create=create)
- except (Abort, RepoError) as err:
+ return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create)
+ except (mercurial.error.Abort, mercurial.error.RepoError) as err:
if create:
msg = "Cannot create repository at %s. Original error was %s" \
- % (self.path, err)
+ % (self.name, err)
else:
msg = "Not valid repository at %s. Original error was %s" \
- % (self.path, err)
+ % (self.name, err)
raise RepositoryError(msg)
@LazyProperty
@@ -373,15 +391,13 @@ class MercurialRepository(BaseRepository
@LazyProperty
def description(self):
- undefined_description = u'unknown'
- _desc = self._repo.ui.config('web', 'description', None, untrusted=True)
- return safe_unicode(_desc or undefined_description)
+ _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True)
+ return safe_str(_desc or b'unknown')
@LazyProperty
def contact(self):
- undefined_contact = u'Unknown'
- return safe_unicode(get_contact(self._repo.ui.config)
- or undefined_contact)
+ return safe_str(mercurial.hgweb.common.get_contact(self._repo.ui.config)
+ or b'Unknown')
@LazyProperty
def last_change(self):
@@ -404,39 +420,33 @@ class MercurialRepository(BaseRepository
def _get_revision(self, revision):
"""
- Gets an ID revision given as str. This will always return a full
- 40 char revision number
+ Given any revision identifier, returns a 40 char string with revision hash.
:param revision: str or int or None
"""
- if isinstance(revision, unicode):
- revision = safe_str(revision)
-
if self._empty:
raise EmptyRepositoryError("There are no changesets yet")
if revision in [-1, None]:
- revision = 'tip'
+ revision = b'tip'
+ elif isinstance(revision, str):
+ revision = safe_bytes(revision)
try:
if isinstance(revision, int):
- return self._repo[revision].hex()
- try:
- return scmutil.revsymbol(self._repo, revision).hex()
- except AttributeError: # revsymbol was introduced in Mercurial 4.6
- return self._repo[revision].hex()
- except (IndexError, ValueError, RepoLookupError, TypeError):
- msg = ("Revision %s does not exist for %s" % (revision, self))
+ return ascii_str(self._repo[revision].hex())
+ return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex())
+ except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError):
+ msg = "Revision %r does not exist for %s" % (safe_str(revision), self.name)
raise ChangesetDoesNotExistError(msg)
except (LookupError, ):
- msg = ("Ambiguous identifier `%s` for %s" % (revision, self))
+ msg = "Ambiguous identifier `%s` for %s" % (safe_str(revision), self.name)
raise ChangesetDoesNotExistError(msg)
def get_ref_revision(self, ref_type, ref_name):
"""
Returns revision number for the given reference.
"""
- ref_name = safe_str(ref_name)
if ref_type == 'rev' and not ref_name.strip('0'):
return self.EMPTY_CHANGESET
# lookup up the exact node id
@@ -451,17 +461,13 @@ class MercurialRepository(BaseRepository
try:
revs = self._repo.revs(rev_spec, ref_name, ref_name)
except LookupError:
- msg = ("Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name))
+ msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)
raise ChangesetDoesNotExistError(msg)
- except RepoLookupError:
- msg = ("Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name))
+ except mercurial.error.RepoLookupError:
+ msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)
raise ChangesetDoesNotExistError(msg)
if revs:
- try:
- revision = revs.last()
- except AttributeError:
- # removed in hg 3.2
- revision = revs[-1]
+ revision = revs.last()
else:
# TODO: just report 'not found'?
revision = ref_name
@@ -469,39 +475,29 @@ class MercurialRepository(BaseRepository
return self._get_revision(revision)
def _get_archives(self, archive_name='tip'):
- allowed = self.baseui.configlist("web", "allow_archive",
+ allowed = self.baseui.configlist(b"web", b"allow_archive",
untrusted=True)
- for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
- if i[0] in allowed or self._repo.ui.configbool("web",
- "allow" + i[0],
+ for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]:
+ if name in allowed or self._repo.ui.configbool(b"web",
+ b"allow" + name,
untrusted=True):
- yield {"type": i[0], "extension": i[1], "node": archive_name}
+ yield {"type": safe_str(name), "extension": ext, "node": archive_name}
def _get_url(self, url):
"""
- Returns normalized url. If schema is not given, would fall
- to filesystem
- (``file:///``) schema.
+ Returns normalized url. If scheme is not given, fall back to
+ filesystem (``file:///``) scheme.
"""
- url = safe_str(url)
if url != 'default' and '://' not in url:
- url = "file:" + urllib.pathname2url(url)
+ url = "file:" + urllib.request.pathname2url(url)
return url
- def get_hook_location(self):
- """
- returns absolute path to location where hooks are stored
- """
- return os.path.join(self.path, '.hg', '.hgrc')
-
def get_changeset(self, revision=None):
"""
Returns ``MercurialChangeset`` object representing repository's
changeset at the given ``revision``.
"""
- revision = self._get_revision(revision)
- changeset = MercurialChangeset(repository=self, revision=revision)
- return changeset
+ return MercurialChangeset(repository=self, revision=self._get_revision(revision))
def get_changesets(self, start=None, end=None, start_date=None,
end_date=None, branch_name=None, reverse=False, max_revisions=None):
@@ -517,35 +513,35 @@ class MercurialRepository(BaseRepository
:param reversed: return changesets in reversed order
"""
start_raw_id = self._get_revision(start)
- start_pos = self.revisions.index(start_raw_id) if start else None
+ start_pos = None if start is None else self.revisions.index(start_raw_id)
end_raw_id = self._get_revision(end)
- end_pos = self.revisions.index(end_raw_id) if end else None
+ end_pos = None if end is None else self.revisions.index(end_raw_id)
- if None not in [start, end] and start_pos > end_pos:
+ if start_pos is not None and end_pos is not None and start_pos > end_pos:
raise RepositoryError("Start revision '%s' cannot be "
"after end revision '%s'" % (start, end))
- if branch_name and branch_name not in self.allbranches.keys():
- msg = ("Branch %s not found in %s" % (branch_name, self))
+ if branch_name and branch_name not in self.allbranches:
+ msg = "Branch %r not found in %s" % (branch_name, self.name)
raise BranchDoesNotExistError(msg)
if end_pos is not None:
end_pos += 1
# filter branches
filter_ = []
if branch_name:
- filter_.append('branch("%s")' % safe_str(branch_name))
+ filter_.append(b'branch("%s")' % safe_bytes(branch_name))
if start_date:
- filter_.append('date(">%s")' % start_date)
+ filter_.append(b'date(">%s")' % safe_bytes(str(start_date)))
if end_date:
- filter_.append('date("<%s")' % end_date)
+ filter_.append(b'date("<%s")' % safe_bytes(str(end_date)))
if filter_ or max_revisions:
if filter_:
- revspec = ' and '.join(filter_)
+ revspec = b' and '.join(filter_)
else:
- revspec = 'all()'
+ revspec = b'all()'
if max_revisions:
- revspec = 'limit(%s, %s)' % (revspec, max_revisions)
- revisions = scmutil.revrange(self._repo, [revspec])
+ revspec = b'limit(%s, %d)' % (revspec, max_revisions)
+ revisions = mercurial.scmutil.revrange(self._repo, [revspec])
else:
revisions = self.revisions
@@ -553,7 +549,7 @@ class MercurialRepository(BaseRepository
# would be to get rid of this function entirely and use revsets
revs = list(revisions)[start_pos:end_pos]
if reverse:
- revs = reversed(revs)
+ revs.reverse()
return CollectionGenerator(self, revs)
@@ -561,15 +557,10 @@ class MercurialRepository(BaseRepository
"""
Tries to pull changes from external location.
"""
- url = self._get_url(url)
- other = peer(self._repo, {}, url)
+ other = mercurial.hg.peer(self._repo, {}, safe_bytes(self._get_url(url)))
try:
- # hg 3.2 moved push / pull to exchange module
- from mercurial import exchange
- exchange.pull(self._repo, other, heads=None, force=None)
- except ImportError:
- self._repo.pull(other, heads=None, force=None)
- except Abort as err:
+ mercurial.exchange.pull(self._repo, other, heads=None, force=None)
+ except mercurial.error.Abort as err:
# Propagate error but with vcs's type
raise RepositoryError(str(err))
@@ -591,15 +582,16 @@ class MercurialRepository(BaseRepository
"""
if config_file is None:
config_file = []
- elif isinstance(config_file, basestring):
+ elif isinstance(config_file, str):
config_file = [config_file]
config = self._repo.ui
if config_file:
- config = ui.ui()
+ config = mercurial.ui.ui()
for path in config_file:
- config.readconfig(path)
- return config.config(section, name)
+ config.readconfig(safe_bytes(path))
+ value = config.config(safe_bytes(section), safe_bytes(name))
+ return value if value is None else safe_str(value)
def get_user_name(self, config_file=None):
"""
diff --git a/kallithea/lib/vcs/backends/hg/ssh.py b/kallithea/lib/vcs/backends/hg/ssh.py
--- a/kallithea/lib/vcs/backends/hg/ssh.py
+++ b/kallithea/lib/vcs/backends/hg/ssh.py
@@ -14,18 +14,12 @@
import logging
-from mercurial import hg
+import mercurial.hg
+import mercurial.wireprotoserver
from kallithea.lib.utils import make_ui
-from kallithea.lib.utils2 import safe_str, safe_unicode
from kallithea.lib.vcs.backends.ssh import BaseSshHandler
-
-
-try:
- from mercurial.wireprotoserver import sshserver
-except ImportError:
- from mercurial.sshserver import sshserver # moved in Mercurial 4.6 (1bf5263fe5cc)
-
+from kallithea.lib.vcs.utils import safe_bytes
log = logging.getLogger(__name__)
@@ -40,11 +34,11 @@ class MercurialSshHandler(BaseSshHandler
>>> import shlex
>>> MercurialSshHandler.make(shlex.split('hg -R "foo bar" serve --stdio')).repo_name
- u'foo bar'
+ 'foo bar'
>>> MercurialSshHandler.make(shlex.split(' hg -R blåbærgrød serve --stdio ')).repo_name
- u'bl\xe5b\xe6rgr\xf8d'
+ 'bl\xe5b\xe6rgr\xf8d'
>>> MercurialSshHandler.make(shlex.split('''hg -R 'foo"bar' serve --stdio''')).repo_name
- u'foo"bar'
+ 'foo"bar'
>>> MercurialSshHandler.make(shlex.split('/bin/hg -R "foo" serve --stdio'))
>>> MercurialSshHandler.make(shlex.split('''hg -R "foo"bar" serve --stdio''')) # ssh-serve will report: Error parsing SSH command "...": invalid syntax
@@ -53,7 +47,7 @@ class MercurialSshHandler(BaseSshHandler
>>> MercurialSshHandler.make(shlex.split('git-upload-pack "/foo"')) # not handled here
"""
if ssh_command_parts[:2] == ['hg', '-R'] and ssh_command_parts[3:] == ['serve', '--stdio']:
- return cls(safe_unicode(ssh_command_parts[2]))
+ return cls(ssh_command_parts[2])
return None
@@ -61,9 +55,9 @@ class MercurialSshHandler(BaseSshHandler
# Note: we want a repo with config based on .hg/hgrc and can thus not use self.db_repo.scm_instance._repo.ui
baseui = make_ui(repo_path=self.db_repo.repo_full_path)
if not self.allow_push:
- baseui.setconfig('hooks', 'pretxnopen._ssh_reject', 'python:kallithea.lib.hooks.rejectpush')
- baseui.setconfig('hooks', 'prepushkey._ssh_reject', 'python:kallithea.lib.hooks.rejectpush')
+ baseui.setconfig(b'hooks', b'pretxnopen._ssh_reject', b'python:kallithea.lib.hooks.rejectpush')
+ baseui.setconfig(b'hooks', b'prepushkey._ssh_reject', b'python:kallithea.lib.hooks.rejectpush')
- repo = hg.repository(baseui, safe_str(self.db_repo.repo_full_path))
+ repo = mercurial.hg.repository(baseui, safe_bytes(self.db_repo.repo_full_path))
log.debug("Starting Mercurial sshserver for %s", self.db_repo.repo_full_path)
- sshserver(baseui, repo).serve_forever()
+ mercurial.wireprotoserver.sshserver(baseui, repo).serve_forever()
diff --git a/kallithea/lib/vcs/backends/hg/workdir.py b/kallithea/lib/vcs/backends/hg/workdir.py
--- a/kallithea/lib/vcs/backends/hg/workdir.py
+++ b/kallithea/lib/vcs/backends/hg/workdir.py
@@ -1,15 +1,17 @@
+import mercurial.merge
+
from kallithea.lib.vcs.backends.base import BaseWorkdir
from kallithea.lib.vcs.exceptions import BranchDoesNotExistError
-from kallithea.lib.vcs.utils.hgcompat import hg_merge
+from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_str
class MercurialWorkdir(BaseWorkdir):
def get_branch(self):
- return self.repository._repo.dirstate.branch()
+ return safe_str(self.repository._repo.dirstate.branch())
def get_changeset(self):
- wk_dir_id = self.repository._repo[None].parents()[0].hex()
+ wk_dir_id = ascii_str(self.repository._repo[None].parents()[0].hex())
return self.repository.get_changeset(wk_dir_id)
def checkout_branch(self, branch=None):
@@ -19,4 +21,4 @@ class MercurialWorkdir(BaseWorkdir):
raise BranchDoesNotExistError
raw_id = self.repository.branches[branch]
- hg_merge.update(self.repository._repo, raw_id, False, False, None)
+ mercurial.merge.update(self.repository._repo, ascii_bytes(raw_id), False, False, None)
diff --git a/kallithea/lib/vcs/backends/ssh.py b/kallithea/lib/vcs/backends/ssh.py
--- a/kallithea/lib/vcs/backends/ssh.py
+++ b/kallithea/lib/vcs/backends/ssh.py
@@ -24,7 +24,7 @@ import logging
import sys
from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
-from kallithea.lib.utils2 import safe_str, set_hook_environment
+from kallithea.lib.utils2 import set_hook_environment
from kallithea.model.db import Repository, User, UserSshKeys
from kallithea.model.meta import Session
@@ -82,7 +82,7 @@ class BaseSshHandler(object):
elif HasPermissionAnyMiddleware('repository.read')(self.authuser, self.repo_name):
self.allow_push = False
else:
- self.exit('Access to %r denied' % safe_str(self.repo_name))
+ self.exit('Access to %r denied' % self.repo_name)
self.db_repo = Repository.get_by_repo_name(self.repo_name)
if self.db_repo is None:
diff --git a/kallithea/lib/vcs/conf/settings.py b/kallithea/lib/vcs/conf/settings.py
--- a/kallithea/lib/vcs/conf/settings.py
+++ b/kallithea/lib/vcs/conf/settings.py
@@ -1,24 +1,7 @@
-import os
-import tempfile
-
from kallithea.lib.vcs.utils import aslist
-from kallithea.lib.vcs.utils.paths import get_user_home
-abspath = lambda * p: os.path.abspath(os.path.join(*p))
-
-VCSRC_PATH = os.environ.get('VCSRC_PATH')
-
-if not VCSRC_PATH:
- HOME_ = get_user_home()
- if not HOME_:
- HOME_ = tempfile.gettempdir()
-
-VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc')
-if os.path.isdir(VCSRC_PATH):
- VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py')
-
-# list of default encoding used in safe_unicode/safe_str methods
+# list of default encoding used in safe_str/safe_bytes methods
DEFAULT_ENCODINGS = aslist('utf-8')
# path to git executable run by run_git_command function
diff --git a/kallithea/lib/vcs/exceptions.py b/kallithea/lib/vcs/exceptions.py
--- a/kallithea/lib/vcs/exceptions.py
+++ b/kallithea/lib/vcs/exceptions.py
@@ -30,10 +30,6 @@ class TagDoesNotExistError(RepositoryErr
pass
-class BranchAlreadyExistError(RepositoryError):
- pass
-
-
class BranchDoesNotExistError(RepositoryError):
pass
@@ -50,10 +46,6 @@ class CommitError(RepositoryError):
pass
-class NothingChangedError(CommitError):
- pass
-
-
class NodeError(VCSError):
pass
@@ -88,7 +80,3 @@ class NodeAlreadyRemovedError(CommitErro
class ImproperArchiveTypeError(VCSError):
pass
-
-
-class CommandError(VCSError):
- pass
diff --git a/kallithea/lib/vcs/nodes.py b/kallithea/lib/vcs/nodes.py
--- a/kallithea/lib/vcs/nodes.py
+++ b/kallithea/lib/vcs/nodes.py
@@ -9,13 +9,14 @@
:copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
"""
+import functools
import mimetypes
import posixpath
import stat
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.vcs.exceptions import NodeError, RemovedFileNodeError
-from kallithea.lib.vcs.utils import safe_str, safe_unicode
+from kallithea.lib.vcs.utils import safe_bytes, safe_str
from kallithea.lib.vcs.utils.lazy import LazyProperty
@@ -26,10 +27,10 @@ class NodeKind:
class NodeState:
- ADDED = u'added'
- CHANGED = u'changed'
- NOT_CHANGED = u'not changed'
- REMOVED = u'removed'
+ ADDED = 'added'
+ CHANGED = 'changed'
+ NOT_CHANGED = 'not changed'
+ REMOVED = 'removed'
class NodeGeneratorBase(object):
@@ -44,11 +45,9 @@ class NodeGeneratorBase(object):
self.cs = cs
self.current_paths = current_paths
- def __call__(self):
- return [n for n in self]
-
- def __getslice__(self, i, j):
- for p in self.current_paths[i:j]:
+ def __getitem__(self, key):
+ assert isinstance(key, slice), key
+ for p in self.current_paths[key]:
yield self.cs.get_node(p)
def __len__(self):
@@ -81,11 +80,13 @@ class RemovedFileNodesGenerator(NodeGene
for p in self.current_paths:
yield RemovedFileNode(path=p)
- def __getslice__(self, i, j):
- for p in self.current_paths[i:j]:
+ def __getitem__(self, key):
+ assert isinstance(key, slice), key
+ for p in self.current_paths[key]:
yield RemovedFileNode(path=p)
+@functools.total_ordering
class Node(object):
"""
Simplest class representing file or directory on repository. SCM backends
@@ -101,7 +102,7 @@ class Node(object):
if path.startswith('/'):
raise NodeError("Cannot initialize Node objects with slash at "
"the beginning as only relative paths are supported")
- self.path = safe_str(path.rstrip('/')) # we store paths as str
+ self.path = path.rstrip('/')
if path == '' and kind != NodeKind.DIR:
raise NodeError("Only DirNode and its subclasses may be "
"initialized with empty path")
@@ -120,67 +121,34 @@ class Node(object):
return None
@LazyProperty
- def unicode_path(self):
- return safe_unicode(self.path)
-
- @LazyProperty
def name(self):
"""
Returns name of the node so if its path
then only last part is returned.
"""
- return safe_unicode(self.path.rstrip('/').split('/')[-1])
-
- def _get_kind(self):
- return self._kind
-
- def _set_kind(self, kind):
- if hasattr(self, '_kind'):
- raise NodeError("Cannot change node's kind")
- else:
- self._kind = kind
- # Post setter check (path's trailing slash)
- if self.path.endswith('/'):
- raise NodeError("Node's path cannot end with slash")
-
- kind = property(_get_kind, _set_kind)
-
- def __cmp__(self, other):
- """
- Comparator using name of the node, needed for quick list sorting.
- """
- kind_cmp = cmp(self.kind, other.kind)
- if kind_cmp:
- return kind_cmp
- return cmp(self.name, other.name)
+ return self.path.rstrip('/').split('/')[-1]
def __eq__(self, other):
- for attr in ['name', 'path', 'kind']:
- if getattr(self, attr) != getattr(other, attr):
- return False
- if self.is_file():
- if self.content != other.content:
- return False
- else:
- # For DirNode's check without entering each dir
- self_nodes_paths = list(sorted(n.path for n in self.nodes))
- other_nodes_paths = list(sorted(n.path for n in self.nodes))
- if self_nodes_paths != other_nodes_paths:
- return False
- return True
+ if type(self) is not type(other):
+ return False
+ if self.kind != other.kind:
+ return False
+ if self.path != other.path:
+ return False
- def __nq__(self, other):
- return not self.__eq__(other)
+ def __lt__(self, other):
+ if self.kind < other.kind:
+ return True
+ if self.kind > other.kind:
+ return False
+ if self.path < other.path:
+ return True
+ if self.path > other.path:
+ return False
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, self.path)
- def __str__(self):
- return self.__repr__()
-
- def __unicode__(self):
- return self.name
-
def get_parent_path(self):
"""
Returns node's parent path or empty string if node is root.
@@ -258,8 +226,24 @@ class FileNode(Node):
raise NodeError("Cannot use both content and changeset")
super(FileNode, self).__init__(path, kind=NodeKind.FILE)
self.changeset = changeset
+ if not isinstance(content, bytes) and content is not None:
+ # File content is one thing that inherently must be bytes ... but
+ # VCS module tries to be "user friendly" and support unicode ...
+ content = safe_bytes(content)
self._content = content
- self._mode = mode or 0100644
+ self._mode = mode or 0o100644
+
+ def __eq__(self, other):
+ eq = super(FileNode, self).__eq__(other)
+ if eq is not None:
+ return eq
+ return self.content == other.content
+
+ def __lt__(self, other):
+ lt = super(FileNode, self).__lt__(other)
+ if lt is not None:
+ return lt
+ return self.content < other.content
@LazyProperty
def mode(self):
@@ -273,25 +257,17 @@ class FileNode(Node):
mode = self._mode
return mode
- def _get_content(self):
+ @property
+ def content(self):
+ """
+ Lazily returns the byte content of the FileNode.
+ """
if self.changeset:
content = self.changeset.get_file_content(self.path)
else:
content = self._content
return content
- @property
- def content(self):
- """
- Returns lazily content of the FileNode. If possible, would try to
- decode content from UTF-8.
- """
- content = self._get_content()
-
- if bool(content and '\0' in content):
- return content
- return safe_unicode(content)
-
@LazyProperty
def size(self):
if self.changeset:
@@ -361,7 +337,7 @@ class FileNode(Node):
"""
from pygments import lexers
try:
- lexer = lexers.guess_lexer_for_filename(self.name, self.content, stripnl=False)
+ lexer = lexers.guess_lexer_for_filename(self.name, safe_str(self.content), stripnl=False)
except lexers.ClassNotFound:
lexer = lexers.TextLexer(stripnl=False)
# returns first alias
@@ -409,8 +385,7 @@ class FileNode(Node):
"""
Returns True if file has binary content.
"""
- _bin = '\0' in self._get_content()
- return _bin
+ return b'\0' in self.content
def is_browser_compatible_image(self):
return self.mimetype in [
@@ -488,10 +463,23 @@ class DirNode(Node):
self.changeset = changeset
self._nodes = nodes
- @LazyProperty
- def content(self):
- raise NodeError("%s represents a dir and has no ``content`` attribute"
- % self)
+ def __eq__(self, other):
+ eq = super(DirNode, self).__eq__(other)
+ if eq is not None:
+ return eq
+ # check without entering each dir
+ self_nodes_paths = list(sorted(n.path for n in self.nodes))
+ other_nodes_paths = list(sorted(n.path for n in self.nodes))
+ return self_nodes_paths == other_nodes_paths
+
+ def __lt__(self, other):
+ lt = super(DirNode, self).__lt__(other)
+ if lt is not None:
+ return lt
+ # check without entering each dir
+ self_nodes_paths = list(sorted(n.path for n in self.nodes))
+ other_nodes_paths = list(sorted(n.path for n in self.nodes))
+ return self_nodes_paths < other_nodes_paths
@LazyProperty
def nodes(self):
@@ -595,12 +583,13 @@ class SubModuleNode(Node):
size = 0
def __init__(self, name, url, changeset=None, alias=None):
- self.path = name
+ # Note: Doesn't call Node.__init__!
+ self.path = name.rstrip('/')
self.kind = NodeKind.SUBMODULE
self.alias = alias
# we have to use emptyChangeset here since this can point to svn/git/hg
# submodules we cannot get from repository
- self.changeset = EmptyChangeset(str(changeset), alias=alias)
+ self.changeset = EmptyChangeset(changeset, alias=alias)
self.url = url
def __repr__(self):
@@ -613,5 +602,5 @@ class SubModuleNode(Node):
Returns name of the node so if its path
then only last part is returned.
"""
- org = safe_unicode(self.path.rstrip('/').split('/')[-1])
- return u'%s @ %s' % (org, self.changeset.short_id)
+ org = self.path.rstrip('/').rsplit('/', 1)[-1]
+ return '%s @ %s' % (org, self.changeset.short_id)
diff --git a/kallithea/lib/vcs/subprocessio.py b/kallithea/lib/vcs/subprocessio.py
--- a/kallithea/lib/vcs/subprocessio.py
+++ b/kallithea/lib/vcs/subprocessio.py
@@ -44,7 +44,7 @@ class StreamFeeder(threading.Thread):
if type(source) in (type(''), bytes, bytearray): # string-like
self.bytes = bytes(source)
else: # can be either file pointer or file-like
- if type(source) in (int, long): # file pointer it is
+ if isinstance(source, int): # it is a file descriptor
# converting file descriptor (int) stdin into file-like
source = os.fdopen(source, 'rb', 16384)
# let's see if source is file-like by now
@@ -125,11 +125,7 @@ class InputStreamChunker(threading.Threa
if len(t) > ccm:
kr.clear()
kr.wait(2)
- # # this only works on 2.7.x and up
- # if not kr.wait(10):
- # raise Exception("Timed out while waiting for input to be read.")
- # instead we'll use this
- if len(t) > ccm + 3:
+ if not kr.wait(10):
raise IOError(
"Timed out while waiting for input from subprocess.")
t.append(b)
@@ -178,7 +174,7 @@ class BufferedGenerator(object):
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
while not len(self.data) and not self.worker.EOF.is_set():
self.worker.data_added.clear()
self.worker.data_added.wait(0.2)
@@ -225,17 +221,6 @@ class BufferedGenerator(object):
return not self.worker.keep_reading.is_set()
@property
- def done_reading_event(self):
- """
- Done_reading does not mean that the iterator's buffer is empty.
- Iterator might have done reading from underlying source, but the read
- chunks might still be available for serving through .next() method.
-
- :returns: An threading.Event class instance.
- """
- return self.worker.EOF
-
- @property
def done_reading(self):
"""
Done_reading does not mean that the iterator's buffer is empty.
@@ -286,7 +271,7 @@ class SubprocessIOChunker(object):
- We are multithreaded. Writing in and reading out, err are all sep threads.
- We support concurrent (in and out) stream processing.
- - The output is not a stream. It's a queue of read string (bytes, not unicode)
+ - The output is not a stream. It's a queue of read string (bytes, not str)
chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
- We are non-blocking in more respects than communicate()
(reading from subprocess out pauses when internal buffer is full, but
@@ -367,18 +352,17 @@ class SubprocessIOChunker(object):
and returncode != 0
): # and it failed
bg_out.stop()
- out = ''.join(bg_out)
+ out = b''.join(bg_out)
bg_err.stop()
- err = ''.join(bg_err)
- if (err.strip() == 'fatal: The remote end hung up unexpectedly' and
- out.startswith('0034shallow ')
+ err = b''.join(bg_err)
+ if (err.strip() == b'fatal: The remote end hung up unexpectedly' and
+ out.startswith(b'0034shallow ')
):
# hack inspired by https://github.com/schacon/grack/pull/7
bg_out = iter([out])
_p = None
elif err:
- raise EnvironmentError(
- "Subprocess exited due to an error:\n" + err)
+ raise EnvironmentError("Subprocess exited due to an error: %s" % err)
else:
raise EnvironmentError(
"Subprocess exited with non 0 ret code: %s" % returncode)
@@ -390,7 +374,7 @@ class SubprocessIOChunker(object):
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
if self.process:
returncode = self.process.poll()
if (returncode is not None # process has terminated
@@ -400,7 +384,7 @@ class SubprocessIOChunker(object):
self.error.stop()
err = ''.join(self.error)
raise EnvironmentError("Subprocess exited due to an error:\n" + err)
- return self.output.next()
+ return next(self.output)
def throw(self, type, value=None, traceback=None):
if self.output.length or not self.output.done_reading:
diff --git a/kallithea/lib/vcs/utils/__init__.py b/kallithea/lib/vcs/utils/__init__.py
--- a/kallithea/lib/vcs/utils/__init__.py
+++ b/kallithea/lib/vcs/utils/__init__.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
"""
This module provides some useful tools for ``vcs`` like annotate/diff html
output. It also includes some internal helpers.
@@ -25,7 +27,7 @@ def aslist(obj, sep=None, strip=True):
:param sep:
:param strip:
"""
- if isinstance(obj, (basestring)):
+ if isinstance(obj, str):
lst = obj.split(sep)
if strip:
lst = [v.strip() for v in lst]
@@ -66,89 +68,107 @@ def safe_int(val, default=None):
return val
-def safe_unicode(str_, from_encoding=None):
+def safe_str(s):
"""
- safe unicode function. Does few trick to turn str_ into unicode
-
- In case of UnicodeDecode error we try to return it with encoding detected
- by chardet library if it fails fallback to unicode with errors replaced
-
- :param str_: string to decode
- :rtype: unicode
- :returns: unicode object
+ Safe unicode str function. Use a few tricks to turn s into str:
+ In case of UnicodeDecodeError with configured default encodings, try to
+ detect encoding with chardet library, then fall back to first encoding with
+ errors replaced.
"""
- if isinstance(str_, unicode):
- return str_
+ if isinstance(s, str):
+ return s
- if not from_encoding:
- from kallithea.lib.vcs.conf import settings
- from_encoding = settings.DEFAULT_ENCODINGS
-
- if not isinstance(from_encoding, (list, tuple)):
- from_encoding = [from_encoding]
+ if not isinstance(s, bytes): # use __str__ and don't expect UnicodeDecodeError
+ return str(s)
- try:
- return unicode(str_)
- except UnicodeDecodeError:
- pass
-
- for enc in from_encoding:
+ from kallithea.lib.vcs.conf import settings
+ for enc in settings.DEFAULT_ENCODINGS:
try:
- return unicode(str_, enc)
+ return str(s, enc)
except UnicodeDecodeError:
pass
try:
import chardet
- encoding = chardet.detect(str_)['encoding']
- if encoding is None:
- raise Exception()
- return str_.decode(encoding)
- except (ImportError, UnicodeDecodeError, Exception):
- return unicode(str_, from_encoding[0], 'replace')
+ encoding = chardet.detect(s)['encoding']
+ if encoding is not None:
+ return s.decode(encoding)
+ except (ImportError, UnicodeDecodeError):
+ pass
+
+ return str(s, settings.DEFAULT_ENCODINGS[0], 'replace')
-def safe_str(unicode_, to_encoding=None):
+def safe_bytes(s):
"""
- safe str function. Does few trick to turn unicode_ into string
-
- In case of UnicodeEncodeError we try to return it with encoding detected
- by chardet library if it fails fallback to string with errors replaced
-
- :param unicode_: unicode to encode
- :rtype: str
- :returns: str object
+ Safe bytes function. Use a few tricks to turn s into bytes string:
+ In case of UnicodeEncodeError with configured default encodings, fall back
+ to first configured encoding with errors replaced.
"""
+ if isinstance(s, bytes):
+ return s
- # if it's not basestr cast to str
- if not isinstance(unicode_, basestring):
- return str(unicode_)
-
- if isinstance(unicode_, str):
- return unicode_
+ assert isinstance(s, str), repr(s) # bytes cannot coerce with __str__ or handle None or int
- if not to_encoding:
- from kallithea.lib.vcs.conf import settings
- to_encoding = settings.DEFAULT_ENCODINGS
-
- if not isinstance(to_encoding, (list, tuple)):
- to_encoding = [to_encoding]
-
- for enc in to_encoding:
+ from kallithea.lib.vcs.conf import settings
+ for enc in settings.DEFAULT_ENCODINGS:
try:
- return unicode_.encode(enc)
+ return s.encode(enc)
except UnicodeEncodeError:
pass
- try:
- import chardet
- encoding = chardet.detect(unicode_)['encoding']
- if encoding is None:
- raise UnicodeEncodeError()
+ return s.encode(settings.DEFAULT_ENCODINGS[0], 'replace')
+
+
+def ascii_bytes(s):
+ """
+ Simple conversion from str to bytes, *assuming* all codepoints are
+ 7-bit and it thus is pure ASCII.
+ Will fail badly with UnicodeError on invalid input.
+ This should be used where encoding and "safe" ambiguity should be avoided.
+ Where strings already have been encoded in other ways but still are unicode
+ string - for example to hex, base64, json, urlencoding, or are known to be
+ identifiers.
- return unicode_.encode(encoding)
- except (ImportError, UnicodeEncodeError):
- return unicode_.encode(to_encoding[0], 'replace')
+ >>> ascii_bytes('a')
+ b'a'
+ >>> ascii_bytes(u'a')
+ b'a'
+ >>> ascii_bytes('å')
+ Traceback (most recent call last):
+ UnicodeEncodeError: 'ascii' codec can't encode character '\xe5' in position 0: ordinal not in range(128)
+ >>> ascii_bytes('å'.encode('utf8'))
+ Traceback (most recent call last):
+ AssertionError: b'\xc3\xa5'
+ """
+ assert isinstance(s, str), repr(s)
+ return s.encode('ascii')
+
+
+def ascii_str(s):
+ r"""
+ Simple conversion from bytes to str, *assuming* all codepoints are
+ 7-bit and it thus is pure ASCII.
+ Will fail badly with UnicodeError on invalid input.
+ This should be used where encoding and "safe" ambiguity should be avoided.
+ Where strings are encoded but also in other ways are known to be ASCII, and
+ where a unicode string is wanted without caring about encoding. For example
+ to hex, base64, urlencoding, or are known to be identifiers.
+
+ >>> ascii_str(b'a')
+ 'a'
+ >>> ascii_str(u'a')
+ Traceback (most recent call last):
+ AssertionError: 'a'
+ >>> ascii_str('å'.encode('utf8'))
+ Traceback (most recent call last):
+ UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 0: ordinal not in range(128)
+ >>> ascii_str(u'å')
+ Traceback (most recent call last):
+ AssertionError: 'å'
+ """
+ assert isinstance(s, bytes), repr(s)
+ return s.decode('ascii')
# Regex taken from http://www.regular-expressions.info/email.html
@@ -178,7 +198,7 @@ def author_email(author):
m = email_re.search(author)
if m is None:
return ''
- return safe_str(m.group(0))
+ return m.group(0)
def author_name(author):
diff --git a/kallithea/lib/vcs/utils/annotate.py b/kallithea/lib/vcs/utils/annotate.py
deleted file mode 100644
--- a/kallithea/lib/vcs/utils/annotate.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import StringIO
-
-from pygments import highlight
-from pygments.formatters import HtmlFormatter
-
-from kallithea.lib.vcs.exceptions import VCSError
-from kallithea.lib.vcs.nodes import FileNode
-
-
-def annotate_highlight(filenode, annotate_from_changeset_func=None,
- order=None, headers=None, **options):
- """
- Returns html portion containing annotated table with 3 columns: line
- numbers, changeset information and pygmentized line of code.
-
- :param filenode: FileNode object
- :param annotate_from_changeset_func: function taking changeset and
- returning single annotate cell; needs break line at the end
- :param order: ordered sequence of ``ls`` (line numbers column),
- ``annotate`` (annotate column), ``code`` (code column); Default is
- ``['ls', 'annotate', 'code']``
- :param headers: dictionary with headers (keys are whats in ``order``
- parameter)
- """
- options['linenos'] = True
- formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
- headers=headers,
- annotate_from_changeset_func=annotate_from_changeset_func, **options)
- lexer = filenode.lexer
- highlighted = highlight(filenode.content, lexer, formatter)
- return highlighted
-
-
-class AnnotateHtmlFormatter(HtmlFormatter):
-
- def __init__(self, filenode, annotate_from_changeset_func=None,
- order=None, **options):
- """
- If ``annotate_from_changeset_func`` is passed it should be a function
- which returns string from the given changeset. For example, we may pass
- following function as ``annotate_from_changeset_func``::
-
- def changeset_to_anchor(changeset):
- return '%s\n' % \
- (changeset.id, changeset.id)
-
- :param annotate_from_changeset_func: see above
- :param order: (default: ``['ls', 'annotate', 'code']``); order of
- columns;
- :param options: standard pygment's HtmlFormatter options, there is
- extra option tough, ``headers``. For instance we can pass::
-
- formatter = AnnotateHtmlFormatter(filenode, headers={
- 'ls': '#',
- 'annotate': 'Annotate',
- 'code': 'Code',
- })
-
- """
- super(AnnotateHtmlFormatter, self).__init__(**options)
- self.annotate_from_changeset_func = annotate_from_changeset_func
- self.order = order or ('ls', 'annotate', 'code')
- headers = options.pop('headers', None)
- if headers and not ('ls' in headers and 'annotate' in headers and
- 'code' in headers
- ):
- raise ValueError("If headers option dict is specified it must "
- "all 'ls', 'annotate' and 'code' keys")
- self.headers = headers
- if isinstance(filenode, FileNode):
- self.filenode = filenode
- else:
- raise VCSError("This formatter expect FileNode parameter, not %r"
- % type(filenode))
-
- def annotate_from_changeset(self, changeset):
- """
- Returns full html line for single changeset per annotated line.
- """
- if self.annotate_from_changeset_func:
- return self.annotate_from_changeset_func(changeset)
- else:
- return ''.join((changeset.id, '\n'))
-
- def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO.StringIO()
- lncount = 0
- for t, line in inner:
- if t:
- lncount += 1
- dummyoutfile.write(line)
-
- fl = self.linenostart
- mw = len(str(lncount + fl - 1))
- sp = self.linenospecial
- st = self.linenostep
- la = self.lineanchors
- aln = self.anchorlinenos
- if sp:
- lines = []
-
- for i in range(fl, fl + lncount):
- if i % st == 0:
- if i % sp == 0:
- if aln:
- lines.append(''
- '%*d' %
- (la, i, mw, i))
- else:
- lines.append(''
- '%*d' % (mw, i))
- else:
- if aln:
- lines.append(''
- '%*d' % (la, i, mw, i))
- else:
- lines.append('%*d' % (mw, i))
- else:
- lines.append('')
- ls = '\n'.join(lines)
- else:
- lines = []
- for i in range(fl, fl + lncount):
- if i % st == 0:
- if aln:
- lines.append('%*d'
- % (la, i, mw, i))
- else:
- lines.append('%*d' % (mw, i))
- else:
- lines.append('')
- ls = '\n'.join(lines)
-
- annotate_changesets = [tup[1] for tup in self.filenode.annotate]
- # If pygments cropped last lines break we need do that too
- ln_cs = len(annotate_changesets)
- ln_ = len(ls.splitlines())
- if ln_cs > ln_:
- annotate_changesets = annotate_changesets[:ln_ - ln_cs]
- annotate = ''.join((self.annotate_from_changeset(changeset)
- for changeset in annotate_changesets))
- # in case you wonder about the seemingly redundant
here:
- # since the content in the other cell also is wrapped in a div,
- # some browsers in some configurations seem to mess up the formatting.
- '''
- yield 0, ('