Changeset - e67b2ef07a8e
Marcin Kuzminski - 2013-02-17 22:58:09
marcin@python-works.com
git executable is now configurable via .ini files
7 files changed with 47 insertions and 24 deletions:
development.ini
 
################################################################################
 
################################################################################
 
# RhodeCode - Pylons environment configuration                                 #
 
#                                                                              # 
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 
################################################################################
 
## Uncomment and replace with the address which should receive                ## 
 
## any error reports after application crash                                  ##
 
## Additionally those settings will be used by RhodeCode mailing system       ##
 
################################################################################
 
#email_to = admin@localhost
 
#error_email_from = paste_error@localhost
 
#app_email_from = rhodecode-noreply@localhost
 
#error_message =
 
#email_prefix = [RhodeCode]
 

	
 
#smtp_server = mail.server.com
 
#smtp_username = 
 
#smtp_password = 
 
#smtp_port = 
 
#smtp_use_tls = false
 
#smtp_use_ssl = true
 
# Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
 
#smtp_auth = 
 

	
 
[server:main]
 
## PASTE
 
## number of threads to spawn
 
#threadpool_workers = 5
 

	
 
## max requests before thread respawn
 
#threadpool_max_requests = 10
 

	
 
## option to use threads instead of processes
 
#use_threadpool = true
 

	
 
#use = egg:Paste#http
 

	
 
#WAITRESS
 
threads = 5
 
#100GB
 
max_request_body_size = 107374182400
 
use = egg:waitress#main
 

	
 
host = 0.0.0.0
 
port = 5000
 

	
 
[filter:proxy-prefix]
 
# prefix middleware for rc
 
use = egg:PasteDeploy#prefix
 
prefix = /<your-prefix>
 

	
 
[app:main]
 
use = egg:rhodecode
 
#filter-with = proxy-prefix
 
full_stack = true
 
static_files = true
 
# Optional Languages
 
# en, fr, ja, pt_BR, zh_CN, zh_TW, pl
 
lang = en
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 
app_instance_uuid = rc-develop
 
cut_off_limit = 256000
 
vcs_full_cache = True
 
# force https in RhodeCode, fixes https redirects, assumes it's always https
 
force_https = false
 
# use Strict-Transport-Security headers
 
use_htsts = false
 
commit_parse_limit = 25
 
# number of items displayed in lightweight dashboard before paginating
 
dashboard_items = 100
 
use_gravatar = true
 

	
 
# path to git executable
 
git_path = git
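## e.g. to use a git binary outside the default PATH (path is illustrative):
#git_path = /opt/git/bin/git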
 

	
 
## RSS feed options
 

	
 
rss_cut_off_limit = 256000
 
rss_items_per_page = 10
 
rss_include_diff = false
 

	
 

	
 
## alternative_gravatar_url allows you to use your own avatar server application
 
## the following parts of the URL will be replaced
 
## {email}        user email
 
## {md5email}     md5 hash of the user email (like at gravatar.com)
 
## {size}         size of the image that is expected from the server application
 
## {scheme}       http/https from RhodeCode server
 
## {netloc}       network location from RhodeCode server
 
#alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
 
#alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
 

	
 
container_auth_enabled = false
 
proxypass_auth_enabled = false
 
## default encoding used to convert from and to unicode
 
## can also be a comma separated list of encodings in case of mixed encodings
 
default_encoding = utf8
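## e.g. for mixed encodings (list is illustrative):
#default_encoding = utf8,latin1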
 

	
 
## overwrite schema of clone url
 
## available vars:
 
## scheme - http/https
 
## user - current user
 
## pass - password 
 
## netloc - network location
 
## path - usually repo_name
 

	
 
#clone_uri = {scheme}://{user}{pass}{netloc}{path}
 

	
 
## issue tracking mapping for commit messages
## comment out issue_pat, issue_server_link, issue_prefix to disable
 

	
 
## pattern used to fetch issues from commit messages
## the default one used here is #<numbers> with a regex non-capturing group for `#`
## {id} will be all groups matched from this pattern
 

	
 
issue_pat = (?:\s*#)(\d+)
 

	
 
## server url to the issue; each {id} will be replaced with the match
## fetched from the regex, and {repo} is replaced with the full repository
## name including groups; {repo_name} is replaced with just the repo name
 

	
 
issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 

	
 
## prefix added to the link to indicate it's a URL
## #314 will be replaced by <issue_prefix><id>
 

	
 
issue_prefix = #
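## e.g. with the settings above, `fixed #300` in a commit message renders
## as `#300` linked to https://myissueserver.com/<repo>/issue/300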
 

	
 
## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
## multiple patterns for other issue servers, wikis and the like
## below is an example of how to create a wiki pattern
 
#  #wiki-some-id -> https://mywiki.com/some-id
 

	
 
#issue_pat_wiki = (?:wiki-)(.+)
 
#issue_server_link_wiki = https://mywiki.com/{id}
 
#issue_prefix_wiki = WIKI-
 

	
 

	
 
## instance-id prefix
## a prefix key for this instance, used for cache invalidation when running
## multiple instances of rhodecode; make sure it's globally unique across
## all running rhodecode instances. Leave empty if you don't use it
 
instance_id = 
 

	
 
## alternative HTTP response code for failed authentication. The default
## response is 401 HTTPUnauthorized. Currently HG clients have trouble
## handling that. Set this variable to 403 to return HTTPForbidden instead
 
auth_ret_code =
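## e.g. to return HTTPForbidden for HG clients:
#auth_ret_code = 403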
 

	
 
####################################
 
###        CELERY CONFIG        ####
 
####################################
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = rhodecode.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serialier = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = debug
 
celeryd.max.tasks.per.child = 1
 

	
 
#tasks will never be sent to the queue, but executed locally instead.
 
celery.always.eager = false
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 
beaker.cache.data_dir=%(here)s/data/cache/data
 
beaker.cache.lock_dir=%(here)s/data/cache/lock
 

	
 
beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 

	
 
beaker.cache.super_short_term.type=memory
 
beaker.cache.super_short_term.expire=10
 
beaker.cache.super_short_term.key_length = 256
 

	
 
beaker.cache.short_term.type=memory
 
beaker.cache.short_term.expire=60
 
beaker.cache.short_term.key_length = 256
 

	
 
beaker.cache.long_term.type=memory
 
beaker.cache.long_term.expire=36000
 
beaker.cache.long_term.key_length = 256
 

	
 
beaker.cache.sql_cache_short.type=memory
 
beaker.cache.sql_cache_short.expire=10
 
beaker.cache.sql_cache_short.key_length = 256
 

	
 
beaker.cache.sql_cache_med.type=memory
 
beaker.cache.sql_cache_med.expire=360
 
beaker.cache.sql_cache_med.key_length = 256
 

	
 
beaker.cache.sql_cache_long.type=file
 
beaker.cache.sql_cache_long.expire=3600
 
beaker.cache.sql_cache_long.key_length = 256
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 
## Type of storage used for the session, current types are 
 
## dbm, file, memcached, database, and memory. 
 
## The storage uses the Container API 
 
## that is also used by the cache system.
 

	
 
## db session ##
 
#beaker.session.type = ext:database
 
#beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
 
#beaker.session.table_name = db_session 
 

	
 
## encrypted cookie client side session, good for many instances ##
 
#beaker.session.type = cookie
 

	
 
## file based cookies (default) ##
 
#beaker.session.type = file
 

	
 

	
 
beaker.session.key = rhodecode
 
## secure cookie requires AES python libraries ##
 
#beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
 
#beaker.session.validate_key = 9712sds2212c--zxc123
 
## sets session as invalid if it hasn't been accessed for a given amount of time
 
beaker.session.timeout = 2592000
 
beaker.session.httponly = true
 
#beaker.session.cookie_path = /<your-prefix>
 

	
 
## uncomment for https secure cookie ##
 
beaker.session.secure = false
 

	
 
## auto save the session so you don't have to call .save() ##
 
beaker.session.auto = False
 

	
 
## default cookie expiration time in seconds; `true` expires at browser close ##
 
#beaker.session.cookie_expires = 3600
 

	
 

	
 
############################
 
## ERROR HANDLING SYSTEMS ##
 
############################
 

	
 
####################
 
### [errormator] ###
 
####################
 

	
 
# Errormator is tailored to work with RhodeCode, see
# http://errormator.com for details on how to obtain an account
# you must install the python package `errormator_client` to make it work
 

	
 
# errormator enabled
 
errormator = true
 

	
 
errormator.server_url = https://api.errormator.com
 
errormator.api_key = YOUR_API_KEY
 

	
 
# TWEAK AMOUNT OF INFO SENT HERE
 

	
 
# enables 404 error logging (default False)
 
errormator.report_404 = false
 

	
 
# time in seconds after which a request is considered slow (default 1)
 
errormator.slow_request_time = 1
 

	
 
# record slow requests in application
 
# (needs to be enabled for slow datastore recording and time tracking)
 
errormator.slow_requests = true
 

	
 
# enable hooking to application loggers
 
# errormator.logging = true
 

	
 
# minimum log level for log capture
 
# errormator.logging.level = WARNING
 

	
 
# send logs only from erroneous/slow requests
 
# (saves API quota for intensive logging)
 
errormator.logging_on_error = false
 

	
 
# list of additional keywords that should be grabbed from the environ object
# can be a string with a comma separated list of words in lowercase
# (by default the client will always send the following info:
# 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
# start with HTTP*); this list can be extended with additional keywords here
 
errormator.environ_keys_whitelist = 
 

	
 

	
 
# list of keywords that should be blanked from the request object
# can be a string with a comma separated list of words in lowercase
# (by default the client will always blank keys that contain the following
# words: 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
# this list can be extended with additional keywords set here
 
errormator.request_keys_blacklist =
 

	
 

	
 
# list of namespaces that should be ignored when gathering log entries
# can be a string with a comma separated list of namespaces
# (by default the client ignores its own entries: errormator_client.client)
 
errormator.log_namespace_blacklist =  
 

	
 

	
 
################
 
### [sentry] ###
 
################
 

	
 
# sentry is an alternative open source error aggregator
# you must install the python packages `sentry` and `raven` to enable it
 

	
 
sentry.dsn = YOUR_DSN
 
sentry.servers =
 
sentry.name =
 
sentry.key =
 
sentry.public_key =
 
sentry.secret_key =
 
sentry.project =
 
sentry.site =
 
sentry.include_paths =
 
sentry.exclude_paths =
 

	
 

	
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
#set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 
#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
 
sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.db1.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 
[loggers]
 
keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
#############
 
## LOGGERS ##
 
#############
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers = 
 
qualname = routes.middleware
 
# "level = DEBUG" logs the route matched and routing variables.
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers = 
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers = 
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_rhodecode]
 
level = DEBUG
 
handlers = 
 
qualname = rhodecode
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
[logger_whoosh_indexer]
 
level = DEBUG
 
handlers = 
 
qualname = whoosh_indexer
 
propagate = 1
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = DEBUG
 
formatter = color_formatter
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = DEBUG
 
formatter = color_formatter_sql
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class=rhodecode.lib.colored_formatter.ColorFormatter
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class=rhodecode.lib.colored_formatter.ColorFormatterSql
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
production.ini
 
################################################################################
 
################################################################################
 
# RhodeCode - Pylons environment configuration                                 #
 
#                                                                              # 
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 
################################################################################
 
## Uncomment and replace with the address which should receive                ## 
 
## any error reports after application crash                                  ##
 
## Additionally those settings will be used by RhodeCode mailing system       ##
 
################################################################################
 
#email_to = admin@localhost
 
#error_email_from = paste_error@localhost
 
#app_email_from = rhodecode-noreply@localhost
 
#error_message =
 
#email_prefix = [RhodeCode]
 

	
 
#smtp_server = mail.server.com
 
#smtp_username = 
 
#smtp_password = 
 
#smtp_port = 
 
#smtp_use_tls = false
 
#smtp_use_ssl = true
 
# Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
 
#smtp_auth = 
 

	
 
[server:main]
 
## PASTE
 
## number of threads to spawn
 
#threadpool_workers = 5
 

	
 
## max requests before thread respawn
 
#threadpool_max_requests = 10
 

	
 
## option to use threads instead of processes
 
#use_threadpool = true
 

	
 
#use = egg:Paste#http
 

	
 
#WAITRESS
 
threads = 5
 
#100GB
 
max_request_body_size = 107374182400
 
use = egg:waitress#main
 

	
 
host = 127.0.0.1
 
port = 8001
 

	
 
[filter:proxy-prefix]
 
# prefix middleware for rc
 
use = egg:PasteDeploy#prefix
 
prefix = /<your-prefix>
 

	
 
[app:main]
 
use = egg:rhodecode
 
#filter-with = proxy-prefix
 
full_stack = true
 
static_files = true
 
# Optional Languages
 
# en, fr, ja, pt_BR, zh_CN, zh_TW, pl
 
lang = en
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 
app_instance_uuid = rc-production
 
cut_off_limit = 256000
 
vcs_full_cache = True
 
# force https in RhodeCode, fixes https redirects, assumes it's always https
 
force_https = false
 
# use Strict-Transport-Security headers
 
use_htsts = false
 
commit_parse_limit = 50
 
# number of items displayed in lightweight dashboard before paginating
 
dashboard_items = 100
 
use_gravatar = true
 

	
 
# path to git executable
 
git_path = git
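## e.g. to use a git binary outside the default PATH (path is illustrative):
#git_path = /opt/git/bin/git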
 

	
 
## RSS feed options
 

	
 
rss_cut_off_limit = 256000
 
rss_items_per_page = 10
 
rss_include_diff = false
 

	
 

	
 
## alternative_gravatar_url allows you to use your own avatar server application
 
## the following parts of the URL will be replaced
 
## {email}        user email
 
## {md5email}     md5 hash of the user email (like at gravatar.com)
 
## {size}         size of the image that is expected from the server application
 
## {scheme}       http/https from RhodeCode server
 
## {netloc}       network location from RhodeCode server
 
#alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
 
#alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
 

	
 
container_auth_enabled = false
 
proxypass_auth_enabled = false
 
## default encoding used to convert from and to unicode
 
## can also be a comma separated list of encodings in case of mixed encodings
 
default_encoding = utf8
 

	
 
## overwrite schema of clone url
 
## available vars:
 
## scheme - http/https
 
## user - current user
 
## pass - password 
 
## netloc - network location
 
## path - usually repo_name
 

	
 
#clone_uri = {scheme}://{user}{pass}{netloc}{path}
 

	
 
## issue tracking mapping for commit messages
## comment out issue_pat, issue_server_link, issue_prefix to disable
 

	
 
## pattern used to fetch issues from commit messages
## the default one used here is #<numbers> with a regex non-capturing group for `#`
## {id} will be all groups matched from this pattern
 

	
 
issue_pat = (?:\s*#)(\d+)
 

	
 
## server url to the issue; each {id} will be replaced with the match
## fetched from the regex, and {repo} is replaced with the full repository
## name including groups; {repo_name} is replaced with just the repo name
 

	
 
issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 

	
 
## prefix added to the link to indicate it's a URL
## #314 will be replaced by <issue_prefix><id>
 

	
 
issue_prefix = #
 

	
 
## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
## multiple patterns for other issue servers, wikis and the like
## below is an example of how to create a wiki pattern
 
#  #wiki-some-id -> https://mywiki.com/some-id
 

	
 
#issue_pat_wiki = (?:wiki-)(.+)
 
#issue_server_link_wiki = https://mywiki.com/{id}
 
#issue_prefix_wiki = WIKI-
 

	
 

	
 
## instance-id prefix
## a prefix key for this instance, used for cache invalidation when running
## multiple instances of rhodecode; make sure it's globally unique across
## all running rhodecode instances. Leave empty if you don't use it
 
instance_id = 
 

	
 
## alternative HTTP response code for failed authentication. The default
## response is 401 HTTPUnauthorized. Currently HG clients have trouble
## handling that. Set this variable to 403 to return HTTPForbidden instead
 
auth_ret_code =
 

	
 
####################################
 
###        CELERY CONFIG        ####
 
####################################
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = rhodecode.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serialier = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = debug
 
celeryd.max.tasks.per.child = 1
 

	
 
#tasks will never be sent to the queue, but executed locally instead.
 
celery.always.eager = false
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 
beaker.cache.data_dir=%(here)s/data/cache/data
 
beaker.cache.lock_dir=%(here)s/data/cache/lock
 

	
 
beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 

	
 
beaker.cache.super_short_term.type=memory
 
beaker.cache.super_short_term.expire=10
 
beaker.cache.super_short_term.key_length = 256
 

	
 
beaker.cache.short_term.type=memory
 
beaker.cache.short_term.expire=60
 
beaker.cache.short_term.key_length = 256
 

	
 
beaker.cache.long_term.type=memory
 
beaker.cache.long_term.expire=36000
 
beaker.cache.long_term.key_length = 256
 

	
 
beaker.cache.sql_cache_short.type=memory
 
beaker.cache.sql_cache_short.expire=10
 
beaker.cache.sql_cache_short.key_length = 256
 

	
 
beaker.cache.sql_cache_med.type=memory
 
beaker.cache.sql_cache_med.expire=360
 
beaker.cache.sql_cache_med.key_length = 256
 

	
 
beaker.cache.sql_cache_long.type=file
 
beaker.cache.sql_cache_long.expire=3600
 
beaker.cache.sql_cache_long.key_length = 256
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 
## Type of storage used for the session, current types are 
 
## dbm, file, memcached, database, and memory. 
 
## The storage uses the Container API 
 
## that is also used by the cache system.
 

	
 
## db session ##
 
#beaker.session.type = ext:database
 
#beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
 
#beaker.session.table_name = db_session 
 

	
 
## encrypted cookie client side session, good for many instances ##
 
#beaker.session.type = cookie
 

	
 
## file based cookies (default) ##
 
#beaker.session.type = file
 

	
 

	
 
beaker.session.key = rhodecode
 
## secure cookie requires AES python libraries ##
 
#beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
 
#beaker.session.validate_key = 9712sds2212c--zxc123
 
## sets session as invalid if it hasn't been accessed for a given amount of time
 
beaker.session.timeout = 2592000
 
beaker.session.httponly = true
 
#beaker.session.cookie_path = /<your-prefix>
 

	
 
## uncomment for https secure cookie ##
 
beaker.session.secure = false
 

	
 
## auto save the session so you don't have to call .save() ##
 
beaker.session.auto = False
 

	
 
## default cookie expiration time in seconds; `true` expires at browser close ##
 
#beaker.session.cookie_expires = 3600
 

	
 

	
 
############################
 
## ERROR HANDLING SYSTEMS ##
 
############################
 

	
 
####################
 
### [errormator] ###
 
####################
 

	
 
# Errormator is tailored to work with RhodeCode, see
# http://errormator.com for details on how to obtain an account
# you must install the python package `errormator_client` to make it work
 

	
 
# errormator enabled
 
errormator = true
 

	
 
errormator.server_url = https://api.errormator.com
 
errormator.api_key = YOUR_API_KEY
 

	
 
# TWEAK AMOUNT OF INFO SENT HERE
 

	
 
# enables 404 error logging (default False)
 
errormator.report_404 = false
 

	
 
# time in seconds after which a request is considered slow (default 1)
 
errormator.slow_request_time = 1
 

	
 
# record slow requests in application
 
# (needs to be enabled for slow datastore recording and time tracking)
 
errormator.slow_requests = true
 

	
 
# enable hooking to application loggers
 
# errormator.logging = true
 

	
 
# minimum log level for log capture
 
# errormator.logging.level = WARNING
 

	
 
# send logs only from erroneous/slow requests
 
# (saves API quota for intensive logging)
 
errormator.logging_on_error = false
 

	
 
# list of additional keywords that should be grabbed from the environ object
# can be a string with a comma separated list of words in lowercase
# (by default the client will always send the following info:
# 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
# start with HTTP*); this list can be extended with additional keywords here
 
errormator.environ_keys_whitelist = 
 

	
 

	
 
# list of keywords that should be blanked from the request object
# can be a string with a comma separated list of words in lowercase
# (by default the client will always blank keys that contain the following
# words: 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
# this list can be extended with additional keywords set here
 
errormator.request_keys_blacklist =
 

	
 

	
 
# list of namespaces that should be ignored when gathering log entries
# can be a string with a comma separated list of namespaces
# (by default the client ignores its own entries: errormator_client.client)
 
errormator.log_namespace_blacklist =  
 

	
 

	
 
################
 
### [sentry] ###
 
################
 

	
 
# sentry is an alternative open source error aggregator
# you must install the python packages `sentry` and `raven` to enable it
 

	
 
sentry.dsn = YOUR_DSN
 
sentry.servers =
 
sentry.name =
 
sentry.key =
 
sentry.public_key =
 
sentry.secret_key =
 
sentry.project =
 
sentry.site =
 
sentry.include_paths =
 
sentry.exclude_paths =
 

	
 

	
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 
#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
 
sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.db1.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 
[loggers]
 
keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
#############
 
## LOGGERS ##
 
#############
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers = 
 
qualname = routes.middleware
 
# "level = DEBUG" logs the route matched and routing variables.
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers = 
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers = 
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_rhodecode]
 
level = DEBUG
 
handlers = 
 
qualname = rhodecode
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
[logger_whoosh_indexer]
 
level = DEBUG
 
handlers = 
 
qualname = whoosh_indexer
 
propagate = 1
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = INFO
 
formatter = generic
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = WARN
 
formatter = generic
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class=rhodecode.lib.colored_formatter.ColorFormatter
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class=rhodecode.lib.colored_formatter.ColorFormatterSql
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
rhodecode/config/deployment.ini_tmpl
 
################################################################################
 
################################################################################
 
# RhodeCode - Pylons environment configuration                                 #
 
#                                                                              # 
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 
################################################################################
 
## Uncomment and replace with the address which should receive                ## 
 
## any error reports after application crash                                  ##
 
## Additionally those settings will be used by RhodeCode mailing system       ##
 
################################################################################
 
#email_to = admin@localhost
 
#error_email_from = paste_error@localhost
 
#app_email_from = rhodecode-noreply@localhost
 
#error_message =
 
#email_prefix = [RhodeCode]
 

	
 
#smtp_server = mail.server.com
 
#smtp_username = 
 
#smtp_password = 
 
#smtp_port = 
 
#smtp_use_tls = false
 
#smtp_use_ssl = true
 
# Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
 
#smtp_auth = 
 

	
 
[server:main]
 
## PASTE
 
## number of threads to spawn
 
#threadpool_workers = 5
 

	
 
## max requests before thread respawn
 
#threadpool_max_requests = 10
 

	
 
## option to use threads instead of processes
 
#use_threadpool = true
 

	
 
#use = egg:Paste#http
 

	
 
#WAITRESS
 
threads = 5
 
#100GB
 
max_request_body_size = 107374182400
 
use = egg:waitress#main
 

	
 
host = 127.0.0.1
 
port = 5000
 

	
 
[filter:proxy-prefix]
 
# prefix middleware for rc
 
use = egg:PasteDeploy#prefix
 
prefix = /<your-prefix>
 

	
 
[app:main]
 
use = egg:rhodecode
 
#filter-with = proxy-prefix
 
full_stack = true
 
static_files = true
 
# Optional Languages
 
# en, fr, ja, pt_BR, zh_CN, zh_TW, pl
 
lang = en
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 
app_instance_uuid = ${app_instance_uuid}
 
cut_off_limit = 256000
 
vcs_full_cache = True
 
# force https in RhodeCode, fixes https redirects, assumes it's always https
 
force_https = false
 
# use Strict-Transport-Security headers
 
use_htsts = false
 
commit_parse_limit = 50
 
# number of items displayed in lightweight dashboard before paginating
 
dashboard_items = 100
 
use_gravatar = true
 

	
 
# path to git executable
 
git_path = git
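## e.g. to use a git binary outside the default PATH (path is illustrative):
#git_path = /opt/git/bin/git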
 

	
 
## RSS feed options
 

	
 
rss_cut_off_limit = 256000
 
rss_items_per_page = 10
 
rss_include_diff = false
 

	
 

	
 
## alternative_gravatar_url allows you to use your own avatar server application
 
## the following parts of the URL will be replaced
 
## {email}        user email
 
## {md5email}     md5 hash of the user email (like at gravatar.com)
 
## {size}         size of the image that is expected from the server application
 
## {scheme}       http/https from RhodeCode server
 
## {netloc}       network location from RhodeCode server
 
#alternative_gravatar_url = http://myavatarserver.com/getbyemail/{email}/{size}
 
#alternative_gravatar_url = http://myavatarserver.com/getbymd5/{md5email}?s={size}
 

	
 
container_auth_enabled = false
 
proxypass_auth_enabled = false
 
## default encoding used to convert from and to unicode
 
## can also be a comma separated list of encodings in case of mixed encodings
 
default_encoding = utf8
 

	
 
## overwrite schema of clone url
 
## available vars:
 
## scheme - http/https
 
## user - current user
 
## pass - password 
 
## netloc - network location
 
## path - usually repo_name
 

	
 
#clone_uri = {scheme}://{user}{pass}{netloc}{path}
 

	
 
## issue tracking mapping for commit messages
## comment out issue_pat, issue_server_link, issue_prefix to disable
 

	
 
## pattern used to fetch issues from commit messages
## the default one used here is #<numbers> with a regex non-capturing group for `#`
## {id} will be all groups matched from this pattern
 

	
 
issue_pat = (?:\s*#)(\d+)
 

	
 
## server url to the issue; each {id} will be replaced with the match
## fetched from the regex, and {repo} is replaced with the full repository
## name including groups; {repo_name} is replaced with just the repo name
 

	
 
issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 

	
 
## prefix added to the link to indicate it's a URL
## #314 will be replaced by <issue_prefix><id>
 

	
 
issue_prefix = #
 

	
 
## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
## multiple patterns for other issue servers, wikis and the like
## below is an example of how to create a wiki pattern
 
#  #wiki-some-id -> https://mywiki.com/some-id
 

	
 
#issue_pat_wiki = (?:wiki-)(.+)
 
#issue_server_link_wiki = https://mywiki.com/{id}
 
#issue_prefix_wiki = WIKI-
 

	
 

	
 
## instance-id prefix
## a prefix key for this instance, used for cache invalidation when running
## multiple instances of rhodecode; make sure it's globally unique across
## all running rhodecode instances. Leave empty if you don't use it
 
instance_id = 
 

	
 
## alternative HTTP response code for failed authentication. The default
## response is 401 HTTPUnauthorized. Currently HG clients have trouble
## handling that. Set this variable to 403 to return HTTPForbidden instead
 
auth_ret_code =
 

	
 
####################################
 
###        CELERY CONFIG        ####
 
####################################
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = rhodecode.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serialier = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = debug
 
celeryd.max.tasks.per.child = 1
 

	
 
#tasks will never be sent to the queue, but executed locally instead.
 
celery.always.eager = false
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 
beaker.cache.data_dir=%(here)s/data/cache/data
 
beaker.cache.lock_dir=%(here)s/data/cache/lock
 

	
 
beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 

	
 
beaker.cache.super_short_term.type=memory
 
beaker.cache.super_short_term.expire=10
 
beaker.cache.super_short_term.key_length = 256
 

	
 
beaker.cache.short_term.type=memory
 
beaker.cache.short_term.expire=60
 
beaker.cache.short_term.key_length = 256
 

	
 
beaker.cache.long_term.type=memory
 
beaker.cache.long_term.expire=36000
 
beaker.cache.long_term.key_length = 256
 

	
 
beaker.cache.sql_cache_short.type=memory
 
beaker.cache.sql_cache_short.expire=10
 
beaker.cache.sql_cache_short.key_length = 256
 

	
 
beaker.cache.sql_cache_med.type=memory
 
beaker.cache.sql_cache_med.expire=360
 
beaker.cache.sql_cache_med.key_length = 256
 

	
 
beaker.cache.sql_cache_long.type=file
 
beaker.cache.sql_cache_long.expire=3600
 
beaker.cache.sql_cache_long.key_length = 256
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 
## Type of storage used for the session, current types are 
 
## dbm, file, memcached, database, and memory. 
 
## The storage uses the Container API 
 
## that is also used by the cache system.
 

	
 
## db session ##
 
#beaker.session.type = ext:database
 
#beaker.session.sa.url = postgresql://postgres:qwe@localhost/rhodecode
 
#beaker.session.table_name = db_session 
 

	
 
## encrypted cookie client side session, good for many instances ##
 
#beaker.session.type = cookie
 

	
 
## file based cookies (default) ##
 
#beaker.session.type = file
 

	
 

	
 
beaker.session.key = rhodecode
 
## secure cookie requires AES python libraries ##
 
#beaker.session.encrypt_key = g654dcno0-9873jhgfreyu
 
#beaker.session.validate_key = 9712sds2212c--zxc123
 
## sets session as invalid if it hasn't been accessed for a given amount of time
 
beaker.session.timeout = 2592000
 
beaker.session.httponly = true
 
#beaker.session.cookie_path = /<your-prefix>
 

	
 
## uncomment for https secure cookie ##
 
beaker.session.secure = false
 

	
 
## auto save the session so you don't have to call .save() ##
 
beaker.session.auto = False
 

	
 
## default cookie expiration time in seconds; `true` expires at browser close ##
 
#beaker.session.cookie_expires = 3600
 

	
 

	
 
############################
 
## ERROR HANDLING SYSTEMS ##
 
############################
 

	
 
####################
 
### [errormator] ###
 
####################
 

	
 
# Errormator is tailored to work with RhodeCode, see
# http://errormator.com for details on how to obtain an account
# you must install the python package `errormator_client` to make it work
 

	
 
# errormator enabled
 
errormator = true
 

	
 
errormator.server_url = https://api.errormator.com
 
errormator.api_key = YOUR_API_KEY
 

	
 
# TWEAK AMOUNT OF INFO SENT HERE
 

	
 
# enables 404 error logging (default False)
 
errormator.report_404 = false
 

	
 
# time in seconds after which a request is considered slow (default 1)
 
errormator.slow_request_time = 1
 

	
 
# record slow requests in application
 
# (needs to be enabled for slow datastore recording and time tracking)
 
errormator.slow_requests = true
 

	
 
# enable hooking to application loggers
 
# errormator.logging = true
 

	
 
# minimum log level for log capture
 
# errormator.logging.level = WARNING
 

	
 
# send logs only from erroneous/slow requests
 
# (saves API quota for intensive logging)
 
errormator.logging_on_error = false
 

	
 
# list of additional keywords that should be grabbed from the environ object
# can be a string with a comma separated list of words in lowercase
# (by default the client will always send the following info:
# 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
# start with HTTP*); this list can be extended with additional keywords here
 
errormator.environ_keys_whitelist = 
 

	
 

	
 
# list of keywords that should be blanked from the request object
# can be a string with a comma separated list of words in lowercase
# (by default the client will always blank keys that contain the following
# words: 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
# this list can be extended with additional keywords set here
 
errormator.request_keys_blacklist =
 

	
 

	
 
# list of namespaces that should be ignored when gathering log entries
# can be a string with a comma separated list of namespaces
# (by default the client ignores its own entries: errormator_client.client)
 
errormator.log_namespace_blacklist =  
 

	
 

	
 
################
 
### [sentry] ###
 
################
 

	
 
# sentry is an alternative open source error aggregator
# you must install the python packages `sentry` and `raven` to enable it
 

	
 
sentry.dsn = YOUR_DSN
 
sentry.servers =
 
sentry.name =
 
sentry.key =
 
sentry.public_key =
 
sentry.secret_key =
 
sentry.project =
 
sentry.site =
 
sentry.include_paths =
 
sentry.exclude_paths =
 

	
 

	
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 

	
 
# SQLITE [default]
 
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
 
 
 
# POSTGRESQL
 
# sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
 

	
 
# MySQL
 
# sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
 

	
 
# see sqlalchemy docs for others
 

	
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.db1.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 
[loggers]
 
keys = root, routes, rhodecode, sqlalchemy, beaker, templates, whoosh_indexer
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
#############
 
## LOGGERS ##
 
#############
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers = 
 
qualname = routes.middleware
 
# "level = DEBUG" logs the route matched and routing variables.
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers = 
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers = 
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_rhodecode]
 
level = DEBUG
 
handlers = 
 
qualname = rhodecode
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
[logger_whoosh_indexer]
 
level = DEBUG
 
handlers = 
 
qualname = whoosh_indexer
 
propagate = 1
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = INFO
 
formatter = generic
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = WARN
 
formatter = generic
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class=rhodecode.lib.colored_formatter.ColorFormatter
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class=rhodecode.lib.colored_formatter.ColorFormatterSql
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
rhodecode/lib/middleware/pygrack.py
 
import os
 
import socket
 
import logging
 
import subprocess
 
import traceback
 

	
 
from webob import Request, Response, exc
 

	
 
import rhodecode
 
from rhodecode.lib import subprocessio
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FileWrapper(object):
 

	
 
    def __init__(self, fd, content_length):
 
        self.fd = fd
 
        self.content_length = content_length
 
        self.remain = content_length
 

	
 
    def read(self, size):
 
        if size <= self.remain:
 
            try:
 
                data = self.fd.read(size)
 
            except socket.error:
 
                raise IOError(self)
 
            self.remain -= size
 
        elif self.remain:
 
            data = self.fd.read(self.remain)
 
            self.remain = 0
 
        else:
 
            data = None
 
        return data
 

	
 
    def __repr__(self):
 
        return '<FileWrapper %s len: %s, read: %s>' % (
 
            self.fd, self.content_length, self.content_length - self.remain
 
        )
 

	
 

	
 
class GitRepository(object):
 
    git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
 
    commands = ['git-upload-pack', 'git-receive-pack']
 

	
 
    def __init__(self, repo_name, content_path, extras):
 
        files = set([f.lower() for f in os.listdir(content_path)])
 
        if not (self.git_folder_signature.intersection(files)
                == self.git_folder_signature):
 
            raise OSError('%s missing git signature' % content_path)
 
        self.content_path = content_path
 
        self.valid_accepts = ['application/x-%s-result' %
 
                              c for c in self.commands]
 
        self.repo_name = repo_name
 
        self.extras = extras
 

	
 
    def _get_fixedpath(self, path):
 
        """
 
        Small fix for repo_path
 

	
 
        :param path:
 
        :type path:
 
        """
 
        return path.split(self.repo_name, 1)[-1].strip('/')
 

	
 
    def inforefs(self, request, environ):
 
        """
 
        WSGI Response producer for HTTP GET Git Smart
 
        HTTP /info/refs request.
 
        """
 

	
 
        git_command = request.GET.get('service')
 
        if git_command not in self.commands:
 
            log.debug('command %s not allowed' % git_command)
 
            return exc.HTTPMethodNotAllowed()
 

	
 
        # note to self:
 
        # please, resist the urge to add '\n' to git capture and increment
 
        # line count by 1.
 
        # The code in Git client not only does NOT need '\n', but actually
 
        # blows up if you sprinkle "flush" (0000) as "0001\n".
 
        # It reads binary, per number of bytes specified.
 
        # if you do add '\n' as part of data, count it.
 
        server_advert = '# service=%s' % git_command
 
        packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
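        # worked example: for git-upload-pack the advert is
        # '# service=git-upload-pack' (25 bytes), and 25 + 4 = 29 -> '001d',
        # the 4-char hex pkt-line length prefix the git client expects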
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
        try:
 
            out = subprocessio.SubprocessIOChunker(
 
                r'%s %s --stateless-rpc --advertise-refs "%s"' % (
 
                            _git_path, git_command[4:], self.content_path),
 
                starting_values=[
 
                    packet_len + server_advert + '0000'
 
                ]
 
            )
 
        except EnvironmentError, e:
 
            log.error(traceback.format_exc())
 
            raise exc.HTTPExpectationFailed()
 
        resp = Response()
 
        resp.content_type = 'application/x-%s-advertisement' % str(git_command)
 
        resp.charset = None
 
        resp.app_iter = out
 
        return resp
 

	
 
    def backend(self, request, environ):
 
        """
 
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
 
        Reads commands and data from HTTP POST's body.
 
        returns an iterator obj with contents of git command's
 
        response to stdout
 
        """
 
        git_command = self._get_fixedpath(request.path_info)
 
        if git_command not in self.commands:
 
            log.debug('command %s not allowed' % git_command)
 
            return exc.HTTPMethodNotAllowed()
 

	
 
        if 'CONTENT_LENGTH' in environ:
 
            inputstream = FileWrapper(environ['wsgi.input'],
 
                                      request.content_length)
 
        else:
 
            inputstream = environ['wsgi.input']
 

	
 
        try:
 
            gitenv = os.environ
 
            from rhodecode.lib.compat import json
 
            gitenv['RHODECODE_EXTRAS'] = json.dumps(self.extras)
 
            # forget all configs
 
            gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 
            opts = dict(
 
                env=gitenv,
 
                cwd=os.getcwd()
 
            )
 
            cmd = r'git %s --stateless-rpc "%s"' % (git_command[4:],
                                                    self.content_path)
 
            log.debug('handling cmd %s' % cmd)
 
            out = subprocessio.SubprocessIOChunker(
 
                cmd,
 
                inputstream=inputstream,
 
                **opts
 
            )
 
        except EnvironmentError, e:
 
            log.error(traceback.format_exc())
 
            raise exc.HTTPExpectationFailed()
 

	
 
        if git_command in [u'git-receive-pack']:
 
            # updating refs manually after each push.
 
            # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
 
            _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
            cmd = (u'%s --git-dir "%s" '
 
                    'update-server-info' % (_git_path, self.content_path))
 
            log.debug('handling cmd %s' % cmd)
 
            subprocess.call(cmd, shell=True)
 

	
 
        resp = Response()
 
        resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
 
        resp.charset = None
 
        resp.app_iter = out
 
        return resp
 

	
 
    def __call__(self, environ, start_response):
 
        request = Request(environ)
 
        _path = self._get_fixedpath(request.path_info)
 
        if _path.startswith('info/refs'):
 
            app = self.inforefs
 
        elif [a for a in self.valid_accepts if a in request.accept]:
 
            app = self.backend
 
        try:
 
            resp = app(request, environ)
 
        except exc.HTTPException, e:
 
            resp = e
 
            log.error(traceback.format_exc())
 
        except Exception, e:
 
            log.error(traceback.format_exc())
 
            resp = exc.HTTPInternalServerError()
 
        return resp(environ, start_response)
 

	
 

	
 
class GitDirectory(object):
 

	
 
    def __init__(self, repo_root, repo_name, extras):
 
        repo_location = os.path.join(repo_root, repo_name)
 
        if not os.path.isdir(repo_location):
 
            raise OSError(repo_location)
 

	
 
        self.content_path = repo_location
 
        self.repo_name = repo_name
 
        self.repo_location = repo_location
 
        self.extras = extras
 

	
 
    def __call__(self, environ, start_response):
 
        content_path = self.content_path
 
        try:
 
            app = GitRepository(self.repo_name, content_path, self.extras)
 
        except (AssertionError, OSError):
 
            content_path = os.path.join(content_path, '.git')
 
            if os.path.isdir(content_path):
 
                app = GitRepository(self.repo_name, content_path, self.extras)
 
            else:
 
                return exc.HTTPNotFound()(environ, start_response)
 
        return app(environ, start_response)
 

	
 

	
 
def make_wsgi_app(repo_name, repo_root, extras):
 
    return GitDirectory(repo_root, repo_name, extras)
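The hunks above are the core of the change: pygrack no longer hardcodes `git` but builds its shell commands from the new `git_path` setting. A minimal sketch of the lookup pattern, assuming `rhodecode.CONFIG` is the dict-like config populated from the `[app:main]` section of the .ini (the repository path is illustrative):

    import rhodecode

    # falls back to plain `git` on the PATH when the .ini defines no git_path
    _git_path = rhodecode.CONFIG.get('git_path', 'git')
    cmd = r'%s upload-pack --stateless-rpc --advertise-refs "%s"' % (
        _git_path, '/srv/repos/example')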
rhodecode/lib/utils.py
 
@@ -363,439 +363,438 @@ def invalidate_cache(cache_key, *args):
 
    Puts cache invalidation task into db for
 
    further global cache invalidation
 
    """
 

	
 
    from rhodecode.model.scm import ScmModel
 

	
 
    if cache_key.startswith('get_repo_cached_'):
 
        name = cache_key.split('get_repo_cached_')[-1]
 
        ScmModel().mark_for_invalidation(name)
 

	
 

	
 
def map_groups(path):
 
    """
 
    Given a full path to a repository, create all nested groups that this
 
    repo is inside. This function creates parent-child relationships between
 
    groups and creates default perms for all new groups.
 

	
 
    :param path: full path to repository
 
    """
 
    sa = meta.Session()
 
    groups = path.split(Repository.url_sep())
 
    parent = None
 
    group = None
 

	
 
    # last element is repo in nested groups structure
 
    groups = groups[:-1]
 
    rgm = ReposGroupModel(sa)
 
    for lvl, group_name in enumerate(groups):
 
        group_name = '/'.join(groups[:lvl] + [group_name])
 
        group = RepoGroup.get_by_group_name(group_name)
 
        desc = '%s group' % group_name
 

	
 
        # skip folders that are now removed repos
 
        if REMOVED_REPO_PAT.match(group_name):
 
            break
 

	
 
        if group is None:
 
            log.debug('creating group level: %s group_name: %s' % (lvl,
 
                                                                   group_name))
 
            group = RepoGroup(group_name, parent)
 
            group.group_description = desc
 
            sa.add(group)
 
            rgm._create_default_perms(group)
 
            sa.flush()
 
        parent = group
 
    return group
 

	
 

	
 
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
 
                   install_git_hook=False):
 
    """
 
    maps all repos given in initial_repo_list; non existing repositories
    are created. If remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.
 

	
 
    :param initial_repo_list: list of repositories found by scanning methods
 
    :param remove_obsolete: check for obsolete entries in database
 
    :param install_git_hook: if this is True, also check and install githook
 
        for a repo if missing
 
    """
 
    from rhodecode.model.repo import RepoModel
 
    from rhodecode.model.scm import ScmModel
 
    sa = meta.Session()
 
    rm = RepoModel()
 
    user = sa.query(User).filter(User.admin == True).first()
 
    if user is None:
 
        raise Exception('Missing administrative account!')
 
    added = []
 

	
 
#    # clear cache keys
 
#    log.debug("Clearing cache keys now...")
 
#    CacheInvalidation.clear_cache()
 
#    sa.commit()
 

	
 
    ##creation defaults
 
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_locking = defs.get('repo_enable_locking')
 
    enable_downloads = defs.get('repo_enable_downloads')
 
    private = defs.get('repo_private')
 

	
 
    for name, repo in initial_repo_list.items():
 
        group = map_groups(name)
 
        db_repo = rm.get_by_repo_name(name)
 
        # found repo that is on filesystem not in RhodeCode database
 
        if not db_repo:
 
            log.info('repository %s not found, creating now' % name)
 
            added.append(name)
 
            desc = (repo.description
 
                    if repo.description != 'unknown'
 
                    else '%s repository' % name)
 

	
 
            new_repo = rm.create_repo(
 
                repo_name=name,
 
                repo_type=repo.alias,
 
                description=desc,
 
                repos_group=getattr(group, 'group_id', None),
 
                owner=user,
 
                just_db=True,
 
                enable_locking=enable_locking,
 
                enable_downloads=enable_downloads,
 
                enable_statistics=enable_statistics,
 
                private=private
 
            )
 
            # we added that repo just now; make sure it has a git hook
            # installed
 
            if new_repo.repo_type == 'git':
 
                ScmModel().install_git_hook(new_repo.scm_instance)
 
            new_repo.update_changeset_cache()
 
        elif install_git_hook:
 
            if db_repo.repo_type == 'git':
 
                ScmModel().install_git_hook(db_repo.scm_instance)
 
        # during startup, install all cache keys for all repositories in the
        # system; this will register all repos and multiple instances
 
        key, _prefix, _org_key = CacheInvalidation._get_key(name)
 
        CacheInvalidation.invalidate(name)
 
        log.debug("Creating a cache key for %s, instance_id %s"
 
                  % (name, _prefix or 'unknown'))
 

	
 
    sa.commit()
 
    removed = []
 
    if remove_obsolete:
 
        # remove from database those repositories that are not in the filesystem
 
        for repo in sa.query(Repository).all():
 
            if repo.repo_name not in initial_repo_list.keys():
 
                log.debug("Removing non-existing repository found in db `%s`" %
 
                          repo.repo_name)
 
                try:
 
                    sa.delete(repo)
 
                    sa.commit()
 
                    removed.append(repo.repo_name)
 
                except Exception:
                    # don't hold up further removals on error
                    log.error(traceback.format_exc())
 
                    sa.rollback()
 

	
 
    return added, removed
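
# A minimal usage sketch (not part of this changeset): assuming repositories
# were discovered via ScmModel().repo_scan(), the mapper is typically driven
# like this on application startup:
#
#     from rhodecode.model.scm import ScmModel
#     initial_repo_list = ScmModel().repo_scan()
#     added, removed = repo2db_mapper(initial_repo_list, remove_obsolete=True,
#                                     install_git_hook=True)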
 

	
 

	
 
# set cache regions for beaker so celery can utilise it
 
def add_cache(settings):
 
    cache_settings = {'regions': None}
 
    for key in settings.keys():
 
        for prefix in ['beaker.cache.', 'cache.']:
 
            if key.startswith(prefix):
 
                name = key.split(prefix)[1].strip()
 
                cache_settings[name] = settings[key].strip()
 
    if cache_settings['regions']:
 
        for region in cache_settings['regions'].split(','):
 
            region = region.strip()
 
            region_settings = {}
 
            for key, value in cache_settings.items():
 
                if key.startswith(region):
 
                    region_settings[key.split('.')[1]] = value
 
            region_settings['expire'] = int(region_settings.get('expire',
 
                                                                60))
 
            region_settings.setdefault('lock_dir',
 
                                       cache_settings.get('lock_dir'))
 
            region_settings.setdefault('data_dir',
 
                                       cache_settings.get('data_dir'))
 

	
 
            if 'type' not in region_settings:
 
                region_settings['type'] = cache_settings.get('type',
 
                                                             'memory')
 
            beaker.cache.cache_regions[region] = region_settings
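
# A hedged example of the flat settings dict add_cache() parses; only the
# 'beaker.cache.' / 'cache.' prefixes and the 'regions' entry are assumed by
# the code above, the region names and paths are illustrative:
#
#     add_cache({
#         'beaker.cache.regions': 'short_term, long_term',
#         'beaker.cache.short_term.type': 'memory',
#         'beaker.cache.short_term.expire': '60',
#         'beaker.cache.long_term.type': 'file',
#         'beaker.cache.long_term.expire': '3600',
#         'beaker.cache.data_dir': '/tmp/rc-cache/data',
#         'beaker.cache.lock_dir': '/tmp/rc-cache/lock',
#     })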
 

	
 

	
 
def load_rcextensions(root_path):
 
    import rhodecode
 
    from rhodecode.config import conf
 

	
 
    path = os.path.join(root_path, 'rcextensions', '__init__.py')
 
    if os.path.isfile(path):
 
        rcext = create_module('rc', path)
 
        EXT = rhodecode.EXTENSIONS = rcext
 
        log.debug('Found rcextensions now loading %s...' % rcext)
 

	
 
        # Additional mappings that are not present in the pygments lexers
 
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
 

	
 
        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
 

	
 
        if getattr(EXT, 'INDEX_EXTENSIONS', []):
            log.debug('setting custom INDEX_EXTENSIONS')
 
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
 

	
 
        #ADDITIONAL MAPPINGS
 
        log.debug('adding extra into INDEX_EXTENSIONS')
 
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
 

	
 
        # automatically check if the module is missing any data and set it to
        # the default; this helps auto-update new features of the rcext module
 
        from rhodecode.config import rcextensions
 
        for k in dir(rcextensions):
 
            if not k.startswith('_') and not hasattr(EXT, k):
 
                setattr(EXT, k, getattr(rcextensions, k))
 

	
 

	
 
def get_custom_lexer(extension):
 
    """
 
    returns a custom lexer if it's defined in rcextensions module, or None
 
    if there's no custom lexer defined
 
    """
 
    import rhodecode
 
    from pygments import lexers
 
    # check whether this extension was defined as a different lexer
 
    if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
 
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
 
        return lexers.get_lexer_by_name(_lexer_name)
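
# An illustrative rcextensions snippet showing how EXTRA_LEXERS feeds the
# lookup above (the mapping is an example only):
#
#     # in rcextensions/__init__.py
#     EXTRA_LEXERS = {
#         'jsx': 'javascript',
#         'inc': 'php',
#     }
#
# get_custom_lexer('jsx') would then return the pygments javascript lexer.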
 

	
 

	
 
#==============================================================================
 
# TEST FUNCTIONS AND CREATORS
 
#==============================================================================
 
def create_test_index(repo_location, config, full_index):
 
    """
 
    Makes default test index
 

	
 
    :param config: test config
 
    :param full_index:
 
    """
 

	
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
 

	
 
 

	
 
    index_location = os.path.join(config['app_conf']['index_dir'])
 
    if not os.path.exists(index_location):
 
        os.makedirs(index_location)
 

	
 
    try:
 
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
 
        WhooshIndexingDaemon(index_location=index_location,
 
                             repo_location=repo_location)\
 
            .run(full_index=full_index)
 
        l.release()
 
    except LockHeld:
 
        pass
 

	
 

	
 
def create_test_env(repos_test_path, config):
 
    """
 
    Makes a fresh database and
 
    install test repository into tmp dir
 
    """
 
    from rhodecode.lib.db_manage import DbManage
 
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
 

	
 
    # PART ONE create db
 
    dbconf = config['sqlalchemy.db1.url']
 
    log.debug('making test db %s' % dbconf)
 

	
 
    # create test dir if it doesn't exist
 
    if not os.path.isdir(repos_test_path):
 
        log.debug('Creating testdir %s' % repos_test_path)
 
        os.makedirs(repos_test_path)
 

	
 
    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
 
                        tests=True)
 
    dbmanage.create_tables(override=True)
 
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
 
    dbmanage.create_default_user()
 
    dbmanage.admin_prompt()
 
    dbmanage.create_permissions()
 
    dbmanage.populate_default_permissions()
 
    Session().commit()
 
    # PART TWO make test repo
 
    log.debug('making test vcs repositories')
 

	
 
    idx_path = config['app_conf']['index_dir']
 
    data_path = config['app_conf']['cache_dir']
 

	
 
    #clean index and data
 
    if idx_path and os.path.exists(idx_path):
 
        log.debug('remove %s' % idx_path)
 
        shutil.rmtree(idx_path)
 

	
 
    if data_path and os.path.exists(data_path):
 
        log.debug('remove %s' % data_path)
 
        shutil.rmtree(data_path)
 

	
 
    #CREATE DEFAULT TEST REPOS
 
    cur_dir = dn(dn(abspath(__file__)))
 
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
 
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
 
    tar.close()
 

	
 
    cur_dir = dn(dn(abspath(__file__)))
 
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
 
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
 
    tar.close()
 

	
 
    #LOAD VCS test stuff
 
    from rhodecode.tests.vcs import setup_package
 
    setup_package()
 

	
 

	
 
#==============================================================================
 
# PASTER COMMANDS
 
#==============================================================================
 
class BasePasterCommand(Command):
 
    """
 
    Abstract Base Class for paster commands.
 

	
 
    The celery commands are somewhat aggressive about loading
 
    celery.conf, and since our module sets the `CELERY_LOADER`
 
    environment variable to our loader, we have to bootstrap a bit and
 
    make sure we've had a chance to load the pylons config off of the
 
    command line, otherwise everything fails.
 
    """
 
    min_args = 1
 
    min_args_error = "Please provide a paster config file as an argument."
 
    takes_config_file = 1
 
    requires_config_file = True
 

	
 
    def notify_msg(self, msg, log=False):
 
        """Make a notification to user, additionally if logger is passed
 
        it logs this action using given logger
 

	
 
        :param msg: message that will be printed to user
 
        :param log: logging instance, to use to additionally log this message
 

	
 
        """
 
        if log and isinstance(log, logging.Logger):
            log.info(msg)
 

	
 
    def run(self, args):
 
        """
 
        Overrides Command.run
 

	
 
        Checks for a config file argument and loads it.
 
        """
 
        if len(args) < self.min_args:
 
            raise BadCommand(
 
                self.min_args_error % {'min_args': self.min_args,
 
                                       'actual_args': len(args)})
 

	
 
        # Decrement because we're going to lob off the first argument.
 
        # @@ This is hacky
 
        self.min_args -= 1
 
        self.bootstrap_config(args[0])
 
        self.update_parser()
 
        return super(BasePasterCommand, self).run(args[1:])
 

	
 
    def update_parser(self):
 
        """
 
        Abstract method.  Allows for the class's parser to be updated
 
        before the superclass's `run` method is called.  Necessary to
 
        allow options/arguments to be passed through to the underlying
 
        celery command.
 
        """
 
        raise NotImplementedError("Abstract Method.")
 

	
 
    def bootstrap_config(self, conf):
 
        """
 
        Loads the pylons configuration.
 
        """
 
        from pylons import config as pylonsconfig
 

	
 
        self.path_to_ini_file = os.path.realpath(conf)
 
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
 
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
 

	
 
    def _init_session(self):
 
        """
 
        Inits SqlAlchemy Session
 
        """
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        from pylons import config
 
        from rhodecode.model import init_model
 
        from rhodecode.lib.utils2 import engine_from_config
 

	
 
        #get to remove repos !!
 
        add_cache(config)
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 

	
 

	
 
def check_git_version():
 
    """
 
    Checks what version of git is installed in system, and issues a warning
 
    if it's too old for RhodeCode to properly work.
 
    """
 
    from rhodecode import BACKENDS
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    from distutils.version import StrictVersion
 

	
 
    stdout, stderr = GitRepository._run_git_command('--version')
 

	
 
    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
 
    if len(ver.split('.')) > 3:
 
        # StrictVersion accepts at most a 3-element version
 
        ver = '.'.join(ver.split('.')[:3])
 
    try:
 
        _ver = StrictVersion(ver)
 
    except Exception:
 
        _ver = StrictVersion('0.0.0')
 
        stderr = traceback.format_exc()
 

	
 
    req_ver = '1.7.4'
 
    too_old_git = False
    if _ver < StrictVersion(req_ver):
        too_old_git = True
 

	
 
    if 'git' in BACKENDS:
 
        log.debug('GIT version detected: %s' % stdout)
 
        if stderr:
 
            log.warning('Unable to detect git version; original error was: %r' % stderr)
 
        elif too_old_git:
 
            log.warning('RhodeCode detected git version %s, which is too old '
 
                        'for the system to function properly. Make sure '
 
                        'its version is at least %s' % (ver, req_ver))
 
    return _ver
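
# With this changeset the git binary is no longer hardcoded: a hedged sketch
# of overriding it from the .ini file (the path below is an example only):
#
#     [app:main]
#     git_path = /usr/local/bin/git
#
# rhodecode.CONFIG.get('git_path', 'git') then resolves to that value, and
# falls back to plain 'git' found on $PATH when the option is unset.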
 

	
 

	
 
@decorator.decorator
 
def jsonify(func, *args, **kwargs):
 
    """Action decorator that formats output for JSON
 

	
 
    Given a function that will return content, this decorator will turn
 
    the result into JSON, with a content-type of 'application/json' and
 
    output it.
 

	
 
    """
 
    from pylons.decorators.util import get_pylons
 
    from rhodecode.lib.ext_json import json
 
    pylons = get_pylons(args)
 
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
 
    data = func(*args, **kwargs)
 
    if isinstance(data, (list, tuple)):
 
        msg = "JSON responses with Array envelopes are susceptible to " \
 
              "cross-site data leak attacks, see " \
 
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
 
        warnings.warn(msg, Warning, 2)
 
        log.warning(msg)
 
    log.debug("Returning JSON wrapped action output")
 
    return json.dumps(data, encoding='utf-8')
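
# A hedged usage sketch for the decorator above (the controller and action
# are hypothetical):
#
#     from rhodecode.lib.base import BaseController
#
#     class ExampleController(BaseController):
#         @jsonify
#         def show(self):
#             # a dict envelope avoids the Array cross-site leak warned
#             # about above
#             return {'ok': True, 'items': [1, 2, 3]}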
rhodecode/lib/vcs/backends/git/changeset.py
 
import re
 
from itertools import chain
 
from dulwich import objects
 
from subprocess import Popen, PIPE
 
import rhodecode
 
from rhodecode.lib.vcs.conf import settings
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.exceptions import ChangesetError
 
from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
 
from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \
 
    RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\
 
    AddedFileNodesGenerator, RemovedFileNodesGenerator
 
from rhodecode.lib.vcs.utils import safe_unicode
 
from rhodecode.lib.vcs.utils import date_fromtimestamp
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class GitChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at single revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self._stat_modes = {}
 
        self.repository = repository
 

	
 
        try:
 
            commit = self.repository._repo.get_object(revision)
 
            if isinstance(commit, objects.Tag):
 
                revision = commit.object[1]
 
                commit = self.repository._repo.get_object(commit.object[1])
 
        except KeyError:
 
            raise RepositoryError("Cannot get object with id %s" % revision)
 
        self.raw_id = revision
 
        self.id = self.raw_id
 
        self.short_id = self.raw_id[:12]
 
        self._commit = commit
 

	
 
        self._tree_id = commit.tree
 
        self._commiter_property = 'committer'
 
        self._author_property = 'author'
 
        self._date_property = 'commit_time'
 
        self._date_tz_property = 'commit_timezone'
 
        self.revision = repository.revisions.index(revision)
 

	
 
        self.message = safe_unicode(commit.message)
 

	
 
        self.nodes = {}
 
        self._paths = {}
 

	
 
    @LazyProperty
 
    def commiter(self):
 
        return safe_unicode(getattr(self._commit, self._commiter_property))
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_unicode(getattr(self._commit, self._author_property))
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(getattr(self._commit, self._date_property),
 
                                  getattr(self._commit, self._date_tz_property))
 

	
 
    @LazyProperty
 
    def _timestamp(self):
 
        return getattr(self._commit, self._date_property)
 

	
 
    @LazyProperty
 
    def status(self):
 
        """
 
        Returns modified, added, removed, deleted files for current changeset
 
        """
 
        return self.changed, self.added, self.removed
 

	
 
    @LazyProperty
 
    def tags(self):
 
        _tags = []
 
        for tname, tsha in self.repository.tags.iteritems():
 
            if tsha == self.raw_id:
 
                _tags.append(tname)
 
        return _tags
 

	
 
    @LazyProperty
 
    def branch(self):
 

	
 
        heads = self.repository._heads(reverse=False)
 

	
 
        ref = heads.get(self.raw_id)
 
        if ref:
 
            return safe_unicode(ref)
 

	
 
    def _fix_path(self, path):
 
        """
 
        Paths are stored without trailing slash so we need to get rid of it if
 
        needed.
 
        """
 
        if path.endswith('/'):
 
            path = path.rstrip('/')
 
        return path
 

	
 
    def _get_id_for_path(self, path):
 

	
 
        # FIXME: Please, spare a couple of minutes and make this code cleaner;
 
        if path not in self._paths:
 
            path = path.strip('/')
 
            # set root tree
 
            tree = self.repository._repo[self._tree_id]
 
            if path == '':
 
                self._paths[''] = tree.id
 
                return tree.id
 
            splitted = path.split('/')
 
            dirs, name = splitted[:-1], splitted[-1]
 
            curdir = ''
 

	
 
            # initially extract things from root dir
 
            for item, stat, id in tree.iteritems():
 
                if curdir:
 
                    name = '/'.join((curdir, item))
 
                else:
 
                    name = item
 
                self._paths[name] = id
 
                self._stat_modes[name] = stat
 

	
 
            for dir in dirs:
 
                if curdir:
 
                    curdir = '/'.join((curdir, dir))
 
                else:
 
                    curdir = dir
 
                dir_id = None
 
                for item, stat, id in tree.iteritems():
 
                    if dir == item:
 
                        dir_id = id
 
                if dir_id:
 
                    # Update tree
 
                    tree = self.repository._repo[dir_id]
 
                    if not isinstance(tree, objects.Tree):
 
                        raise ChangesetError('%s is not a directory' % curdir)
 
                else:
 
                    raise ChangesetError('%s has not been found' % curdir)
 

	
 
                # cache all items from the given traversed tree
 
                for item, stat, id in tree.iteritems():
 
                    if curdir:
 
                        name = '/'.join((curdir, item))
 
                    else:
 
                        name = item
 
                    self._paths[name] = id
 
                    self._stat_modes[name] = stat
 
            if path not in self._paths:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path %r at revision %r"
 
                    % (path, self.short_id))
 
        return self._paths[path]
 

	
 
    def _get_kind(self, path):
 
        obj = self.repository._repo[self._get_id_for_path(path)]
 
        if isinstance(obj, objects.Blob):
 
            return NodeKind.FILE
 
        elif isinstance(obj, objects.Tree):
 
            return NodeKind.DIR
 

	
 
    def _get_filectx(self, path):
 
        path = self._fix_path(path)
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %r at "
 
                " %r" % (self.raw_id, path))
 
        return path
 

	
 
    def _get_file_nodes(self):
 
        return chain(*(t[2] for t in self.walk()))
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
        return [self.repository.get_changeset(parent)
 
                for parent in self._commit.parents]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        so, se = self.repository.run_git_command(
 
            "rev-list --all --children | grep '^%s'" % self.raw_id
 
        )
 

	
 
        children = []
 
        for line in so.splitlines():
            child_shas = line.split(' ')[1:]
            children.extend(child_shas)
 
        return [self.repository.get_changeset(cs) for cs in children]
 

	
 
    def next(self, branch=None):
 

	
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        def _next(changeset, branch):
 
            try:
 
                next_ = changeset.revision + 1
 
                next_rev = changeset.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = changeset.repository.get_changeset(next_rev)
 

	
 
            if branch and branch != cs.branch:
 
                return _next(cs, branch)
 

	
 
            return cs
 

	
 
        return _next(self, branch)
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        def _prev(changeset, branch):
 
            try:
 
                prev_ = changeset.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = changeset.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 

	
 
            cs = changeset.repository.get_changeset(prev_rev)
 

	
 
            if branch and branch != cs.branch:
 
                return _prev(cs, branch)
 

	
 
            return cs
 

	
 
        return _prev(self, branch)
 

	
 
    def diff(self, ignore_whitespace=True, context=3):
 
        rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
 
        rev2 = self
 
        return ''.join(self.repository.get_diff(rev1, rev2,
 
                                    ignore_whitespace=ignore_whitespace,
 
                                    context=context))
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        # ensure path is traversed
 
        self._get_id_for_path(path)
 
        return self._stat_modes[path]
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.as_pretty_string()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.raw_length()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        node = self.get_node(path)
 
        return node.history[0]
 

	
 
    def get_file_history(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        TODO: This function now uses the underlying OS 'git' and 'grep'
        commands, which is generally not good. It should be replaced with an
        algorithm iterating over commits.
 
        """
 
        self._get_filectx(path)
 

	
 
        cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % (
 
                  self.id, path
 
               )
 
        so, se = self.repository.run_git_command(cmd)
 
        ids = re.findall(r'[0-9a-fA-F]{40}', so)
 
        return [self.repository.get_changeset(id) for id in ids]
 

	
 
    def get_file_history_2(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        """
 
        self._get_filectx(path)
 
        from dulwich.walk import Walker
 
        include = [self.id]
 
        walker = Walker(self.repository._repo.object_store, include,
 
                        paths=[path], max_entries=1)
 
        return [self.repository.get_changeset(sha)
 
                for sha in (x.commit.id for x in walker)]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 

	
 
        TODO: This function now uses the underlying OS 'git' command, which is
        generally not good. It should be replaced with an algorithm iterating
        over commits.
 
        """
 
        cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
 
        # -l     ==> outputs long shas (and we need all 40 characters)
 
        # --root ==> doesn't put '^' character for boundaries
 
        # -r sha ==> blames for the given revision
 
        so, se = self.repository.run_git_command(cmd)
 

	
 
        for i, blame_line in enumerate(so.split('\n')[:-1]):
 
            ln_no = i + 1
 
            sha, line = re.split(r' ', blame_line, 1)
 
            # bind sha now; a bare closure would see only the last loop value
            yield (ln_no, sha,
                   lambda sha=sha: self.repository.get_changeset(sha), line)
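
    # Illustrative consumption of the annotate generator above (the repo and
    # file name are hypothetical); the changeset loader is lazy and only
    # resolves when called:
    #
    #     cs = repo.get_changeset()
    #     for ln_no, sha, cs_loader, line in cs.get_file_annotate('setup.py'):
    #         print ln_no, sha[:12], line
    #         author = cs_loader().author  # resolves the changeset on demand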
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VCSError: If given stream is None
 

	
 
        """
 
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported, use '
                'one of %s' % allowed_kinds)
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        if kind == 'zip':
 
            frmt = 'zip'
 
        else:
 
            frmt = 'tar'
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
        cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
                                                frmt, prefix, self.raw_id)
 
        if kind == 'tgz':
 
            cmd += ' | gzip -9'
 
        elif kind == 'tbz2':
 
            cmd += ' | bzip2 -9'
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 
        popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
 
            cwd=self.repository.path)
 

	
 
        buffer_size = 1024 * 8
 
        chunk = popen.stdout.read(buffer_size)
 
        while chunk:
 
            stream.write(chunk)
 
            chunk = popen.stdout.read(buffer_size)
 
        # Make sure all descriptors would be read
 
        popen.communicate()
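
    # A minimal sketch of filling an archive (the target file is an example
    # only); ``kind`` must be one of settings.ARCHIVE_SPECS:
    #
    #     with open('/tmp/repo-tip.tar.gz', 'wb') as f:
    #         repo.get_changeset().fill_archive(stream=f, kind='tgz')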
 

	
 
    def get_nodes(self, path):
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %r at "
 
                " %r" % (self.revision, path))
 
        path = self._fix_path(path)
 
        id = self._get_id_for_path(path)
 
        tree = self.repository._repo[id]
 
        dirnodes = []
 
        filenodes = []
 
        als = self.repository.alias
 
        for name, stat, id in tree.iteritems():
 
            if objects.S_ISGITLINK(stat):
 
                dirnodes.append(SubModuleNode(name, url=None, changeset=id,
 
                                              alias=als))
 
                continue
 

	
 
            obj = self.repository._repo.get_object(id)
 
            if path != '':
 
                obj_path = '/'.join((path, name))
 
            else:
 
                obj_path = name
 
            if obj_path not in self._stat_modes:
 
                self._stat_modes[obj_path] = stat
 
            if isinstance(obj, objects.Tree):
 
                dirnodes.append(DirNode(obj_path, changeset=self))
 
            elif isinstance(obj, objects.Blob):
 
                filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
 
            else:
 
                raise ChangesetError("Requested object should be Tree "
 
                                     "or Blob, is %r" % type(obj))
 
        nodes = dirnodes + filenodes
 
        for node in nodes:
 
            if node.path not in self.nodes:
 
                self.nodes[node.path] = node
 
        nodes.sort()
 
        return nodes
 

	
 
    def get_node(self, path):
 
        if isinstance(path, unicode):
 
            path = path.encode('utf-8')
 
        path = self._fix_path(path)
 
        if path not in self.nodes:
 
            try:
 
                id_ = self._get_id_for_path(path)
 
            except ChangesetError:
 
                raise NodeDoesNotExistError("Cannot find one of parents' "
 
                    "directories for a given path: %s" % path)
 

	
 
            _GL = lambda m: m and objects.S_ISGITLINK(m)
 
            if _GL(self._stat_modes.get(path)):
 
                node = SubModuleNode(path, url=None, changeset=id_,
 
                                     alias=self.repository.alias)
 
            else:
 
                obj = self.repository._repo.get_object(id_)
 

	
 
                if isinstance(obj, objects.Tree):
 
                    if path == '':
 
                        node = RootNode(changeset=self)
 
                    else:
 
                        node = DirNode(path, changeset=self)
 
                    node._tree = obj
 
                elif isinstance(obj, objects.Blob):
 
                    node = FileNode(path, changeset=self)
 
                    node._blob = obj
 
                else:
 
                    raise NodeDoesNotExistError("There is no file nor directory "
 
                        "at the given path %r at revision %r"
 
                        % (path, self.short_id))
 
            # cache node
 
            self.nodes[path] = node
 
        return self.nodes[path]
 

	
 
    @LazyProperty
 
    def affected_files(self):
 
        """
 
        Gets fast-accessible file changes for the given changeset
 
        """
 
        a, m, d = self._changes_cache
 
        return list(a.union(m).union(d))
 

	
 
    @LazyProperty
 
    def _diff_name_status(self):
 
        output = []
 
        for parent in self.parents:
 
            cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
 
                                                                self.raw_id)
 
            so, se = self.repository.run_git_command(cmd)
 
            output.append(so.strip())
 
        return '\n'.join(output)
 

	
 
    @LazyProperty
 
    def _changes_cache(self):
 
        added = set()
 
        modified = set()
 
        deleted = set()
 
        _r = self.repository._repo
 

	
 
        parents = self.parents
 
        if not self.parents:
 
            parents = [EmptyChangeset()]
 
        for parent in parents:
 
            if isinstance(parent, EmptyChangeset):
 
                oid = None
 
            else:
 
                oid = _r[parent.raw_id].tree
 
            changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
 
            for (oldpath, newpath), (_, _), (_, _) in changes:
 
                if newpath and oldpath:
 
                    modified.add(newpath)
 
                elif newpath and not oldpath:
 
                    added.add(newpath)
 
                elif not newpath and oldpath:
 
                    deleted.add(oldpath)
 
        return added, modified, deleted
 

	
 
    def _get_paths_for_status(self, status):
 
        """
 
        Returns sorted list of paths for given ``status``.
 

	
 
        :param status: one of: *added*, *modified* or *deleted*
 
        """
 
        a, m, d = self._changes_cache
 
        return sorted({
 
            'added': list(a),
 
            'modified': list(m),
 
            'deleted': list(d)}[status]
 
        )
 

	
 
    @LazyProperty
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return list(self._get_file_nodes())
 
        return AddedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('added')], self)
 

	
 
    @LazyProperty
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return ChangedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('modified')], self)
 

	
 
    @LazyProperty
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return RemovedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('deleted')], self)
rhodecode/lib/vcs/backends/git/repository.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.git
 
    ~~~~~~~~~~~~~~~~
 

	
 
    Git backend implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import os
 
import re
 
import time
 
import posixpath
 
import logging
 
import traceback
 
import urllib
 
import urllib2
 
from dulwich.repo import Repo, NotGitRepository
 
from dulwich.objects import Tag
 
from string import Template
 
from subprocess import Popen, PIPE
 

	
 
import rhodecode
 
from rhodecode.lib.vcs.backends.base import BaseRepository
 
from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
 
from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
 
from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
 
from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
 
from rhodecode.lib.vcs.utils.paths import abspath
 
from rhodecode.lib.vcs.utils.paths import get_user_home
 
from .workdir import GitWorkdir
 
from .changeset import GitChangeset
 
from .inmemory import GitInMemoryChangeset
 
from .config import ConfigFile
 
from rhodecode.lib import subprocessio
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitRepository(BaseRepository):
 
    """
 
    Git repository backend.
 
    """
 
    DEFAULT_BRANCH_NAME = 'master'
 
    scm = 'git'
 

	
 
    def __init__(self, repo_path, create=False, src_url=None,
 
                 update_after_clone=False, bare=False):
 

	
 
        self.path = abspath(repo_path)
 
        repo = self._get_repo(create, src_url, update_after_clone, bare)
 
        self.bare = repo.bare
 

	
 
        self._config_files = [
 
            bare and abspath(self.path, 'config')
 
                     or abspath(self.path, '.git', 'config'),
 
            abspath(get_user_home(), '.gitconfig'),
 
        ]
 

	
 
    @ThreadLocalLazyProperty
 
    def _repo(self):
 
        repo = Repo(self.path)
 
        # temporarily set it here; later we will move it to the constructor
 
        baseui = None
 
        if baseui is None:
 
            from mercurial.ui import ui
 
            baseui = ui()
 
        # patch the instance of GitRepo with a "fake" ui object to add
        # a compatibility layer with Mercurial
 
        setattr(repo, 'ui', baseui)
 
        return repo
 

	
 
    @property
 
    def head(self):
 
        try:
 
            return self._repo.head()
 
        except KeyError:
 
            return None
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns list of revisions' ids, in ascending order. Being a lazy
        attribute allows external tools to inject shas from cache.
 
        """
 
        return self._get_all_revisions()
 

	
 
    @classmethod
    def _run_git_command(cls, cmd, **opts):
 
        """
 
        Runs given ``cmd`` as git command and returns tuple
 
        (stdout, stderr).
 

	
 
        .. note::
 
           This method exists only until log/blame functionality is implemented
 
           at Dulwich (see https://bugs.launchpad.net/bugs/645142). Parsing
 
           os command's output is a road to hell...
 

	
 
        :param cmd: git command to be executed
 
        :param opts: env options to pass into Subprocess command
 
        """
 

	
 
        _copts = ['-c', 'core.quotepath=false', ]
 
        _str_cmd = False
 
        if isinstance(cmd, basestring):
 
            cmd = [cmd]
 
            _str_cmd = True
 

	
 
        gitenv = os.environ
 
        # need to clean up GIT_DIR from the environment!
 
        if 'GIT_DIR' in gitenv:
 
            del gitenv['GIT_DIR']
 
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 

	
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
        cmd = [_git_path] + _copts + cmd
 
        if _str_cmd:
 
            cmd = ' '.join(cmd)
 
        try:
 
            _opts = dict(
                env=gitenv,
                shell=False,
            )
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)
 
        except (EnvironmentError, OSError), err:
 
            log.error(traceback.format_exc())
 
            raise RepositoryError("Couldn't run git command (%s).\n"
 
                                  "Original error was:%s" % (cmd, err))
 

	
 
        return ''.join(p.output), ''.join(p.error)
 

	
 
    def run_git_command(self, cmd):
 
        opts = {}
 
        if os.path.isdir(self.path):
 
            opts['cwd'] = self.path
 
        return self._run_git_command(cmd, **opts)
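
    # A hedged example of the wrapper above; any git subcommand string works
    # and the configured ``git_path`` binary is used under the hood:
    #
    #     so, se = repo.run_git_command('rev-list --all --count')
    #     print 'total revisions: %s' % so.strip()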
 

	
 
    @classmethod
 
    def _check_url(cls, url):
 
        """
 
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happen that mercurial will issue a basic
        auth request that can cause the whole API to hang when used from
        python or other external calls.
 

	
 
        On failures it'll raise urllib2.HTTPError
 
        """
 
        from mercurial.util import url as Url
 

	
 
        # those auth handlers are patched for a python 2.6.5 bug causing
        # infinite looping when given invalid resources
 
        from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
 

	
 
        # check first if it's not a local url
 
        if os.path.isdir(url) or url.startswith('file:'):
 
            return True
 

	
 
        if '+' in url[:url.find('://')]:
 
            url = url[url.find('+') + 1:]
 

	
 
        handlers = []
 
        test_uri, authinfo = Url(url).authinfo()
 
        if not test_uri.endswith('info/refs'):
 
            test_uri = test_uri.rstrip('/') + '/info/refs'
 
        if authinfo:
 
            #create a password manager
 
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((httpbasicauthhandler(passmgr),
 
                             httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib2.build_opener(*handlers)
 
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 

	
 
        q = {"service": 'git-upload-pack'}
 
        qs = '?%s' % urllib.urlencode(q)
 
        cu = "%s%s" % (test_uri, qs)
 
        req = urllib2.Request(cu, None, {})
 

	
 
        try:
 
            resp = o.open(req)
 
            return resp.code == 200
 
        except Exception, e:
 
            # means it cannot be cloned
 
            raise urllib2.URLError("[%s] %s" % (url, e))
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False,
 
                  bare=False):
 
        if create and os.path.exists(self.path):
 
            raise RepositoryError("Location already exist")
 
        if src_url and not create:
 
            raise RepositoryError("Create should be set to True if src_url is "
 
                                  "given (clone operation creates repository)")
 
        try:
 
            if create and src_url:
 
                GitRepository._check_url(src_url)
 
                self.clone(src_url, update_after_clone, bare)
 
                return Repo(self.path)
 
            elif create:
 
                os.mkdir(self.path)
 
                if bare:
 
                    return Repo.init_bare(self.path)
 
                else:
 
                    return Repo.init(self.path)
 
            else:
 
                return Repo(self.path)
 
        except (NotGitRepository, OSError), err:
 
            raise RepositoryError(err)
 

	
 
    def _get_all_revisions(self):
 
        # we must check if this repo is not empty, since the later command
        # fails if it is. And it's cheaper to ask than to catch the
        # subprocess errors
 
        try:
 
            self._repo.head()
 
        except KeyError:
 
            return []
 
        cmd = 'rev-list --all --reverse --date-order'
 
        try:
 
            so, se = self.run_git_command(cmd)
 
        except RepositoryError:
 
            # Can be raised for empty repositories
 
            return []
 
        return so.splitlines()
 

	
 
    def _get_all_revisions2(self):
 
        #alternate implementation using dulwich
 
        includes = [x[1][0] for x in self._parsed_refs.iteritems()
 
                    if x[1][1] != 'T']
 
        return [c.commit.id for c in self._repo.get_walker(include=includes)]
 

	
 
    def _get_revision(self, revision):
 
        """
 
        For git backend we always return a full sha here. This way we ensure
        that the changeset's revision attribute would become an integer.
 
        """
 
        pattern = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 
        is_bstr = lambda o: isinstance(o, (str, unicode))
 
        is_null = lambda o: len(o) == revision.count('0')
 

	
 
        if len(self.revisions) == 0:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in (None, '', 'tip', 'HEAD', 'head', -1):
 
            revision = self.revisions[-1]
 

	
 
        if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
 
            or isinstance(revision, int) or is_null(revision)):
 
            try:
 
                revision = self.revisions[int(revision)]
 
            except Exception:
 
                raise ChangesetDoesNotExistError("Revision %r does not exist "
 
                    "for this repository %s" % (revision, self))
 

	
 
        elif is_bstr(revision):
 
            # get by branch/tag name
 
            _ref_revision = self._parsed_refs.get(revision)
 
            _tags_shas = self.tags.values()
 
            if _ref_revision:  # and _ref_revision[1] in ['H', 'RH', 'T']:
 
                return _ref_revision[0]
 

	
 
            # maybe it's a tag ? we don't have them in self.revisions
 
            elif revision in _tags_shas:
 
                return _tags_shas[_tags_shas.index(revision)]
 

	
 
            elif not pattern.match(revision) or revision not in self.revisions:
 
                raise ChangesetDoesNotExistError("Revision %r does not exist "
 
                    "for this repository %s" % (revision, self))
 

	
 
        # Ensure we return full id
 
        if not pattern.match(str(revision)):
 
            raise ChangesetDoesNotExistError("Given revision %r not recognized"
 
                % revision)
 
        return revision
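
    # _get_revision accepts several spellings; an illustrative mapping of
    # inputs to the full sha it returns (behaviour as implemented above):
    #
    #     repo._get_revision(None)      # -> sha of the latest revision
    #     repo._get_revision('tip')     # -> sha of the latest revision
    #     repo._get_revision(0)         # -> sha of the first revision
    #     repo._get_revision('master')  # -> sha the branch head points at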
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
                yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If schema is not given, falls back to
        filesystem (``file:///``) schema.
 
        """
 
        url = str(url)
 
        if url != 'default' and '://' not in url:
 
            url = ':///'.join(('file', url))
 
        return url
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            idx_loc = '' if self.bare else '.git'
 
            # fallback to filesystem
 
            in_path = os.path.join(self.path, idx_loc, "index")
 
            he_path = os.path.join(self.path, idx_loc, "HEAD")
 
            if os.path.exists(in_path):
 
                return os.stat(in_path).st_mtime
 
            else:
 
                return os.stat(he_path).st_mtime
 

	
 
    @LazyProperty
 
    def description(self):
 
        idx_loc = '' if self.bare else '.git'
 
        undefined_description = u'unknown'
 
        description_path = os.path.join(self.path, idx_loc, 'description')
 
        if os.path.isfile(description_path):
 
            return safe_unicode(open(description_path).read())
 
        else:
 
            return undefined_description
 

	
 
    @LazyProperty
 
    def contact(self):
 
        undefined_contact = u'Unknown'
 
        return undefined_contact
 

	
 
    @property
 
    def branches(self):
 
        if not self.revisions:
 
            return {}
 
        sortkey = lambda ctx: ctx[0]
 
        _branches = [(x[0], x[1][0])
 
                     for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
 
        return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return self._get_tags()
 

	
 
    def _get_tags(self):
 
        if not self.revisions:
 
            return {}
 

	
 
        sortkey = lambda ctx: ctx[0]
 
        _tags = [(x[0], x[1][0])
 
                 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
 
        return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None,
 
            **kwargs):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        if name in self.tags:
 
            raise TagAlreadyExistError("Tag %s already exists" % name)
 
        changeset = self.get_changeset(revision)
 
        message = message or "Added tag %s for commit %s" % (name,
 
            changeset.raw_id)
 
        self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
 

	
 
        self._parsed_refs = self._get_parsed_refs()
 
        self.tags = self._get_tags()
 
        return changeset
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exist
 
        """
 
        if name not in self.tags:
 
            raise TagDoesNotExistError("Tag %s does not exist" % name)
 
        tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
 
        try:
 
            os.remove(tagpath)
 
            self._parsed_refs = self._get_parsed_refs()
 
            self.tags = self._get_tags()
 
        except OSError, e:
 
            raise RepositoryError(e.strerror)
 

	
 
    @LazyProperty
 
    def _parsed_refs(self):
 
        return self._get_parsed_refs()
 

	
 
    def _get_parsed_refs(self):
 
        refs = self._repo.get_refs()
 
        keys = [('refs/heads/', 'H'),
 
                ('refs/remotes/origin/', 'RH'),
 
                ('refs/tags/', 'T')]
 
        _refs = {}
 
        for ref, sha in refs.iteritems():
 
            for k, type_ in keys:
 
                if ref.startswith(k):
 
                    _key = ref[len(k):]
 
                    if type_ == 'T':
 
                        obj = self._repo.get_object(sha)
 
                        if isinstance(obj, Tag):
 
                            sha = self._repo.get_object(sha).object[1]
 
                    _refs[_key] = [sha, type_]
 
                    break
 
        return _refs
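
    # Shape of the dict returned above, with made-up shas: branch heads are
    # tagged 'H', remote heads 'RH' and tags 'T' (tag objects are peeled to
    # the commit they point at):
    #
    #     {'master': ['9fe4d51f...', 'H'],
    #      'stable': ['77abc3f1...', 'RH'],
    #      'v1.0':   ['c3d10eab...', 'T']}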
 

	
 
    def _heads(self, reverse=False):
 
        refs = self._repo.get_refs()
 
        heads = {}
 

	
 
        for key, val in refs.items():
 
            for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
 
                if key.startswith(ref_key):
 
                    n = key[len(ref_key):]
 
                    if n not in ['HEAD']:
 
                        heads[n] = val
 

	
 
        return heads if reverse else dict((y, x) for x, y in heads.iteritems())
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns ``GitChangeset`` object representing commit from git repository
 
        at the given revision or head (most recent commit) if None given.
 
        """
 
        if isinstance(revision, GitChangeset):
 
            return revision
 
        revision = self._get_revision(revision)
 
        changeset = GitChangeset(repository=self, revision=revision)
 
        return changeset
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
           end_date=None, branch_name=None, reverse=False):
 
        """
 
        Returns iterator of ``GitChangeset`` objects from start to end (both
 
        are inclusive), in ascending date order (unless ``reverse`` is set).
 

	
 
        :param start: changeset ID, as str; first returned changeset
 
        :param end: changeset ID, as str; last returned changeset
 
        :param start_date: if specified, changesets with commit date less than
 
          ``start_date`` would be filtered out from returned set
 
        :param end_date: if specified, changesets with commit date greater than
 
          ``end_date`` would be filtered out from returned set
 
        :param branch_name: if specified, changesets not reachable from given
 
          branch would be filtered out from returned set
 
        :param reverse: if ``True``, returned generator would be reversed
 
          (meaning that returned changesets would have descending date order)
 

	
 
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
 
            exist.
 
        :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
 
          ``end`` could not be found.
 

	
 
        """
 
        if branch_name and branch_name not in self.branches:
 
            raise BranchDoesNotExistError("Branch '%s' not found" \
 
                                          % branch_name)
 
        # %H in the format means (full) commit hash; initial hashes are
        # retrieved in ascending date order
 
        cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
 
        cmd_params = {}
 
        if start_date:
 
            cmd_template += ' --since "$since"'
 
            cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
 
        if end_date:
 
            cmd_template += ' --until "$until"'
 
            cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
 
        if branch_name:
 
            cmd_template += ' $branch_name'
 
            cmd_params['branch_name'] = branch_name
 
        else:
 
            cmd_template += ' --all'
 

	
 
        cmd = Template(cmd_template).safe_substitute(**cmd_params)
 
        revs = self.run_git_command(cmd)[0].splitlines()
 
        start_pos = 0
 
        end_pos = len(revs)
 
        if start:
 
            _start = self._get_revision(start)
 
            try:
 
                start_pos = revs.index(_start)
 
            except ValueError:
 
                pass
 

	
 
        if end is not None:
 
            _end = self._get_revision(end)
 
            try:
 
                end_pos = revs.index(_end)
 
            except ValueError:
 
                pass
 

	
 
        if None not in [start, end] and start_pos > end_pos:
 
            raise RepositoryError('start cannot be after end')
 

	
 
        if end_pos is not None:
 
            end_pos += 1
 

	
 
        revs = revs[start_pos:end_pos]
 
        if reverse:
 
            revs = reversed(revs)
 
        for rev in revs:
 
            yield self.get_changeset(rev)
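
    # A hedged example of slicing history with the generator above (the
    # branch and date are illustrative):
    #
    #     import datetime
    #     for cs in repo.get_changesets(branch_name='master',
    #                                   start_date=datetime.datetime(2012, 1, 1),
    #                                   reverse=True):
    #         print cs.short_id, cs.message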
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
                 context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	