Changeset a9e72f7148c8 (branch: beta) - [Not reviewed]
Marcin Kuzminski <marcin@python-works.com> - 2010-10-22 02:58:17 (15 years ago)

some small fixes

2 files changed with 19 insertions and 18 deletions:
0 comments (0 inline, 0 general)
rhodecode/config/deployment.ini_tmpl
@@ -19,25 +19,25 @@ debug = true
#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false

[server:main]
##nr of threads to spawn
threadpool_workers = 5

##max request before thread respawn
-threadpool_max_requests = 2
+threadpool_max_requests = 10

##option to use threads of process
use_threadpool = true

use = egg:Paste#http
host = 127.0.0.1
port = 5000

[app:main]
use = egg:rhodecode
full_stack = true
static_files = true
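
For context, a minimal standalone sketch (not part of this changeset) of how a PasteDeploy ini like deployment.ini_tmpl is typically loaded and served; the ini path used below is hypothetical, and it assumes the Paste and PasteDeploy packages are installed:

    from paste.deploy import loadapp
    from paste import httpserver

    # Load the [app:main] WSGI application from a rendered ini file
    # (path is hypothetical) and serve it with the [server:main] settings.
    app = loadapp('config:deployment.ini', relative_to='.')
    httpserver.serve(app, host='127.0.0.1', port=5000)

Per the inline comments in the template, threadpool_workers is the number of worker threads spawned and threadpool_max_requests the number of requests a thread serves before it is respawned; this changeset raises the latter from 2 to 10.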
rhodecode/lib/celerylib/tasks.py
@@ -90,25 +90,25 @@ def whoosh_index(repo_location, full_ind
@locked_task
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    from rhodecode.model.db import Statistics, Repository
    log = get_commits_stats.get_logger()
    author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibilty

    commits_by_day_author_aggregate = {}
    commits_by_day_aggregate = {}
    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
    repo = MercurialRepository(repos_path + repo_name)

    skip_date_limit = True
-    parse_limit = 350 #limit for single task changeset parsing optimal for
+    parse_limit = 250 #limit for single task changeset parsing optimal for
    last_rev = 0
    last_cs = None
    timegetter = itemgetter('time')

    sa = get_session()

    dbrepo = sa.query(Repository)\
        .filter(Repository.repo_name == repo_name).scalar()
    cur_stats = sa.query(Statistics)\
        .filter(Statistics.repository == dbrepo).scalar()
    if cur_stats:
        last_rev = cur_stats.stat_on_revision
 
@@ -118,31 +118,33 @@ def get_commits_stats(repo_name, ts_min_
    if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
        #pass silently without any work if we're not on first revision or current
        #state of parsing revision(from db marker) is the last revision
        return True

    if cur_stats:
        commits_by_day_aggregate = OrderedDict(
                                       json.loads(
                                        cur_stats.commit_activity_combined))
        commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)

+    log.debug('starting parsing %s', parse_limit)
+    lmktime = mktime

    for cnt, rev in enumerate(repo.revisions[last_rev:]):
        last_cs = cs = repo.get_changeset(rev)
        k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
                          cs.date.timetuple()[2])
        timetupple = [int(x) for x in k.split('-')]
        timetupple.extend([0 for _ in xrange(6)])
-        k = mktime(timetupple)
+        k = lmktime(timetupple)
        if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
            try:
                l = [timegetter(x) for x in commits_by_day_author_aggregate\
                        [author_key_cleaner(cs.author)]['data']]
                time_pos = l.index(k)
            except ValueError:
                time_pos = False

            if time_pos >= 0 and time_pos is not False:

                datadict = commits_by_day_author_aggregate\
                    [author_key_cleaner(cs.author)]['data'][time_pos]
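
The mktime -> lmktime switch above is the usual local-name-binding micro-optimization for a hot loop, and the surrounding lines collapse each changeset date to a per-day epoch timestamp used as the aggregation key. A standalone sketch of that key computation (not part of this changeset; names are hypothetical, written for Python 3):

    from time import mktime
    from datetime import datetime

    def day_key(dt, _mktime=mktime):
        # Mirror the timetupple logic above: keep year/month/day, zero the
        # remaining six struct_time fields, and convert to an epoch timestamp.
        parts = (dt.year, dt.month, dt.day, 0, 0, 0, 0, 0, 0)
        return _mktime(parts)

    # Commits made at any time on 2010-10-22 map to the same bucket key.
    print(day_key(datetime(2010, 10, 22, 2, 58, 17)))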
 
@@ -177,30 +179,28 @@ def get_commits_stats(repo_name, ts_min_
                                    "schema":["commits"],
                                    }

        #gather all data by day
        if commits_by_day_aggregate.has_key(k):
            commits_by_day_aggregate[k] += 1
        else:
            commits_by_day_aggregate[k] = 1

        if cnt >= parse_limit:
            #don't fetch to much data since we can freeze application
            break

    overview_data = []
    for k, v in commits_by_day_aggregate.items():
        overview_data.append([k, v])
    overview_data = sorted(overview_data, key=itemgetter(0))

    if not commits_by_day_author_aggregate:
        commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
            "label":author_key_cleaner(repo.contact),
            "data":[0, 1],
            "schema":["commits"],
        }

    stats = cur_stats if cur_stats else Statistics()
    stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
    stats.commit_activity_combined = json.dumps(overview_data)

    log.debug('last revison %s', last_rev)
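
For illustration, a small standalone sketch (not part of this changeset; the sample values are made up) of the shapes persisted above: commit_activity_combined is a JSON list of [day_timestamp, commit_count] pairs sorted by day, and commit_activity a JSON object keyed by the cleaned author name:

    import json
    from operator import itemgetter

    # Hypothetical per-day aggregate keyed by epoch timestamps of midnight.
    commits_by_day_aggregate = {1287698400.0: 3, 1287612000.0: 5}

    overview_data = sorted(([k, v] for k, v in commits_by_day_aggregate.items()),
                           key=itemgetter(0))
    print(json.dumps(overview_data))  # [[1287612000.0, 5], [1287698400.0, 3]]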
 
@@ -277,52 +277,53 @@ def send_email(recipients, subject, body
                       mail_port, ssl, tls)
        m.send(recipients, subject, body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True

@task
def create_repo_fork(form_data, cur_user):
    import os
    from rhodecode.model.repo import RepoModel
    sa = get_session()
    rm = RepoModel(sa)

-    rm.create(form_data, cur_user, just_db=True, fork=True)
+    repo_model = RepoModel(get_session())
+    repo_model.create(form_data, cur_user, just_db=True, fork=True)

    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
    repo_path = os.path.join(repos_path, form_data['repo_name'])
    repo_fork_path = os.path.join(repos_path, form_data['fork_name'])

    MercurialRepository(str(repo_fork_path), True, clone_url=str(repo_path))


def __get_codes_stats(repo_name):
-    LANGUAGES_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx', 'aspx', 'asx', 'axd', 'c',
-                    'cfg', 'cfm', 'cpp', 'cs', 'diff', 'do', 'el', 'erl',
-                    'h', 'java', 'js', 'jsp', 'jspx', 'lisp',
-                    'lua', 'm', 'mako', 'ml', 'pas', 'patch', 'php', 'php3',
-                    'php4', 'phtml', 'pm', 'py', 'rb', 'rst', 's', 'sh',
-                    'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt',
-                    'yaws']
+    LANGUAGES_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx',
+    'aspx', 'asx', 'axd', 'c', 'cfg', 'cfm', 'cpp', 'cs', 'diff', 'do', 'el',
+    'erl', 'h', 'java', 'js', 'jsp', 'jspx', 'lisp', 'lua', 'm', 'mako', 'ml',
+    'pas', 'patch', 'php', 'php3', 'php4', 'phtml', 'pm', 'py', 'rb', 'rst',
+    's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']

    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
    repo = MercurialRepository(repos_path + repo_name)
    tip = repo.get_changeset()

    code_stats = {}
-    for topnode, dirs, files in tip.walk('/'):
-        for f in files:
+    def aggregate(cs):
+        for f in cs[2]:
            k = f.mimetype
            if f.extension in LANGUAGES_EXTENSIONS:
                if code_stats.has_key(k):
                    code_stats[k] += 1
                else:
                    code_stats[k] = 1

+    map(aggregate, tip.walk('/'))

    return code_stats or {}
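
The loop-to-aggregate rewrite above relies on map() evaluating eagerly, which holds on Python 2 where this code runs (note xrange and has_key elsewhere in the file); on Python 3, map() is lazy and the side effects would never fire. A standalone sketch (not part of this changeset; names are hypothetical) of the same per-mimetype counting with an explicit loop and collections.Counter:

    from collections import Counter

    LANGUAGES_EXTENSIONS = ['c', 'cpp', 'h', 'java', 'js', 'py', 'rb', 'rst', 'sh']

    def count_mimetypes(walker):
        # walker yields (topnode, dirs, files) triples, as tip.walk('/') does
        # above; each file node is assumed to expose .mimetype and .extension.
        code_stats = Counter()
        for topnode, dirs, files in walker:
            for f in files:
                if f.extension in LANGUAGES_EXTENSIONS:
                    code_stats[f.mimetype] += 1
        return dict(code_stats)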