Changeset - ecc566f8b69f
beta
Marcin Kuzminski - 2010-11-14 19:57:14
marcin@python-works.com
fixes #59, notifications for user registrations + some changes to mailer
6 files changed with 51 insertions and 5 deletions:
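
The heart of the #59 fix is in rhodecode/model/user.py below: UserModel.create_registration now mails the administrator whenever a new account is registered. A minimal sketch of that flow, mirroring the create_registration change further down; the form values are placeholders:

# Sketch of the new registration-notification flow (form values are placeholders).
from pylons.i18n.translation import _
from rhodecode.lib.celerylib import tasks, run_task

form_data = {'username': 'newuser', 'email': 'newuser@example.com'}

body = ('New user registration\n'
        'username: %s\n'
        'email: %s\n') % (form_data['username'], form_data['email'])

# Passing None as recipients makes send_email fall back to the 'email_to'
# address configured in the [DEFAULT] section of the .ini files below.
run_task(tasks.send_email, None,
         _('[RhodeCode] New User registration'), body)
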
production.ini
 
################################################################################
 
################################################################################
 
# rhodecode - Pylons environment configuration                                 #
 
#                                                                              # 
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
################################################################################
 
## Uncomment and replace with the address which should receive                ## 
 
## any error reports after application crash                                  ##
 
## Additionally those settings will be used by rhodecode mailing system       ##
 
################################################################################
 
#email_to = admin@localhost
 
#error_email_from = paste_error@localhost
 
#app_email_from = rhodecode-noreply@localhost
 
#error_message =
 

	
 
#smtp_server = mail.server.com
 
#smtp_username = 
 
#smtp_password = 
 
#smtp_port = 
 
#smtp_use_tls = false
 
#smtp_use_ssl = true
 

	
 
[server:main]
 
##number of threads to spawn
 
threadpool_workers = 5
 

	
 
##max requests before thread respawn
 
threadpool_max_requests = 2
 

	
 
##option to use threads instead of processes
 
use_threadpool = true
 

	
 
use = egg:Paste#http
 
host = 127.0.0.1
 
port = 8001
 

	
 
[app:main]
 
use = egg:rhodecode
 
full_stack = true
 
static_files = false
 
lang=en
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 
beaker.cache.data_dir=/%(here)s/data/cache/data
 
beaker.cache.lock_dir=/%(here)s/data/cache/lock
 
beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 

	
 
beaker.cache.super_short_term.type=memory
 
beaker.cache.super_short_term.expire=10
 

	
 
beaker.cache.short_term.type=memory
 
beaker.cache.short_term.expire=60
 

	
 
beaker.cache.long_term.type=memory
 
beaker.cache.long_term.expire=36000
 

	
 

	
 
beaker.cache.sql_cache_short.type=memory
 
beaker.cache.sql_cache_short.expire=5
 

	
 
beaker.cache.sql_cache_med.type=memory
 
beaker.cache.sql_cache_med.expire=360
 

	
 
beaker.cache.sql_cache_long.type=file
 
beaker.cache.sql_cache_long.expire=3600
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 
## Type of storage used for the session, current types are 
 
## dbm, file, memcached, database, and memory. 
 
## The storage uses the Container API 
 
##that is also used by the cache system.
 
beaker.session.type = file
 

	
 
beaker.session.key = rhodecode
 
beaker.session.secret = g654dcno0-9873jhgfreyu
 
beaker.session.timeout = 36000
 

	
 
##auto save the session so you do not have to call .save()
 
beaker.session.auto = False
 

	
 
##true means expire at browser close
 
#beaker.session.cookie_expires = 3600
 

	
 
    
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
 
#sqlalchemy.db1.echo = False
 
#sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 
[loggers]
 
keys = root, routes, rhodecode, sqlalchemy
 

	
 
[handlers]
 
keys = console
 

	
 
[formatters]
 
keys = generic,color_formatter
 

	
 
#############
 
## LOGGERS ##
 
#############
 
[logger_root]
 
level = INFO
 
handlers = console
 

	
 
[logger_routes]
 
level = INFO
 
handlers = console
 
qualname = routes.middleware
 
# "level = DEBUG" logs the route matched and routing variables.
 
propagate = 0
 

	
 
[logger_rhodecode]
 
level = DEBUG
 
handlers = console
 
qualname = rhodecode
 
propagate = 0
 

	
 
[logger_sqlalchemy]
 
level = ERROR
 
handlers = console
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = NOTSET
 
formatter = color_formatter
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class=rhodecode.lib.colored_formatter.ColorFormatter
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 
\ No newline at end of file
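
The commented-out smtp_* and app_email_from keys in the [DEFAULT] section above are what the mailing system reads: send_email in rhodecode/lib/celerylib/tasks.py (later in this changeset) pulls them out with config.items('DEFAULT') and hands them to SmtpMailer. A minimal sketch of that mapping, assuming the keys have been uncommented and filled in:

# Sketch: how the [DEFAULT] mail keys above reach SmtpMailer (see tasks.py below).
from pylons import config
from rhodecode.lib.smtp_mailer import SmtpMailer

email_config = dict(config.items('DEFAULT'))

def str2bool(v):
    # the .ini values arrive as strings such as "true"/"false"
    return (v or '').lower() in ["yes", "true", "t", "1"]

mailer = SmtpMailer(email_config.get('app_email_from'),
                    email_config.get('smtp_username'),
                    email_config.get('smtp_password'),
                    email_config.get('smtp_server'),
                    email_config.get('smtp_port'),
                    str2bool(email_config.get('smtp_use_ssl')),
                    str2bool(email_config.get('smtp_use_tls')))
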
rhodecode/config/deployment.ini_tmpl
 
################################################################################
 
################################################################################
 
# rhodecode - Pylons environment configuration                                 #
 
#                                                                              # 
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
################################################################################
 
## Uncomment and replace with the address which should receive                ## 
 
## any error reports after application crash                                  ##
 
## Additionally those settings will be used by rhodecode mailing system       ##
 
################################################################################
 
#email_to = admin@localhost
 
#error_email_from = paste_error@localhost
 
#app_email_from = rhodecode-noreply@localhost
 
#error_message =
 

	
 
#smtp_server = mail.server.com
 
#smtp_username = 
 
#smtp_password = 
 
#smtp_port = 
 
#smtp_use_tls = false
 
#smtp_use_ssl = true
 

	
 
[server:main]
 
##number of threads to spawn
 
threadpool_workers = 5
 

	
 
##max requests before thread respawn
 
threadpool_max_requests = 10
 

	
 
##option to use threads instead of processes
 
use_threadpool = true
 

	
 
use = egg:Paste#http
 
host = 127.0.0.1
 
port = 5000
 

	
 
[app:main]
 
use = egg:rhodecode
 
full_stack = true
 
static_files = true
 
lang=en
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 
app_instance_uuid = ${app_instance_uuid}
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 
beaker.cache.data_dir=/%(here)s/data/cache/data
 
beaker.cache.lock_dir=/%(here)s/data/cache/lock
 
beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 

	
 
beaker.cache.super_short_term.type=memory
 
beaker.cache.super_short_term.expire=10
 

	
 
beaker.cache.short_term.type=memory
 
beaker.cache.short_term.expire=60
 

	
 
beaker.cache.long_term.type=memory
 
beaker.cache.long_term.expire=36000
 

	
 
beaker.cache.sql_cache_short.type=memory
 
beaker.cache.sql_cache_short.expire=5
 

	
 
beaker.cache.sql_cache_med.type=memory
 
beaker.cache.sql_cache_med.expire=360
 

	
 
beaker.cache.sql_cache_long.type=file
 
beaker.cache.sql_cache_long.expire=3600
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 
## Type of storage used for the session, current types are 
 
## dbm, file, memcached, database, and memory. 
 
## The storage uses the Container API 
 
##that is also used by the cache system.
 
beaker.session.type = file
 

	
 
beaker.session.key = rhodecode
 
beaker.session.secret = ${app_instance_secret}
 
beaker.session.timeout = 36000
 

	
 
##auto save the session so you do not have to call .save()
 
beaker.session.auto = False
 

	
 
##true means expire at browser close
 
#beaker.session.cookie_expires = 3600
 

	
 
    
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
 
#sqlalchemy.db1.echo = False
 
#sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 
[loggers]
 
keys = root, routes, rhodecode, sqlalchemy
 

	
 
[handlers]
 
keys = console
 

	
 
[formatters]
 
keys = generic,color_formatter
 

	
 
#############
 
## LOGGERS ##
 
#############
 
[logger_root]
 
level = INFO
 
handlers = console
 

	
 
[logger_routes]
 
level = INFO
 
handlers = console
 
qualname = routes.middleware
 
# "level = DEBUG" logs the route matched and routing variables.
 
propagate = 0
 

	
 
[logger_rhodecode]
 
level = DEBUG
 
handlers = console
 
qualname = rhodecode
 
propagate = 0
 

	
 
[logger_sqlalchemy]
 
level = ERROR
 
handlers = console
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = NOTSET
 
formatter = color_formatter
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class=rhodecode.lib.colored_formatter.ColorFormatter
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 
\ No newline at end of file
rhodecode/lib/celerylib/tasks.py
 
from celery.decorators import task
 

	
 
from operator import itemgetter
 
from pylons.i18n.translation import _
 
from rhodecode.lib.celerylib import run_task, locked_task
 
from rhodecode.lib.helpers import person
 
from rhodecode.lib.smtp_mailer import SmtpMailer
 
from rhodecode.lib.utils import OrderedDict
 
from time import mktime
 
import os
 
import traceback
 
from vcs.backends import get_repo
 
from rhodecode.model.hg import HgModel
 
try:
 
    import json
 
except ImportError:
 
    #python 2.5 compatibility
 
    import simplejson as json
 

	
 
try:
 
    from celeryconfig import PYLONS_CONFIG as config
 
    celery_on = True
 
except ImportError:
 
    #if celeryconfig is not present let's just load our pylons
 
    #config instead
 
    from pylons import config
 
    celery_on = False
 

	
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats',
 
           'reset_user_password', 'send_email']
 

	
 
def get_session():
 
    if celery_on:
 
        from sqlalchemy import engine_from_config
 
        from sqlalchemy.orm import sessionmaker, scoped_session
 
        engine = engine_from_config(dict(config.items('app:main')),
 
                                    'sqlalchemy.db1.')
 
        sa = scoped_session(sessionmaker(bind=engine))
 
    else:
 
        #If we don't use celery reuse our current application Session
 
        from rhodecode.model.meta import Session
 
        sa = Session()
 

	
 
    return sa
 

	
 
@task
 
@locked_task
 
def whoosh_index(repo_location, full_index):
 
    log = whoosh_index.get_logger()
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    index_location = dict(config.items('app:main'))['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location).run(full_index=full_index)
 

	
 
@task
 
@locked_task
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
 
    from rhodecode.model.db import Statistics, Repository
 
    log = get_commits_stats.get_logger()
 
    author_key_cleaner = lambda k: person(k).replace('"', "") #for js data compatibility
 

	
 
    commits_by_day_author_aggregate = {}
 
    commits_by_day_aggregate = {}
 
    repos_path = HgModel().repos_path
 
    p = os.path.join(repos_path, repo_name)
 
    repo = get_repo(p)
 

	
 
    skip_date_limit = True
 
    parse_limit = 250 #limit for single task changeset parsing
 
    last_rev = 0
 
    last_cs = None
 
    timegetter = itemgetter('time')
 

	
 
    sa = get_session()
 

	
 
    dbrepo = sa.query(Repository)\
 
        .filter(Repository.repo_name == repo_name).scalar()
 
    cur_stats = sa.query(Statistics)\
 
        .filter(Statistics.repository == dbrepo).scalar()
 
    if cur_stats:
 
        last_rev = cur_stats.stat_on_revision
 
    if not repo.revisions:
 
        return True
 

	
 
    if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
 
        #return silently without doing any work if this is not a fresh repository
 
        #and the parsing marker (db) already points at the last revision
 
        return True
 

	
 
    if cur_stats:
 
        commits_by_day_aggregate = OrderedDict(
 
                                       json.loads(
 
                                        cur_stats.commit_activity_combined))
 
        commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
 

	
 
    log.debug('starting parsing %s', parse_limit)
 
    lmktime = mktime
 

	
 
    for cnt, rev in enumerate(repo.revisions[last_rev:]):
 
        last_cs = cs = repo.get_changeset(rev)
 
        k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                          cs.date.timetuple()[2])
 
        timetupple = [int(x) for x in k.split('-')]
 
        timetupple.extend([0 for _ in xrange(6)])
 
        k = lmktime(timetupple)
 
        if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
 
            try:
 
                l = [timegetter(x) for x in commits_by_day_author_aggregate\
 
                        [author_key_cleaner(cs.author)]['data']]
 
                time_pos = l.index(k)
 
            except ValueError:
 
                time_pos = False
 

	
 
            if time_pos >= 0 and time_pos is not False:
 

	
 
                datadict = commits_by_day_author_aggregate\
 
                    [author_key_cleaner(cs.author)]['data'][time_pos]
 

	
 
                datadict["commits"] += 1
 
                datadict["added"] += len(cs.added)
 
                datadict["changed"] += len(cs.changed)
 
                datadict["removed"] += len(cs.removed)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                    datadict = {"time":k,
 
                                "commits":1,
 
                                "added":len(cs.added),
 
                                "changed":len(cs.changed),
 
                                "removed":len(cs.removed),
 
                               }
 
                    commits_by_day_author_aggregate\
 
                        [author_key_cleaner(cs.author)]['data'].append(datadict)
 

	
 
        else:
 
            if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
 
                                    "label":author_key_cleaner(cs.author),
 
                                    "data":[{"time":k,
 
                                             "commits":1,
 
                                             "added":len(cs.added),
 
                                             "changed":len(cs.changed),
 
                                             "removed":len(cs.removed),
 
                                             }],
 
                                    "schema":["commits"],
 
                                    }
 

	
 
        #gather all data by day
 
        if commits_by_day_aggregate.has_key(k):
 
            commits_by_day_aggregate[k] += 1
 
        else:
 
            commits_by_day_aggregate[k] = 1
 

	
 
        if cnt >= parse_limit:
 
            #don't fetch too much data since it can freeze the application
 
            break
 
    overview_data = []
 
    for k, v in commits_by_day_aggregate.items():
 
        overview_data.append([k, v])
 
    overview_data = sorted(overview_data, key=itemgetter(0))
 
    if not commits_by_day_author_aggregate:
 
        commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
 
            "label":author_key_cleaner(repo.contact),
 
            "data":[0, 1],
 
            "schema":["commits"],
 
        }
 

	
 
    stats = cur_stats if cur_stats else Statistics()
 
    stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
 
    stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
    log.debug('last revision %s', last_rev)
 
    leftovers = len(repo.revisions[last_rev:])
 
    log.debug('revisions to parse %s', leftovers)
 

	
 
    if last_rev == 0 or leftovers < parse_limit:
 
        stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
    stats.repository = dbrepo
 
    stats.stat_on_revision = last_cs.revision
 

	
 
    try:
 
        sa.add(stats)
 
        sa.commit()
 
    except:
 
        log.error(traceback.format_exc())
 
        sa.rollback()
 
        return False
 
    if len(repo.revisions) > 1:
 
        run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
 

	
 
    return True
 

	
 
@task
 
def reset_user_password(user_email):
 
    log = reset_user_password.get_logger()
 
    from rhodecode.lib import auth
 
    from rhodecode.model.db import User
 

	
 
    try:
 
        try:
 
            sa = get_session()
 
            user = sa.query(User).filter(User.email == user_email).scalar()
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                sa.add(user)
 
                sa.commit()
 
                log.info('change password for %s', user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 

	
 
        except:
 
            log.error(traceback.format_exc())
 
            sa.rollback()
 

	
 
        run_task(send_email, user_email,
 
                 "Your new rhodecode password",
 
                 'Your new rhodecode password:%s' % (new_passwd))
 
        log.info('send new password mail to %s', user_email)
 

	
 

	
 
    except:
 
        log.error('Failed to update user password')
 
        log.error(traceback.format_exc())
 
    return True
 

	
 
@task
 
def send_email(recipients, subject, body):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 
    
 
    
 
    :param recipients: list of recipients; if this is empty, the email
 
        address defined in the 'email_to' field is used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    """
 
    log = send_email.get_logger()
 
    email_config = dict(config.items('DEFAULT'))
 

	
 
    if not recipients:
 
        recipients = [email_config.get('email_to')]
 

	
 
    def str2bool(v):
 
        return v.lower() in ["yes", "true", "t", "1"]
 

	
 
    mail_from = email_config.get('app_email_from')
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = email_config.get('smtp_use_tls')
 
    ssl = False
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 

	
 
    try:
 
        m = SmtpMailer(mail_from, user, passwd, mail_server,
 
                       mail_port, ssl, tls)
 
        m.send(recipients, subject, body)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
 

	
 
@task
 
def create_repo_fork(form_data, cur_user):
 
    from rhodecode.model.repo import RepoModel
 
    from vcs import get_backend
 
    log = create_repo_fork.get_logger()
 
    repo_model = RepoModel(get_session())
 
    repo_model.create(form_data, cur_user, just_db=True, fork=True)
 
    repo_name = form_data['repo_name']
 
    repos_path = HgModel().repos_path
 
    repo_path = os.path.join(repos_path, repo_name)
 
    repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
 
    alias = form_data['repo_type']
 

	
 
    log.info('creating repo fork %s as %s', repo_name, repo_path)
 
    backend = get_backend(alias)
 
    backend(str(repo_fork_path), create=True, src_url=str(repo_path))
 

	
 
def __get_codes_stats(repo_name):
 
    LANGUAGES_EXTENSIONS = ['action', 'adp', 'ashx', 'asmx',
 
    'aspx', 'asx', 'axd', 'c', 'cfg', 'cfm', 'cpp', 'cs', 'diff', 'do', 'el',
 
    'erl', 'h', 'java', 'js', 'jsp', 'jspx', 'lisp', 'lua', 'm', 'mako', 'ml',
 
    'pas', 'patch', 'php', 'php3', 'php4', 'phtml', 'pm', 'py', 'rb', 'rst',
 
    's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
 

	
 

	
 
    repos_path = HgModel().repos_path
 
    p = os.path.join(repos_path, repo_name)
 
    repo = get_repo(p)
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            k = f.mimetype
 
            if f.extension in LANGUAGES_EXTENSIONS:
 
                if code_stats.has_key(k):
 
                    code_stats[k] += 1
 
                else:
 
                    code_stats[k] = 1
 

	
 
    map(aggregate, tip.walk('/'))
 

	
 
    return code_stats or {}
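
The tasks above are not called directly; other parts of RhodeCode queue them through run_task from rhodecode.lib.celerylib, which dispatches to celery when a celeryconfig is present. A short usage sketch, with all addresses as placeholders:

# Hypothetical callers of the tasks defined above (addresses are placeholders).
from rhodecode.lib.celerylib import run_task
from rhodecode.lib.celerylib import tasks

# Explicit recipient list.
run_task(tasks.send_email, ['admin@localhost'],
         'Test subject', 'Test body')

# recipients=None falls back to the 'email_to' address from the .ini [DEFAULT] section.
run_task(tasks.send_email, None, 'Test subject', 'Test body')

# Generate a new password for the account and mail it to the user.
run_task(tasks.reset_user_password, 'user@example.com')
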
 

	
 

	
 

	
 

	
rhodecode/lib/smtp_mailer.py
 
import logging
 
import smtplib
 
import mimetypes
 
from email.mime.multipart import MIMEMultipart
 
from email.mime.image import MIMEImage
 
from email.mime.audio import MIMEAudio
 
from email.mime.base import MIMEBase
 
from email.mime.text import MIMEText
 
from email.utils import formatdate
 
from email import encoders
 

	
 
class SmtpMailer(object):
 
    """simple smtp mailer class
 
    
 
    mailer = SmtpMailer(mail_from, user, passwd, mail_server, mail_port, ssl, tls)
 
    mailer.send(recipients, subject, body, attachment_files)    
 
    
 
    :param recipients: may be a list of strings or a single string
 
    :param attachment_files: a dict of {filename:location};
 
        the mimetype is guessed from the filename and each file is attached
 
    """
 

	
 
    def __init__(self, mail_from, user, passwd, mail_server,
 
                    mail_port=None, ssl=False, tls=False):
 
        
 
        self.mail_from = mail_from
 
        self.mail_server = mail_server
 
        self.mail_port = mail_port
 
        self.user = user
 
        self.passwd = passwd
 
        self.ssl = ssl
 
        self.tls = tls
 
        self.debug = False
 
        
 
    def send(self, recipients=[], subject='', body='', attachment_files={}):
 

	
 
        if isinstance(recipients, basestring):
 
            recipients = [recipients]
 
        if self.ssl:
 
            smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port)
 
        else:
 
            smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port)
 

	
 
        if self.tls:
 
            smtp_serv.starttls()
 
         
 
        if self.debug:    
 
            smtp_serv.set_debuglevel(1)
 

	
 
        smtp_serv.ehlo("mailer")
 
        smtp_serv.ehlo("rhodecode mailer")
 

	
 
        #if server requires authorization you must provide login and password
 
        smtp_serv.login(self.user, self.passwd)
 

	
 
        date_ = formatdate(localtime=True)
 
        msg = MIMEMultipart()
 
        msg['From'] = self.mail_from
 
        msg['To'] = ','.join(recipients)
 
        msg['Date'] = date_
 
        msg['Subject'] = subject
 
        msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
 

	
 
        msg.attach(MIMEText(body))
 

	
 
        if attachment_files:
 
            self.__atach_files(msg, attachment_files)
 

	
 
        smtp_serv.sendmail(self.mail_from, recipients, msg.as_string())
 
        logging.info('MAIL SENT TO: %s' % recipients)
 
        smtp_serv.quit()
 

	
 

	
 
    def __atach_files(self, msg, attachment_files):
 
        if isinstance(attachment_files, dict):
 
            for f_name, msg_file in attachment_files.items():
 
                ctype, encoding = mimetypes.guess_type(f_name)
 
                logging.info("guessing file %s type based on %s" , ctype, f_name)
 
                if ctype is None or encoding is not None:
 
                    # No guess could be made, or the file is encoded (compressed), so
 
                    # use a generic bag-of-bits type.
 
                    ctype = 'application/octet-stream'
 
                maintype, subtype = ctype.split('/', 1)
 
                if maintype == 'text':
 
                    # Note: we should handle calculating the charset
 
                    file_part = MIMEText(self.get_content(msg_file), 
 
                                         _subtype=subtype)
 
                elif maintype == 'image':
 
                    file_part = MIMEImage(self.get_content(msg_file), 
 
                                          _subtype=subtype)
 
                elif maintype == 'audio':
 
                    file_part = MIMEAudio(self.get_content(msg_file), 
 
                                          _subtype=subtype)
 
                else:
 
                    file_part = MIMEBase(maintype, subtype)
 
                    file_part.set_payload(self.get_content(msg_file))
 
                    # Encode the payload using Base64
 
                    encoders.encode_base64(file_part)
 
                # Set the filename parameter
 
                file_part.add_header('Content-Disposition', 'attachment', 
 
                                     filename=f_name)
 
                file_part.add_header('Content-Type', ctype, name=f_name)
 
                msg.attach(file_part)
 
        else:
 
            raise Exception('Attachment files should be '
 
                            'a dict in format {"filename":"filepath"}')    
 

	
 
    def get_content(self, msg_file):
 
        '''
 
        Get content based on type: if content is a string, open the file first,
 
        otherwise just read it, since it is probably an already open file object
 
        :param msg_file:
 
        '''
 
        if isinstance(msg_file, str):
 
            return open(msg_file, "rb").read()
 
        else:
 
            #just for safe seek to 0
 
            msg_file.seek(0)
 
            return msg_file.read()
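
Beyond plain-text bodies, SmtpMailer can attach files: attachment_files maps a filename (used to guess the mimetype) to either a path on disk or an open file-like object, as get_content above shows. A hedged usage sketch with placeholder values only:

# Hypothetical SmtpMailer usage with an attachment (all values are placeholders).
from rhodecode.lib.smtp_mailer import SmtpMailer

mailer = SmtpMailer(mail_from='rhodecode-noreply@localhost',
                    user='smtp-user', passwd='smtp-secret',
                    mail_server='mail.server.com', mail_port=587,
                    ssl=False, tls=True)

mailer.send(recipients='admin@localhost',       # a single string is accepted too
            subject='Nightly report',
            body='Report attached.',
            attachment_files={'report.txt': '/tmp/report.txt'})
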
rhodecode/lib/utils.py
 
#!/usr/bin/env python
 
# encoding: utf-8
 
# Utilities for RhodeCode
 
# Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
 
# This program is free software; you can redistribute it and/or
 
# modify it under the terms of the GNU General Public License
 
# as published by the Free Software Foundation; version 2
 
# of the License or (at your option) any later version of the license.
 
# 
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
# 
 
# You should have received a copy of the GNU General Public License
 
# along with this program; if not, write to the Free Software
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 
# MA  02110-1301, USA.
 
"""
 
Created on April 18, 2010
 
Utilities for RhodeCode
 
@author: marcink
 
"""
 

	
 
from UserDict import DictMixin
 
from mercurial import ui, config, hg
 
from mercurial.error import RepoError
 
from rhodecode.model import meta
 
from rhodecode.model.caching_query import FromCache
 
from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
 
    UserLog
 
from rhodecode.model.repo import RepoModel
 
from rhodecode.model.user import UserModel
 
from vcs.backends.base import BaseChangeset
 
from paste.script import command
 
import ConfigParser
 
from vcs.utils.lazy import LazyProperty
 
import traceback
 
import datetime
 
import logging
 
import os
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def get_repo_slug(request):
 
    return request.environ['pylons.routes_dict'].get('repo_name')
 

	
 
def is_mercurial(environ):
 
    """
 
    Returns True if request's target is mercurial server - header
 
    ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
 
    """
 
    http_accept = environ.get('HTTP_ACCEPT')
 
    if http_accept and http_accept.startswith('application/mercurial'):
 
        return True
 
    return False
 

	
 
def is_git(environ):
 
    """
 
    Returns True if request's target is git server. ``HTTP_USER_AGENT`` would
 
    then have git client version given.
 
    
 
    :param environ:
 
    """
 
    http_user_agent = environ.get('HTTP_USER_AGENT')
 
    if http_user_agent and http_user_agent.startswith('git'):
 
        return True
 
    return False
 

	
 
def action_logger(user, action, repo, ipaddr, sa=None):
 
def action_logger(user, action, repo, ipaddr='', sa=None):
 
    """
 
    Action logger for various action made by users
 
    
 
    :param user: user that made this action, can be a string unique username or
 
        object containing user_id attribute
 
    :param action: action to log, should be on of predefined unique actions for
 
        easy translations
 
    :param repo: repository that action was made on
 
    :param ipaddr: optional ip address from what the action was made
 
    :param sa: optional sqlalchemy session
 
    
 
    """
 

	
 
    if not sa:
 
        sa = meta.Session()
 

	
 
    try:
 
        if hasattr(user, 'user_id'):
 
            user_obj = user
 
        elif isinstance(user, basestring):
 
            user_obj = UserModel(sa).get_by_username(user, cache=False)
 
        else:
 
            raise Exception('You have to provide user object or username')
 

	
 

	
 
        if repo:
 
            repo_name = repo.lstrip('/')
 

	
 
            repository = RepoModel(sa).get(repo_name, cache=False)
 
            if not repository:
 
                raise Exception('You have to provide valid repository')
 
        else:
 
            raise Exception('You have to provide repository to action logger')
 

	
 

	
 
        user_log = UserLog()
 
        user_log.user_id = user_obj.user_id
 
        user_log.action = action
 
        user_log.repository_name = repo_name
 
        user_log.repository = RepoModel(sa).get(repo_name, cache=False)
 
        user_log.repository = repository
 
        user_log.action_date = datetime.datetime.now()
 
        user_log.user_ip = ipaddr
 
        sa.add(user_log)
 
        sa.commit()
 

	
 
        log.info('Adding user %s, action %s on %s',
 
                                        user_obj.username, action, repo)
 
    except:
 
        log.error(traceback.format_exc())
 
        sa.rollback()
 

	
 
def get_repos(path, recursive=False, initial=False):
 
    """
 
    Scans the given path for repos and returns (name, (type, path)) tuples
 
    :param prefix:
 
    :param path:
 
    :param recursive:
 
    :param initial:
 
    """
 
    from vcs.utils.helpers import get_scm
 
    from vcs.exceptions import VCSError
 

	
 
    try:
 
        scm = get_scm(path)
 
    except:
 
        pass
 
    else:
 
        raise Exception('The given path %s should not be a repository; got %s'
 
                        % (path, scm))
 

	
 
    for dirpath in os.listdir(path):
 
        try:
 
            yield dirpath, get_scm(os.path.join(path, dirpath))
 
        except VCSError:
 
            pass
 

	
 
if __name__ == '__main__':
 
    get_repos('', '/home/marcink/workspace-python')
 

	
 

	
 
def check_repo_fast(repo_name, base_path):
 
    if os.path.isdir(os.path.join(base_path, repo_name)):return False
 
    return True
 

	
 
def check_repo(repo_name, base_path, verify=True):
 

	
 
    repo_path = os.path.join(base_path, repo_name)
 

	
 
    try:
 
        if not check_repo_fast(repo_name, base_path):
 
            return False
 
        r = hg.repository(ui.ui(), repo_path)
 
        if verify:
 
            hg.verify(r)
 
        #here we know that the repo exists and was verified
 
        log.info('%s repo is already created', repo_name)
 
        return False
 
    except RepoError:
 
        #it means that there is no valid repo there...
 
        log.info('%s repo is free for creation', repo_name)
 
        return True
 

	
 
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
 
    while True:
 
        ok = raw_input(prompt)
 
        if ok in ('y', 'ye', 'yes'): return True
 
        if ok in ('n', 'no', 'nop', 'nope'): return False
 
        retries = retries - 1
 
        if retries < 0: raise IOError
 
        print complaint
 

	
 
def get_hg_ui_cached():
 
    try:
 
        sa = meta.Session
 
        ret = sa.query(RhodeCodeUi)\
 
        .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
 
        .all()
 
    except:
 
        pass
 
    finally:
 
        meta.Session.remove()
 
    return ret
 

	
 

	
 
def get_hg_settings():
 
    try:
 
        sa = meta.Session()
 
        ret = sa.query(RhodeCodeSettings)\
 
        .options(FromCache("sql_cache_short", "get_hg_settings"))\
 
        .all()
 
    except:
 
        pass
 
    finally:
 
        meta.Session.remove()
 

	
 
    if not ret:
 
        raise Exception('Could not get application settings !')
 
    settings = {}
 
    for each in ret:
 
        settings['rhodecode_' + each.app_settings_name] = each.app_settings_value
 

	
 
    return settings
 

	
 
def get_hg_ui_settings():
 
    try:
 
        sa = meta.Session()
 
        ret = sa.query(RhodeCodeUi).all()
 
    except:
 
        pass
 
    finally:
 
        meta.Session.remove()
 

	
 
    if not ret:
 
        raise Exception('Could not get application ui settings !')
 
    settings = {}
 
    for each in ret:
 
        k = each.ui_key
 
        v = each.ui_value
 
        if k == '/':
 
            k = 'root_path'
 

	
 
        if k.find('.') != -1:
 
            k = k.replace('.', '_')
 

	
 
        if each.ui_section == 'hooks':
 
            v = each.ui_active
 

	
 
        settings[each.ui_section + '_' + k] = v
 

	
 
    return settings
 

	
 
#propagated from mercurial documentation
 
ui_sections = ['alias', 'auth',
 
                'decode/encode', 'defaults',
 
                'diff', 'email',
 
                'extensions', 'format',
 
                'merge-patterns', 'merge-tools',
 
                'hooks', 'http_proxy',
 
                'smtp', 'patch',
 
                'paths', 'profiling',
 
                'server', 'trusted',
 
                'ui', 'web', ]
 

	
 
def make_ui(read_from='file', path=None, checkpaths=True):
 
    """
 
    A function that will read python rc files or database
 
    and make an mercurial ui object from read options
 
    
 
    :param path: path to mercurial config file
 
    :param checkpaths: check the path
 
    :param read_from: read from 'file' or 'db'
 
    """
 

	
 
    baseui = ui.ui()
 

	
 
    if read_from == 'file':
 
        if not os.path.isfile(path):
 
            log.warning('Unable to read config file %s' % path)
 
            return False
 
        log.debug('reading hgrc from %s', path)
 
        cfg = config.config()
 
        cfg.read(path)
 
        for section in ui_sections:
 
            for k, v in cfg.items(section):
 
                baseui.setconfig(section, k, v)
 
                log.debug('settings ui from file[%s]%s:%s', section, k, v)
 

	
 
    elif read_from == 'db':
 
        hg_ui = get_hg_ui_cached()
 
        for ui_ in hg_ui:
 
            if ui_.ui_active:
 
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
 
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
 

	
 

	
 
    return baseui
 

	
 

	
 
def set_rhodecode_config(config):
 
    hgsettings = get_hg_settings()
 

	
 
    for k, v in hgsettings.items():
 
        config[k] = v
 

	
 
def invalidate_cache(name, *args):
 
    """
 
    Puts cache invalidation task into db for 
 
    further global cache invalidation
 
    """
 
    pass
 

	
 
class EmptyChangeset(BaseChangeset):
 
    """
 
    A dummy empty changeset. It's possible to pass a hash when creating
 
    an EmptyChangeset
 
    """
 

	
 
    def __init__(self, cs='0' * 40):
 
        self._empty_cs = cs
 
        self.revision = -1
 
        self.message = ''
 
        self.author = ''
 
        self.date = ''
 

	
 
    @LazyProperty
 
    def raw_id(self):
 
        """
 
        Returns raw string identifying this changeset, useful for web
 
        representation.
 
        """
 
        return self._empty_cs
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        return self.raw_id[:12]
 

	
 
    def get_file_changeset(self, path):
 
        return self
 

	
 
    def get_file_content(self, path):
 
        return u''
 

	
 
    def get_file_size(self, path):
 
        return 0
 

	
 
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
 
    """
 
    maps all found repositories into db
 
    """
 

	
 
    sa = meta.Session()
 
    rm = RepoModel(sa)
 
    user = sa.query(User).filter(User.admin == True).first()
 

	
 
    for name, repo in initial_repo_list.items():
 
        if not rm.get(name, cache=False):
 
            log.info('repository %s not found creating default', name)
 

	
 
            form_data = {
 
                         'repo_name':name,
 
                         'repo_type':repo.alias,
 
                         'description':repo.description \
 
                            if repo.description != 'unknown' else \
 
                                        '%s repository' % name,
 
                         'private':False
 
                         }
 
            rm.create(form_data, user, just_db=True)
 

	
 
    if remove_obsolete:
 
        #remove from database those repositories that are not in the filesystem
 
        for repo in sa.query(Repository).all():
 
            if repo.repo_name not in initial_repo_list.keys():
 
                sa.delete(repo)
 
                sa.commit()
 

	
 
class OrderedDict(dict, DictMixin):
 

	
 
    def __init__(self, *args, **kwds):
 
        if len(args) > 1:
 
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
 
        try:
 
            self.__end
 
        except AttributeError:
 
            self.clear()
 
        self.update(*args, **kwds)
 

	
 
    def clear(self):
 
        self.__end = end = []
 
        end += [None, end, end]         # sentinel node for doubly linked list
 
        self.__map = {}                 # key --> [key, prev, next]
 
        dict.clear(self)
 

	
 
    def __setitem__(self, key, value):
 
        if key not in self:
 
            end = self.__end
 
            curr = end[1]
 
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
 
        dict.__setitem__(self, key, value)
 

	
 
    def __delitem__(self, key):
 
        dict.__delitem__(self, key)
 
        key, prev, next = self.__map.pop(key)
 
        prev[2] = next
 
        next[1] = prev
 

	
 
    def __iter__(self):
 
        end = self.__end
 
        curr = end[2]
 
        while curr is not end:
 
            yield curr[0]
 
            curr = curr[2]
 

	
 
    def __reversed__(self):
 
        end = self.__end
 
        curr = end[1]
 
        while curr is not end:
 
            yield curr[0]
 
            curr = curr[1]
 

	
 
    def popitem(self, last=True):
 
        if not self:
 
            raise KeyError('dictionary is empty')
 
        if last:
 
            key = reversed(self).next()
 
        else:
 
            key = iter(self).next()
 
        value = self.pop(key)
 
        return key, value
 

	
 
    def __reduce__(self):
 
        items = [[k, self[k]] for k in self]
 
        tmp = self.__map, self.__end
 
        del self.__map, self.__end
 
        inst_dict = vars(self).copy()
 
        self.__map, self.__end = tmp
 
        if inst_dict:
 
            return (self.__class__, (items,), inst_dict)
 
        return self.__class__, (items,)
 

	
 
    def keys(self):
 
        return list(self)
 

	
 
    setdefault = DictMixin.setdefault
 
    update = DictMixin.update
 
    pop = DictMixin.pop
 
    values = DictMixin.values
 
    items = DictMixin.items
 
    iterkeys = DictMixin.iterkeys
 
    itervalues = DictMixin.itervalues
 
    iteritems = DictMixin.iteritems
 

	
 
    def __repr__(self):
 
        if not self:
 
            return '%s()' % (self.__class__.__name__,)
 
        return '%s(%r)' % (self.__class__.__name__, self.items())
 

	
 
    def copy(self):
 
        return self.__class__(self)
 

	
 
    @classmethod
 
    def fromkeys(cls, iterable, value=None):
 
        d = cls()
 
        for key in iterable:
 
            d[key] = value
 
        return d
 

	
 
    def __eq__(self, other):
 
        if isinstance(other, OrderedDict):
 
            return len(self) == len(other) and self.items() == other.items()
 
        return dict.__eq__(self, other)
 

	
 
    def __ne__(self, other):
 
        return not self == other
 

	
 

	
 
#===============================================================================
 
# TEST FUNCTIONS AND CREATORS
 
#===============================================================================
 
def create_test_index(repo_location, full_index):
 
    """Makes default test index
 
    :param repo_location:
 
    :param full_index:
 
    """
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
 
    import shutil
 

	
 
    index_location = os.path.join(repo_location, 'index')
 
    if os.path.exists(index_location):
 
        shutil.rmtree(index_location)
 

	
 
    try:
 
        l = DaemonLock()
 
        WhooshIndexingDaemon(index_location=index_location,
 
                             repo_location=repo_location)\
 
            .run(full_index=full_index)
 
        l.release()
 
    except LockHeld:
 
        pass
 

	
 
def create_test_env(repos_test_path, config):
 
    """Makes a fresh database and 
 
    installs a test repository into a tmp dir
 
    """
rhodecode/model/user.py
 
#!/usr/bin/env python
 
# encoding: utf-8
 
# Model for users
 
# Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
 
# 
 
# This program is free software; you can redistribute it and/or
 
# modify it under the terms of the GNU General Public License
 
# as published by the Free Software Foundation; version 2
 
# of the License or (at your option) any later version of the license.
 
# 
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
# 
 
# You should have received a copy of the GNU General Public License
 
# along with this program; if not, write to the Free Software
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 
# MA  02110-1301, USA.
 
"""
 
Created on April 9, 2010
 
Model for users
 
:author: marcink
 
"""
 

	
 
from pylons.i18n.translation import _
 
from rhodecode.model.caching_query import FromCache
 
from rhodecode.model.db import User
 
from rhodecode.model.meta import Session
 
import logging
 
import traceback
 

	
 
log = logging.getLogger(__name__)
 

	
 
class DefaultUserException(Exception):pass
 

	
 
class UserModel(object):
 

	
 
    def __init__(self, sa=None):
 
        if not sa:
 
            self.sa = Session()
 
        else:
 
            self.sa = sa
 

	
 
    def get(self, user_id, cache=False):
 
        user = self.sa.query(User)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % user_id))
 
        return user.get(user_id)
 

	
 

	
 
    def get_by_username(self, username, cache=False):
 
        user = self.sa.query(User)\
 
            .filter(User.username == username)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % username))
 
        return user.scalar()
 

	
 
    def create(self, form_data):
 
        try:
 
            new_user = User()
 
            for k, v in form_data.items():
 
                setattr(new_user, k, v)
 

	
 
            self.sa.add(new_user)
 
            self.sa.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            self.sa.rollback()
 
            raise
 

	
 
    def create_registration(self, form_data):
 
        from rhodecode.lib.celerylib import tasks, run_task
 
        try:
 
            new_user = User()
 
            for k, v in form_data.items():
 
                if k != 'admin':
 
                    setattr(new_user, k, v)
 

	
 
            self.sa.add(new_user)
 
            self.sa.commit()
 
            body = ('New user registration\n'
 
                    'username: %s\n'
 
                    'email: %s\n')
 
            body = body % (form_data['username'], form_data['email'])
 

	
 
            run_task(tasks.send_email, None,
 
                     _('[RhodeCode] New User registration'),
 
                     body)
 
        except:
 
            log.error(traceback.format_exc())
 
            self.sa.rollback()
 
            raise
 

	
 
    def update(self, user_id, form_data):
 
        try:
 
            new_user = self.get(user_id, cache=False)
 
            if new_user.username == 'default':
 
                raise DefaultUserException(
 
                                _("You can't Edit this user since it's"
 
                                  " crucial for entire application"))
 
            for k, v in form_data.items():
 
                if k == 'new_password' and v != '':
 
                    new_user.password = v
 
                else:
 
                    setattr(new_user, k, v)
 

	
 
            self.sa.add(new_user)
 
            self.sa.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            self.sa.rollback()
 
            raise
 

	
 
    def update_my_account(self, user_id, form_data):
 
        try:
 
            new_user = self.get(user_id, cache=False)
 
            if new_user.username == 'default':
 
                raise DefaultUserException(
 
                                _("You can't Edit this user since it's"
 
                                  " crucial for entire application"))
 
            for k, v in form_data.items():
 
                if k == 'new_password' and v != '':
 
                    new_user.password = v
 
                else:
 
                    if k not in ['admin', 'active']:
 
                        setattr(new_user, k, v)
 

	
 
            self.sa.add(new_user)
 
            self.sa.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            self.sa.rollback()
 
            raise
 

	
 
    def delete(self, user_id):
 
        try:
 
            user = self.get(user_id, cache=False)
 
            if user.username == 'default':
 
                raise DefaultUserException(
 
                                _("You can't remove this user since it's"
 
                                  " crucial for entire application"))
 
            self.sa.delete(user)
 
            self.sa.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            self.sa.rollback()
 
            raise
 

	
 
    def reset_password(self, data):
 
        from rhodecode.lib.celerylib import tasks, run_task
 
        run_task(tasks.reset_user_password, data['email'])
 

	
 

	
 
    def fill_data(self, user):
 
        """
 
        Fills user data with values from the database and logs the user out
 
        if not present in the database
 
        :param user:
 
        """
 

	
 
        if not hasattr(user, 'user_id') or user.user_id is None:
 
            raise Exception('passed in user has to have the user_id attribute')
 

	
 

	
 
        log.debug('filling auth user data')
 
        try:
 
            dbuser = self.get(user.user_id)
 
            user.username = dbuser.username
 
            user.is_admin = dbuser.admin
 
            user.name = dbuser.name
 
            user.lastname = dbuser.lastname
 
            user.email = dbuser.email
 
        except:
 
            log.error(traceback.format_exc())
 
            user.is_authenticated = False
 

	
 
        return user
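
Putting the pieces together, a registration controller would go through UserModel: create_registration stores the account and queues the notification mail added in this changeset, while reset_password queues the reset_user_password task. A hedged sketch; the form values are placeholders:

# Hypothetical use of UserModel for the flows touched by this changeset.
from rhodecode.model.user import UserModel

form_data = {'username': 'newuser',
             'password': 'secret',            # placeholder value
             'email': 'newuser@example.com',
             'name': 'New', 'lastname': 'User',
             'active': True}

um = UserModel()
um.create_registration(form_data)             # saves the user, mails 'email_to'
um.reset_password({'email': form_data['email']})  # queues reset_user_password
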