Changeset - 67377fd685be
[Not reviewed]
beta
Les Peabody <lpeabody@ccom.unh.edu> - 2011-10-20 17:58:11
applied smtp_auth options update patch
3 files changed with 9 insertions and 2 deletions:
0 comments (0 inline, 0 general)
rhodecode/config/deployment.ini_tmpl
 
################################################################################
 
################################################################################
 
# RhodeCode - Pylons environment configuration                                 #
 
#                                                                              # 
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 
################################################################################
 
## Uncomment and replace with the address which should receive                ## 
 
## any error reports after application crash                                  ##
 
## Additionally, these settings are used by the RhodeCode mailing system      ##
 
################################################################################
 
#email_to = admin@localhost
 
#error_email_from = paste_error@localhost
 
#app_email_from = rhodecode-noreply@localhost
 
#error_message =
 

	
 
#smtp_server = mail.server.com
 
#smtp_username = 
 
#smtp_password = 
 
#smtp_port = 
 
#smtp_use_tls = false
 
#smtp_use_ssl = true
 
# Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
 
#smtp_auth = 
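# Illustrative example only -- mechanisms are listed space separated, exactly as
# the server advertises them, e.g.:
#smtp_auth = PLAIN LOGIN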
 

	
 
[server:main]
 
## number of threads to spawn
 
threadpool_workers = 5
 

	
 
## max requests before thread respawn
 
threadpool_max_requests = 10
 

	
 
## whether to use the threadpool to handle requests
 
use_threadpool = true
 

	
 
use = egg:Paste#http
 
host = 127.0.0.1
 
port = 5000
 

	
 
[app:main]
 
use = egg:rhodecode
 
full_stack = true
 
static_files = true
 
lang=en
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 
app_instance_uuid = ${app_instance_uuid}
 
cut_off_limit = 256000
 
force_https = false 
 
commit_parse_limit = 50
 
use_gravatar = true
 

	
 
####################################
 
###        CELERY CONFIG        ####
 
####################################
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = rhodecode.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serializer = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = debug
 
celeryd.max.tasks.per.child = 1
 

	
 
#tasks will never be sent to the queue, but executed locally instead.
 
celery.always.eager = false
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 
beaker.cache.data_dir=%(here)s/data/cache/data
 
beaker.cache.lock_dir=%(here)s/data/cache/lock
 

	
 
beaker.cache.regions=super_short_term,short_term,long_term,sql_cache_short,sql_cache_med,sql_cache_long
 

	
 
beaker.cache.super_short_term.type=memory
 
beaker.cache.super_short_term.expire=10
 

	
 
beaker.cache.short_term.type=memory
 
beaker.cache.short_term.expire=60
 

	
 
beaker.cache.long_term.type=memory
 
beaker.cache.long_term.expire=36000
 

	
 
beaker.cache.sql_cache_short.type=memory
 
beaker.cache.sql_cache_short.expire=10
 

	
 
beaker.cache.sql_cache_med.type=memory
 
beaker.cache.sql_cache_med.expire=360
 

	
 
beaker.cache.sql_cache_long.type=file
 
beaker.cache.sql_cache_long.expire=3600
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 
## Type of storage used for the session; current types are
## dbm, file, memcached, database, and memory.
## The storage uses the Container API
## that is also used by the cache system.
 
beaker.session.type = file
 

	
 
beaker.session.key = rhodecode
 
beaker.session.secret = ${app_instance_secret}
 
beaker.session.timeout = 36000
 

	
 
## auto-save the session so that .save() does not have to be called explicitly
 
beaker.session.auto = False
 

	
 
## true means expire at browser close
 
#beaker.session.cookie_expires = 3600
 

	
 
    
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 

	
 
# SQLITE [default]
 
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
 
 
 
# POSTGRES
 
# sqlalchemy.db1.url = postgresql://user:pass@localhost/rhodecode
 

	
 
# MySQL
 
# sqlalchemy.db1.url = mysql://user:pass@localhost/rhodecode
 

	
 

	
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.convert_unicode = true
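# Note: the sqlalchemy.db1.* keys above are consumed with SQLAlchemy's
# engine_from_config(config, 'sqlalchemy.db1.'), as can be seen in
# rhodecode/lib/celerylib/tasks.py further down in this changeset.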
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 
[loggers]
 
keys = root, routes, rhodecode, sqlalchemy, beaker, templates
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
#############
 
## LOGGERS ##
 
#############
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers = 
 
qualname = routes.middleware
 
# "level = DEBUG" logs the route matched and routing variables.
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers = 
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers = 
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_rhodecode]
 
level = DEBUG
 
handlers = 
 
qualname = rhodecode
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = INFO
 
formatter = color_formatter
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = WARN
 
formatter = color_formatter_sql
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class=rhodecode.lib.colored_formatter.ColorFormatter
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class=rhodecode.lib.colored_formatter.ColorFormatterSql
 
format= %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 
\ No newline at end of file
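The [loggers], [handlers] and [formatters] sections above follow the standard
library logging fileConfig layout, so they can also be loaded on their own. A
minimal sketch, assuming the template has been rendered to a readable
production.ini:

    # minimal sketch -- 'production.ini' is the rendered copy of this template
    import logging.config

    # fileConfig() only reads the [loggers]/[handlers]/[formatters] sections and
    # their logger_*/handler_*/formatter_* blocks; other ini sections are ignored
    logging.config.fileConfig('production.ini', disable_existing_loggers=False)

    import logging
    logging.getLogger('rhodecode').debug('logging configured from the ini')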
rhodecode/lib/celerylib/tasks.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.celerylib.tasks
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    RhodeCode task modules, containing all tasks that are supposed to be run
    by the celery daemon
 

	
 
    :created_on: Oct 6, 2010
 
    :author: marcink
 
    :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
from celery.decorators import task
 

	
 
import os
 
import traceback
 
import logging
 
from os.path import dirname as dn, join as jn
 

	
 
from time import mktime
 
from operator import itemgetter
 
from string import lower
 

	
 
from pylons import config, url
 
from pylons.i18n.translation import _
 

	
 
from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, safe_str
 
from rhodecode.lib.celerylib import run_task, locked_task, str2bool, \
 
    __get_lockkey, LockHeld, DaemonLock
 
from rhodecode.lib.helpers import person
 
from rhodecode.lib.smtp_mailer import SmtpMailer
 
from rhodecode.lib.utils import add_cache
 
from rhodecode.lib.compat import json, OrderedDict
 

	
 
from rhodecode.model import init_model
 
from rhodecode.model import meta
 
from rhodecode.model.db import RhodeCodeUi, Statistics, Repository
 

	
 
from vcs.backends import get_repo
 

	
 
from sqlalchemy import engine_from_config
 

	
 
add_cache(config)
 

	
 

	
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats',
 
           'reset_user_password', 'send_email']
 

	
 
CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 

	
 

	
 
def get_session():
 
    if CELERY_ON:
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 
    sa = meta.Session()
 
    return sa
 

	
 

	
 
def get_repos_path():
 
    sa = get_session()
 
    q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
 
    return q.ui_value
 

	
 

	
 
@task(ignore_result=True)
 
@locked_task
 
def whoosh_index(repo_location, full_index):
 
    #log = whoosh_index.get_logger()
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=get_session())\
 
                         .run(full_index=full_index)
 

	
 

	
 
@task(ignore_result=True)
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
 
    try:
 
        log = get_commits_stats.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config['here']
 

	
 
    log.info('running task with lockkey %s', lockkey)
 
    try:
 
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
 

	
 
        # for js data compatibility, clean the person key of double quotes
 
        akc = lambda k: person(k).replace('"', "")
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repos_path = get_repos_path()
 
        repo = get_repo(safe_str(os.path.join(repos_path, repo_name)))
 
        repo_size = len(repo.revisions)
 
        # return early if the repo has no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
        last_rev = 0
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        sa = get_session()
 

	
 
        dbrepo = sa.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 
        cur_stats = sa.query(Statistics)\
 
            .filter(Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without doing any work if the repo has more than
            # one revision and the parsing state recorded in the db marker is
            # already at the last revision
 
            lock.release()
 
            return True
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s', parse_limit)
 
        lmktime = mktime
 

	
 
        last_rev = last_rev + 1 if last_rev > 0 else last_rev
 

	
 
        for cs in repo[last_rev:last_rev + parse_limit]:
 
            last_cs = cs  # remember last parsed changeset
 
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 

	
 
            if akc(cs.author) in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[akc(cs.author)]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = False
 

	
 
                if time_pos >= 0 and time_pos is not False:
 

	
 
                    datadict = \
 
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data']\
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time":k,
 
                                                 "commits":1,
 
                                                 "added":len(cs.added),
 
                                                 "changed":len(cs.changed),
 
                                                 "removed":len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            #gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = json.dumps(co_day_auth_aggr)
 
        stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
        log.debug('last revision %s', last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s', leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            sa.add(stats)
 
            sa.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            sa.rollback()
 
            lock.release()
 
            return False
 

	
 
        #final release
 
        lock.release()
 

	
 
        #execute another task if celery is enabled
 
        if len(repo.revisions) > 1 and CELERY_ON:
 
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
 
        return True
 
    except LockHeld:
 
        log.info('LockHeld')
 
        return 'Task with key %s already running' % lockkey
 

	
 
@task(ignore_result=True)
 
def send_password_link(user_email):
 
    try:
 
        log = send_password_link.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    from rhodecode.lib import auth
 
    from rhodecode.model.db import User
 

	
 
    try:
 
        sa = get_session()
 
        user = sa.query(User).filter(User.email == user_email).scalar()
 

	
 
        if user:
 
            link = url('reset_password_confirmation', key=user.api_key,
 
                       qualified=True)
 
            tmpl = """
 
Hello %s
 

	
 
We received a request to create a new password for your account.
 

	
 
You can generate it by clicking the following URL:
 

	
 
%s
 

	
 
If you didn't request a new password, please ignore this email.
 
            """
 
            run_task(send_email, user_email,
 
                     "RhodeCode password reset link",
 
                     tmpl % (user.short_contact, link))
 
            log.info('send new password mail to %s', user_email)
 

	
 
    except:
 
        log.error('Failed to update user password')
 
        log.error(traceback.format_exc())
 
        return False
 

	
 
    return True
 

	
 
@task(ignore_result=True)
 
def reset_user_password(user_email):
 
    try:
 
        log = reset_user_password.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    from rhodecode.lib import auth
 
    from rhodecode.model.db import User
 

	
 
    try:
 
        try:
 
            sa = get_session()
 
            user = sa.query(User).filter(User.email == user_email).scalar()
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                user.api_key = auth.generate_api_key(user.username)
 
                sa.add(user)
 
                sa.commit()
 
                log.info('change password for %s', user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 

	
 
        except:
 
            log.error(traceback.format_exc())
 
            sa.rollback()
 

	
 
        run_task(send_email, user_email,
 
                 "Your new RhodeCode password",
 
                 'Your new RhodeCode password: %s' % (new_passwd))
 
        log.info('send new password mail to %s', user_email)
 

	
 
    except:
 
        log.error('Failed to update user password')
 
        log.error(traceback.format_exc())
 

	
 
    return True
 

	
 

	
 
@task(ignore_result=True)
 
def send_email(recipients, subject, body):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients; if this is empty, the default
        address from the 'email_to' field is used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    """
 
    try:
 
        log = send_email.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    email_config = config
 

	
 
    if not recipients:
 
        recipients = [email_config.get('email_to')]
 

	
 
    mail_from = email_config.get('app_email_from')
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
    debug = str2bool(config.get('debug'))
 
    smtp_auth = email_config.get('smtp_auth')
 

	
 
    try:
 
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
 

	
 

	
 
@task(ignore_result=True)
 
def create_repo_fork(form_data, cur_user):
 
    from rhodecode.model.repo import RepoModel
 
    from vcs import get_backend
 

	
 
    try:
 
        log = create_repo_fork.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    repo_model = RepoModel(get_session())
 
    repo_model.create(form_data, cur_user, just_db=True, fork=True)
 
    repo_name = form_data['repo_name']
 
    repos_path = get_repos_path()
 
    repo_path = os.path.join(repos_path, repo_name)
 
    repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
 
    alias = form_data['repo_type']
 

	
 
    log.info('creating repo fork %s as %s', repo_name, repo_path)
 
    backend = get_backend(alias)
 
    backend(str(repo_fork_path), create=True, src_url=str(repo_path))
 

	
 

	
 
def __get_codes_stats(repo_name):
 
    repos_path = get_repos_path()
 
    repo = get_repo(safe_str(os.path.join(repos_path, repo_name)))
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            ext = lower(f.extension)
 
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
 
                if ext in code_stats:
 
                    code_stats[ext] += 1
 
                else:
 
                    code_stats[ext] = 1
 

	
 
    map(aggregate, tip.walk('/'))
 

	
 
    return code_stats or {}
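These tasks are normally dispatched through run_task rather than called
directly; run_task is expected to hand the work to the celery daemon when
use_celery is true and to fall back to synchronous, in-process execution
otherwise. A rough usage sketch (the recipient address and repository path are
placeholders):

    # rough usage sketch -- recipient and repo path below are placeholders
    from rhodecode.lib.celerylib import run_task
    from rhodecode.lib.celerylib.tasks import send_email, whoosh_index

    run_task(send_email, ['admin@localhost'], 'test subject', 'test body')
    run_task(whoosh_index, '/srv/repos', True)   # full re-index of /srv/repos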
rhodecode/lib/smtp_mailer.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.smtp_mailer
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Simple smtp mailer used in RhodeCode
 

	
 
    :created_on: Sep 13, 2010
 
    :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import logging
 
import smtplib
 
import mimetypes
 
from socket import sslerror
 

	
 
from email.mime.multipart import MIMEMultipart
 
from email.mime.image import MIMEImage
 
from email.mime.audio import MIMEAudio
 
from email.mime.base import MIMEBase
 
from email.mime.text import MIMEText
 
from email.utils import formatdate
 
from email import encoders
 

	
 

	
 

	
 
class SmtpMailer(object):
 
    """SMTP mailer class
 

	
 
    mailer = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                        mail_port, ssl, tls)
 
    mailer.send(recipients, subject, body, attachment_files)
 

	
 
    :param recipients: might be a list of strings or a single string
    :param attachment_files: a dict in the format {filename: location};
        it tries to guess the mimetype and attach the file
 

	
 
    """
 

	
 
    def __init__(self, mail_from, user, passwd, mail_server, smtp_auth,
 
                    mail_port=None, ssl=False, tls=False, debug=False):
 

	
 
        self.mail_from = mail_from
 
        self.mail_server = mail_server
 
        self.mail_port = mail_port
 
        self.user = user
 
        self.passwd = passwd
 
        self.ssl = ssl
 
        self.tls = tls
 
        self.debug = debug
 
        self.auth = smtp_auth
 

	
 
    def send(self, recipients=[], subject='', body='', attachment_files=None):
 

	
 
        if isinstance(recipients, basestring):
 
            recipients = [recipients]
 
        if self.ssl:
 
            smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port)
 
        else:
 
            smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port)
 

	
 
        if self.tls:
 
            smtp_serv.ehlo()
 
            smtp_serv.starttls()
 

	
 
        if self.debug:
 
            smtp_serv.set_debuglevel(1)
 

	
 
        smtp_serv.ehlo()
 
        if self.auth:
 
            smtp_serv.esmtp_features["auth"] = self.auth
 

	
 
        # if the server requires authorization, provide login and password
        # (but only if we actually have them)
 
        if self.user and self.passwd:
 
            smtp_serv.login(self.user, self.passwd)
 

	
 
        date_ = formatdate(localtime=True)
 
        msg = MIMEMultipart()
 
        msg.set_type('multipart/alternative')
 
        msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
 

	
 
        text_msg = MIMEText(body)
 
        text_msg.set_type('text/plain')
 
        text_msg.set_param('charset', 'UTF-8')
 

	
 
        msg['From'] = self.mail_from
 
        msg['To'] = ','.join(recipients)
 
        msg['Date'] = date_
 
        msg['Subject'] = subject
 

	
 
        msg.attach(text_msg)
 

	
 
        if attachment_files:
 
            self.__atach_files(msg, attachment_files)
 

	
 
        smtp_serv.sendmail(self.mail_from, recipients, msg.as_string())
 
        logging.info('MAIL SENT TO: %s' % recipients)
 

	
 
        try:
 
            smtp_serv.quit()
 
        except sslerror:
 
            # sslerror is raised in tls connections on closing sometimes
 
            pass
 

	
 
    def __atach_files(self, msg, attachment_files):
 
        if isinstance(attachment_files, dict):
 
            for f_name, msg_file in attachment_files.items():
 
                ctype, encoding = mimetypes.guess_type(f_name)
 
                logging.info("guessing file %s type based on %s", ctype,
 
                             f_name)
 
                if ctype is None or encoding is not None:
 
                    # No guess could be made, or the file is encoded
 
                    # (compressed), so use a generic bag-of-bits type.
 
                    ctype = 'application/octet-stream'
 
                maintype, subtype = ctype.split('/', 1)
 
                if maintype == 'text':
 
                    # Note: we should handle calculating the charset
 
                    file_part = MIMEText(self.get_content(msg_file),
 
                                         _subtype=subtype)
 
                elif maintype == 'image':
 
                    file_part = MIMEImage(self.get_content(msg_file),
 
                                          _subtype=subtype)
 
                elif maintype == 'audio':
 
                    file_part = MIMEAudio(self.get_content(msg_file),
 
                                          _subtype=subtype)
 
                else:
 
                    file_part = MIMEBase(maintype, subtype)
 
                    file_part.set_payload(self.get_content(msg_file))
 
                    # Encode the payload using Base64
 
                    encoders.encode_base64(file_part)
 
                # Set the filename parameter
 
                file_part.add_header('Content-Disposition', 'attachment',
 
                                     filename=f_name)
 
                file_part.add_header('Content-Type', ctype, name=f_name)
 
                msg.attach(file_part)
 
        else:
 
            raise Exception('Attachment files should be '
 
                            'a dict in format {"filename":"filepath"}')
 

	
 
    def get_content(self, msg_file):
 
        """Get content based on type, if content is a string do open first
 
        else just read because it's a probably open file object
 

	
 
        :param msg_file:
 
        """
 
        if isinstance(msg_file, str):
 
            return open(msg_file, "rb").read()
 
        else:
 
            # seek back to 0, just to be safe
 
            msg_file.seek(0)
 
            return msg_file.read()
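With this patch SmtpMailer takes the extra smtp_auth argument, which
send_email() in tasks.py fills from the new smtp_auth ini key. A minimal usage
sketch (all connection values below are placeholders):

    # minimal usage sketch -- server, port, credentials and recipients are placeholders
    from rhodecode.lib.smtp_mailer import SmtpMailer

    mailer = SmtpMailer(mail_from='rhodecode-noreply@localhost',
                        user='smtp-user', passwd='smtp-secret',
                        mail_server='mail.server.com',
                        smtp_auth='PLAIN LOGIN',   # space separated AUTH mechanisms
                        mail_port=465, ssl=True, tls=False, debug=False)

    # recipients may be a single address or a list of addresses
    mailer.send(['admin@localhost'], 'RhodeCode test mail', 'plain text body')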