Changeset - f6dca275c5a8
beta
Marcin Kuzminski <marcin@python-works.com> - 2011-03-22 18:10:37
control mailer debug with the .ini file
2 files changed with 4 insertions and 3 deletions:
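The commit wires the mailer's debug mode to the application's .ini configuration: send_email() now reads the global debug flag via config.get('debug'), converts it with str2bool, and passes it to SmtpMailer, which in turn enables smtplib's verbose output. A minimal sketch of the .ini side, assuming the stock Pylons layout where the debug flag lives in the [DEFAULT] section of development.ini/production.ini:

[DEFAULT]
# when true, SmtpMailer sets smtplib's debuglevel to 1 and the full
# SMTP conversation is echoed to stderr
debug = true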
rhodecode/lib/celerylib/tasks.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.celerylib.tasks
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    RhodeCode task modules, containing all tasks that are supposed to be run
    by the celery daemon
 
    
 
    :created_on: Oct 6, 2010
 
    :author: marcink
 
    :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>    
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software; you can redistribute it and/or
 
# modify it under the terms of the GNU General Public License
 
# as published by the Free Software Foundation; version 2
 
# of the License or (at your option) any later version of the license.
 
# 
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
# 
 
# You should have received a copy of the GNU General Public License
 
# along with this program; if not, write to the Free Software
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 
# MA  02110-1301, USA.
 
from celery.decorators import task
 

	
 
import os
 
import traceback
 
import logging
 

	
 
from time import mktime
 
from operator import itemgetter
 

	
 
from pylons import config
 
from pylons.i18n.translation import _
 

	
 
from rhodecode.lib.celerylib import run_task, locked_task, str2bool
 
from rhodecode.lib.helpers import person
 
from rhodecode.lib.smtp_mailer import SmtpMailer
 
from rhodecode.lib.utils import OrderedDict, add_cache
 
from rhodecode.model import init_model
 
from rhodecode.model import meta
 
from rhodecode.model.db import RhodeCodeUi
 

	
 
from vcs.backends import get_repo
 

	
 
from sqlalchemy import engine_from_config
 

	
 
add_cache(config)
 

	
 
try:
 
    import json
 
except ImportError:
 
    #python 2.5 compatibility
 
    import simplejson as json
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats',
 
           'reset_user_password', 'send_email']
 

	
 
CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 

	
 
def get_session():
 
    if CELERY_ON:
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 
    sa = meta.Session()
 
    return sa
 

	
 
def get_repos_path():
 
    sa = get_session()
 
    q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
 
    return q.ui_value
 

	
 
@task(ignore_result=True)
 
@locked_task
 
def whoosh_index(repo_location, full_index):
 
    #log = whoosh_index.get_logger()
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=get_session())\
 
                         .run(full_index=full_index)
 

	
 
@task(ignore_result=True)
 
@locked_task
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
 
    try:
 
        log = get_commits_stats.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    from rhodecode.model.db import Statistics, Repository
 

	
 
    #for js data compatibility
 
    author_key_cleaner = lambda k: person(k).replace('"', "")
 

	
 
    commits_by_day_author_aggregate = {}
 
    commits_by_day_aggregate = {}
 
    repos_path = get_repos_path()
 
    p = os.path.join(repos_path, repo_name)
 
    repo = get_repo(p)
 

	
 
    skip_date_limit = True
 
    parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
    last_rev = 0
 
    last_cs = None
 
    timegetter = itemgetter('time')
 

	
 
    sa = get_session()
 

	
 
    dbrepo = sa.query(Repository)\
 
        .filter(Repository.repo_name == repo_name).scalar()
 
    cur_stats = sa.query(Statistics)\
 
        .filter(Statistics.repository == dbrepo).scalar()
 

	
 
    if cur_stats is not None:
 
        last_rev = cur_stats.stat_on_revision
 

	
 
    #return if repo is empty
 
    if not repo.revisions:
 
        return True
 

	
 
    if last_rev == repo.get_changeset().revision and len(repo.revisions) > 1:
 
        #pass silently without any work if we're not on the first revision, or
        #the current parsing state (the db marker) already points at the last revision
 
        return True
 

	
 
    if cur_stats:
 
        commits_by_day_aggregate = OrderedDict(
 
                                       json.loads(
 
                                        cur_stats.commit_activity_combined))
 
        commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
 

	
 
    log.debug('starting parsing %s', parse_limit)
 
    lmktime = mktime
 

	
 
    last_rev = last_rev + 1 if last_rev > 0 else last_rev
 

	
 
    for cs in repo[last_rev:last_rev + parse_limit]:
 
        last_cs = cs #remember last parsed changeset
 
        k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                      cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 

	
 
        if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
 
            try:
 
                l = [timegetter(x) for x in commits_by_day_author_aggregate\
 
                        [author_key_cleaner(cs.author)]['data']]
 
                time_pos = l.index(k)
 
            except ValueError:
 
                time_pos = False
 

	
 
            if time_pos >= 0 and time_pos is not False:
 

	
 
                datadict = commits_by_day_author_aggregate\
 
                    [author_key_cleaner(cs.author)]['data'][time_pos]
 

	
 
                datadict["commits"] += 1
 
                datadict["added"] += len(cs.added)
 
                datadict["changed"] += len(cs.changed)
 
                datadict["removed"] += len(cs.removed)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                    datadict = {"time":k,
 
                                "commits":1,
 
                                "added":len(cs.added),
 
                                "changed":len(cs.changed),
 
                                "removed":len(cs.removed),
 
                               }
 
                    commits_by_day_author_aggregate\
 
                        [author_key_cleaner(cs.author)]['data'].append(datadict)
 

	
 
        else:
 
            if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
 
                                    "label":author_key_cleaner(cs.author),
 
                                    "data":[{"time":k,
 
                                             "commits":1,
 
                                             "added":len(cs.added),
 
                                             "changed":len(cs.changed),
 
                                             "removed":len(cs.removed),
 
                                             }],
 
                                    "schema":["commits"],
 
                                    }
 

	
 
        #gather all data by day
 
        if commits_by_day_aggregate.has_key(k):
 
            commits_by_day_aggregate[k] += 1
 
        else:
 
            commits_by_day_aggregate[k] = 1
 

	
 
    overview_data = sorted(commits_by_day_aggregate.items(), key=itemgetter(0))
 
    if not commits_by_day_author_aggregate:
 
        commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
 
            "label":author_key_cleaner(repo.contact),
 
            "data":[0, 1],
 
            "schema":["commits"],
 
        }
 

	
 
    stats = cur_stats if cur_stats else Statistics()
 
    stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
 
    stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
    log.debug('last revision %s', last_rev)
 
    leftovers = len(repo.revisions[last_rev:])
 
    log.debug('revisions to parse %s', leftovers)
 

	
 
    if last_rev == 0 or leftovers < parse_limit:
 
        log.debug('getting code trending stats')
 
        stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
    try:
 
        stats.repository = dbrepo
 
        stats.stat_on_revision = last_cs.revision if last_cs else 0
 
        sa.add(stats)
 
        sa.commit()
 
    except:
 
        log.error(traceback.format_exc())
 
        sa.rollback()
 
        return False
 
    if len(repo.revisions) > 1:
 
        run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
 

	
 
    return True
 

	
 
@task(ignore_result=True)
 
def reset_user_password(user_email):
 
    try:
 
        log = reset_user_password.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    from rhodecode.lib import auth
 
    from rhodecode.model.db import User
 

	
 
    try:
 
        try:
 
            sa = get_session()
 
            user = sa.query(User).filter(User.email == user_email).scalar()
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                user.api_key = auth.generate_api_key(user.username)
 
                sa.add(user)
 
                sa.commit()
 
                log.info('change password for %s', user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 

	
 
        except:
 
            log.error(traceback.format_exc())
 
            sa.rollback()
 

	
 
        run_task(send_email, user_email,
 
                 "Your new rhodecode password",
 
                 'Your new rhodecode password:%s' % (new_passwd))
 
        log.info('send new password mail to %s', user_email)
 

	
 

	
 
    except:
 
        log.error('Failed to update user password')
 
        log.error(traceback.format_exc())
 

	
 
    return True
 

	
 
@task(ignore_result=True)
 
def send_email(recipients, subject, body):
 
    """
 
    Sends an email using the parameters defined in the .ini files.
 
    
 
    
 
    :param recipients: list of recipients; if this is empty, the defined email
 
        address from field 'email_to' is used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    """
 
    try:
 
        log = send_email.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    email_config = config
 

	
 
    if not recipients:
 
        recipients = [email_config.get('email_to')]
 

	
 
    mail_from = email_config.get('app_email_from')
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
+    debug = str2bool(config.get('debug'))
 

	
 
    try:
 
        m = SmtpMailer(mail_from, user, passwd, mail_server,
 
-                       mail_port, ssl, tls)
+                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
 

	
 
@task(ignore_result=True)
 
def create_repo_fork(form_data, cur_user):
 
    try:
 
        log = create_repo_fork.get_logger()
 
    except:
 
        log = logging.getLogger(__name__)
 

	
 
    from rhodecode.model.repo import RepoModel
 
    from vcs import get_backend
 

	
 
    repo_model = RepoModel(get_session())
 
    repo_model.create(form_data, cur_user, just_db=True, fork=True)
 
    repo_name = form_data['repo_name']
 
    repos_path = get_repos_path()
 
    repo_path = os.path.join(repos_path, repo_name)
 
    repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
 
    alias = form_data['repo_type']
 

	
 
    log.info('creating repo fork %s as %s', repo_name, repo_path)
 
    backend = get_backend(alias)
 
    backend(str(repo_fork_path), create=True, src_url=str(repo_path))
 

	
 
def __get_codes_stats(repo_name):
 
    LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
 
    'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
 
    'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
 
    'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
 
    'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
 
    'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
 
    'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
 
    'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
 
    'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
 
    'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
 
    'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
 
    'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
 
    'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
 
    'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
 
    'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
 
    'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
 
    'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
 
    'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
 
    'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
 
    'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
 
    'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
 
    'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
 
    'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
 
    'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
 
    'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
 
    'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
 
    'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
 
    'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
 
    'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
 
    'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
 
    'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
 
    'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
 
    'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
 
    'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
 
    '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
 
    'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
 
    'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
 
    'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
 
    'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
 
    'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
 
    'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
 
    'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
 
    'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
 
    'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
 
    'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
 
    'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
 
    'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
 
    'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
 
    'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
 
    'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
 
    'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
 
    'VbNetAspx', 'sc': 'Python'}
 

	
 
    repos_path = get_repos_path()
 
    p = os.path.join(repos_path, repo_name)
 
    repo = get_repo(p)
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            ext = f.extension
 
            key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
 
            key = key or ext
 
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
 
                if code_stats.has_key(key):
 
                    code_stats[key] += 1
 
                else:
 
                    code_stats[key] = 1
 

	
 
    map(aggregate, tip.walk('/'))
 

	
 
    return code_stats or {}
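On the tasks.py side the only functional change is that the ini value is turned into a boolean (using the existing str2bool helper imported from rhodecode.lib.celerylib) and forwarded to the mailer. A small self-contained sketch of that conversion step, with a hypothetical stand-in for str2bool rather than RhodeCode's actual implementation:

def str2bool(value, default=False):
    # hypothetical stand-in: map common .ini strings to booleans
    if value is None:
        return default
    return str(value).strip().lower() in ('true', 'yes', 'on', 'y', '1')

config = {'debug': 'true'}  # stand-in for the pylons config mapping
debug = str2bool(config.get('debug'))
assert debug is True        # this is the value handed to SmtpMailer(debug=debug)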
rhodecode/lib/smtp_mailer.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.smtp_mailer
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~
 
    
 
    Simple smtp mailer used in RhodeCode
 
    
 
    :created_on: Sep 13, 2010
 
    :copyright: (c) 2011 by marcink.
 
    :license: LICENSE_NAME, see LICENSE_FILE for more details.
 
"""
 

	
 
import logging
 
import smtplib
 
import mimetypes
 
from socket import sslerror
 

	
 
from email.mime.multipart import MIMEMultipart
 
from email.mime.image import MIMEImage
 
from email.mime.audio import MIMEAudio
 
from email.mime.base import MIMEBase
 
from email.mime.text import MIMEText
 
from email.utils import formatdate
 
from email import encoders
 

	
 
class SmtpMailer(object):
 
    """SMTP mailer class
 
    
 
    mailer = SmtpMailer(mail_from, user, passwd, mail_server, mail_port, ssl, tls)
 
    mailer.send(recipients, subject, body, attachment_files)    
 
    
 
    :param recipients: may be a list of strings or a single string
    :param attachment_files: a dict of {filename: location};
        the mailer tries to guess the mimetype and attach the file
 
    
 
    """
 

	
 
    def __init__(self, mail_from, user, passwd, mail_server,
 
-                    mail_port=None, ssl=False, tls=False):
+                    mail_port=None, ssl=False, tls=False, debug=False):
 

	
 
        self.mail_from = mail_from
 
        self.mail_server = mail_server
 
        self.mail_port = mail_port
 
        self.user = user
 
        self.passwd = passwd
 
        self.ssl = ssl
 
        self.tls = tls
 
-        self.debug = False
+        self.debug = debug
 

	
 
    def send(self, recipients=[], subject='', body='', attachment_files=None):
 

	
 
        if isinstance(recipients, basestring):
 
            recipients = [recipients]
 
        if self.ssl:
 
            smtp_serv = smtplib.SMTP_SSL(self.mail_server, self.mail_port)
 
        else:
 
            smtp_serv = smtplib.SMTP(self.mail_server, self.mail_port)
 

	
 
        if self.tls:
 
            smtp_serv.ehlo()
 
            smtp_serv.starttls()
 

	
 
        if self.debug:
 
            smtp_serv.set_debuglevel(1)
 

	
 
        smtp_serv.ehlo()
 

	
 
        #if the server requires authorization, provide login and password,
        #but only if we actually have them
 
        if self.user and self.passwd:
 
            smtp_serv.login(self.user, self.passwd)
 

	
 

	
 
        date_ = formatdate(localtime=True)
 
        msg = MIMEMultipart()
 
        msg['From'] = self.mail_from
 
        msg['To'] = ','.join(recipients)
 
        msg['Date'] = date_
 
        msg['Subject'] = subject
 
        msg.preamble = 'You will not see this in a MIME-aware mail reader.\n'
 

	
 
        msg.attach(MIMEText(body))
 

	
 
        if attachment_files:
 
            self.__atach_files(msg, attachment_files)
 

	
 
        smtp_serv.sendmail(self.mail_from, recipients, msg.as_string())
 
        logging.info('MAIL SENT TO: %s' % recipients)
 

	
 
        try:
 
            smtp_serv.quit()
 
        except sslerror:
 
            # sslerror is sometimes raised when closing tls connections
 
            pass
 

	
 

	
 

	
 
    def __atach_files(self, msg, attachment_files):
 
        if isinstance(attachment_files, dict):
 
            for f_name, msg_file in attachment_files.items():
 
                ctype, encoding = mimetypes.guess_type(f_name)
 
                logging.info("guessing file %s type based on %s" , ctype, f_name)
 
                if ctype is None or encoding is not None:
 
                    # No guess could be made, or the file is encoded (compressed), so
 
                    # use a generic bag-of-bits type.
 
                    ctype = 'application/octet-stream'
 
                maintype, subtype = ctype.split('/', 1)
 
                if maintype == 'text':
 
                    # Note: we should handle calculating the charset
 
                    file_part = MIMEText(self.get_content(msg_file),
 
                                         _subtype=subtype)
 
                elif maintype == 'image':
 
                    file_part = MIMEImage(self.get_content(msg_file),
 
                                          _subtype=subtype)
 
                elif maintype == 'audio':
 
                    file_part = MIMEAudio(self.get_content(msg_file),
 
                                          _subtype=subtype)
 
                else:
 
                    file_part = MIMEBase(maintype, subtype)
 
                    file_part.set_payload(self.get_content(msg_file))
 
                    # Encode the payload using Base64
 
                    encoders.encode_base64(file_part)
 
                # Set the filename parameter
 
                file_part.add_header('Content-Disposition', 'attachment',
 
                                     filename=f_name)
 
                file_part.add_header('Content-Type', ctype, name=f_name)
 
                msg.attach(file_part)
 
        else:
 
            raise Exception('Attachment files should be'
 
                            'a dict in format {"filename":"filepath"}')
 

	
 
    def get_content(self, msg_file):
 
        """Get content based on type, if content is a string do open first
 
        else just read because it's a probably open file object
 
        
 
        :param msg_file:
 
        """
 
        if isinstance(msg_file, str):
 
            return open(msg_file, "rb").read()
 
        else:
 
            #seek to 0 just to be safe
 
            msg_file.seek(0)
 
            return msg_file.read()
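For reference, the effect of the new flag inside SmtpMailer.send() is simply smtplib's built-in debugging. An illustrative, self-contained sketch of that behaviour (server and port are placeholders and a reachable SMTP server is assumed):

import smtplib

def debug_smtp_session(server='localhost', port=25):
    # mirrors what SmtpMailer.send() does when self.debug is set:
    # set_debuglevel(1) makes smtplib echo the whole SMTP dialogue to stderr
    smtp_serv = smtplib.SMTP(server, port)
    smtp_serv.set_debuglevel(1)
    smtp_serv.ehlo()
    smtp_serv.quit()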