Changeset e26c0616e003 on branch default (not reviewed)
Author: Mads Kiilerich <mads@kiilerich.com>
Date: 2019-12-16 02:37:05
Grafted from: 44cc1056a1ac

py3: use global next() function instead of .next() method

From 2to3 -f next.

7 files changed with 21 insertions and 21 deletions:
0 comments (0 inline, 0 general)
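
The change applied throughout is mechanical: Python 3 dropped the .next() method that Python 2 iterators exposed and renamed it __next__(), which is invoked through the global next() builtin (available since Python 2.6). A minimal illustration of the two spellings, not taken from the changeset itself:

    it = iter([1, 2, 3])
    value = it.next()    # Python 2 only: call the iterator's next() method
    value = next(it)     # Python 2.6+/3: the next() builtin calls it.__next__()
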
kallithea/lib/diffs.py
 
@@ -397,64 +397,64 @@ class DiffProcessor(object):
 
                'new_lineno': '',
 
                'action':     'context',
 
                'line':       msg,
 
                } for _op, msg in stats['ops'].iteritems()
 
                  if _op not in [MOD_FILENODE]])
 

	
 
            _files.append({
 
                'old_filename':     head['a_path'],
 
                'filename':         head['b_path'],
 
                'old_revision':     head['a_blob_id'],
 
                'new_revision':     head['b_blob_id'],
 
                'chunks':           chunks,
 
                'operation':        op,
 
                'stats':            stats,
 
            })
 

	
 
        if not inline_diff:
 
            return _files
 

	
 
        # highlight inline changes when one del is followed by one add
 
        for diff_data in _files:
 
            for chunk in diff_data['chunks']:
 
                lineiter = iter(chunk)
 
                try:
 
-                    peekline = lineiter.next()
+                    peekline = next(lineiter)
 
                    while True:
 
                        # find a first del line
 
                        while peekline['action'] != 'del':
 
-                            peekline = lineiter.next()
+                            peekline = next(lineiter)
 
                        delline = peekline
 
-                        peekline = lineiter.next()
+                        peekline = next(lineiter)
 
                        # if not followed by add, eat all following del lines
 
                        if peekline['action'] != 'add':
 
                            while peekline['action'] == 'del':
 
-                                peekline = lineiter.next()
+                                peekline = next(lineiter)
 
                            continue
 
                        # found an add - make sure it is the only one
 
                        addline = peekline
 
                        try:
 
-                            peekline = lineiter.next()
+                            peekline = next(lineiter)
 
                        except StopIteration:
 
                            # add was last line - ok
 
                            _highlight_inline_diff(delline, addline)
 
                            raise
 
                        if peekline['action'] != 'add':
 
                            # there was only one add line - ok
 
                            _highlight_inline_diff(delline, addline)
 
                except StopIteration:
 
                    pass
 

	
 
        return _files
 

	
 
    def stat(self):
 
        """
 
        Returns tuple of added, and removed lines for this instance
 
        """
 
        return self.adds, self.removes
 

	
 

	
 
_escape_re = re.compile(r'(&)|(<)|(>)|(\t)|(\r)|(?<=.)( \n| $)')
 

	
 

	
 
def _escaper(string):
 
    """
 
@@ -539,124 +539,124 @@ def _get_header(vcs, diff_chunk):
 
    if match is None:
 
        raise Exception('diff not recognized as valid %s diff' % vcs)
 
    meta_info = match.groupdict()
 
    rest = diff_chunk[match.end():]
 
    if rest and _header_next_check.match(rest):
 
        raise Exception('cannot parse %s diff header: %r followed by %r' % (vcs, diff_chunk[:match.end()], rest[:1000]))
 
    diff_lines = (_escaper(m.group(0)) for m in re.finditer(br'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
 
    return meta_info, diff_lines
 

	
 

	
 
_chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
 
_newline_marker = re.compile(r'^\\ No newline at end of file')
 

	
 

	
 
def _parse_lines(diff_lines):
 
    """
 
    Given an iterator of diff body lines, parse them and return a dict per
 
    line and added/removed totals.
 
    """
 
    added = deleted = 0
 
    old_line = old_end = new_line = new_end = None
 

	
 
    chunks = []
 
    try:
 
-        line = diff_lines.next()
+        line = next(diff_lines)
 

	
 
        while True:
 
            lines = []
 
            chunks.append(lines)
 

	
 
            match = _chunk_re.match(line)
 

	
 
            if not match:
 
                raise Exception('error parsing diff @@ line %r' % line)
 

	
 
            gr = match.groups()
 
            (old_line, old_end,
 
             new_line, new_end) = [int(x or 1) for x in gr[:-1]]
 
            old_line -= 1
 
            new_line -= 1
 

	
 
            context = len(gr) == 5
 
            old_end += old_line
 
            new_end += new_line
 

	
 
            if context:
 
                # skip context only if it's first line
 
                if int(gr[0]) > 1:
 
                    lines.append({
 
                        'old_lineno': '...',
 
                        'new_lineno': '...',
 
                        'action':     'context',
 
                        'line':       line,
 
                    })
 

	
 
-            line = diff_lines.next()
+            line = next(diff_lines)
 

	
 
            while old_line < old_end or new_line < new_end:
 
                if not line:
 
                    raise Exception('error parsing diff - empty line at -%s+%s' % (old_line, new_line))
 

	
 
                affects_old = affects_new = False
 

	
 
                command = line[0]
 
                if command == '+':
 
                    affects_new = True
 
                    action = 'add'
 
                    added += 1
 
                elif command == '-':
 
                    affects_old = True
 
                    action = 'del'
 
                    deleted += 1
 
                elif command == ' ':
 
                    affects_old = affects_new = True
 
                    action = 'unmod'
 
                else:
 
                    raise Exception('error parsing diff - unknown command in line %r at -%s+%s' % (line, old_line, new_line))
 

	
 
                if not _newline_marker.match(line):
 
                    old_line += affects_old
 
                    new_line += affects_new
 
                    lines.append({
 
                        'old_lineno':   affects_old and old_line or '',
 
                        'new_lineno':   affects_new and new_line or '',
 
                        'action':       action,
 
                        'line':         line[1:],
 
                    })
 

	
 
-                line = diff_lines.next()
+                line = next(diff_lines)
 

	
 
                if _newline_marker.match(line):
 
                    # we need to append to lines, since this is not
 
                    # counted in the line specs of diff
 
                    lines.append({
 
                        'old_lineno':   '...',
 
                        'new_lineno':   '...',
 
                        'action':       'context',
 
                        'line':         line,
 
                    })
 
-                    line = diff_lines.next()
+                    line = next(diff_lines)
 
            if old_line > old_end:
 
                raise Exception('error parsing diff - more than %s "-" lines at -%s+%s' % (old_end, old_line, new_line))
 
            if new_line > new_end:
 
                raise Exception('error parsing diff - more than %s "+" lines at -%s+%s' % (new_end, old_line, new_line))
 
    except StopIteration:
 
        pass
 
    if old_line != old_end or new_line != new_end:
 
        raise Exception('diff processing broken when old %s<>%s or new %s<>%s line %r' % (old_line, old_end, new_line, new_end, line))
 

	
 
    return chunks, added, deleted
 

	
 
# Used for inline highlighter word split, must match the substitutions in _escaper
 
_token_re = re.compile(r'()(&amp;|&lt;|&gt;|<u>\t</u>|<u class="cr"></u>| <i></i>|\W+?)')
 

	
 

	
 
def _highlight_inline_diff(old, new):
 
    """
 
    Highlight simple add/remove in two lines given as info dicts. They are
 
    modified in place and given markup with <del>/<ins>.
 
    """
 
    assert old['action'] == 'del'
 
    assert new['action'] == 'add'
 

	
 
    oldwords = _token_re.split(old['line'])
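
In diffs.py all the converted calls operate on plain iterators (lineiter, diff_lines), so next(iterator) behaves exactly like the old .next() call, including raising StopIteration when the input runs out, and the surrounding try/except StopIteration blocks keep working unchanged. A small self-contained sketch of the same peek-ahead idiom, for illustration only:

    chunk = [{'action': 'unmod'}, {'action': 'del'}, {'action': 'add'}]
    lineiter = iter(chunk)
    try:
        peekline = next(lineiter)          # may raise StopIteration on an empty chunk
        while peekline['action'] != 'del':
            peekline = next(lineiter)      # skip ahead to the first deleted line
    except StopIteration:
        pass                               # ran off the end - nothing left to highlight
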
kallithea/lib/helpers.py
 
@@ -366,49 +366,49 @@ def gen_color(n=10000):
 
    golden_ratio = 0.618033988749895
 
    h = 0.22717784590367374
 

	
 
    for _unused in xrange(n):
 
        h += golden_ratio
 
        h %= 1
 
        HSV_tuple = [h, 0.95, 0.95]
 
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
 
        yield [str(int(x * 256)) for x in RGB_tuple]
 

	
 

	
 
def pygmentize_annotation(repo_name, filenode, **kwargs):
 
    """
 
    pygmentize function for annotation
 

	
 
    :param filenode:
 
    """
 
    cgenerator = gen_color()
 
    color_dict = {}
 

	
 
    def get_color_string(cs):
 
        if cs in color_dict:
 
            col = color_dict[cs]
 
        else:
 
-            col = color_dict[cs] = cgenerator.next()
+            col = color_dict[cs] = next(cgenerator)
 
        return "color: rgb(%s)! important;" % (', '.join(col))
 

	
 
    def url_func(changeset):
 
        author = escape(changeset.author)
 
        date = changeset.date
 
        message = escape(changeset.message)
 
        tooltip_html = ("<b>Author:</b> %s<br/>"
 
                        "<b>Date:</b> %s</b><br/>"
 
                        "<b>Message:</b> %s") % (author, date, message)
 

	
 
        lnk_format = show_id(changeset)
 
        uri = link_to(
 
                lnk_format,
 
                url('changeset_home', repo_name=repo_name,
 
                    revision=changeset.raw_id),
 
                style=get_color_string(changeset.raw_id),
 
                **{'data-toggle': 'popover',
 
                   'data-content': tooltip_html}
 
              )
 

	
 
        uri += '\n'
 
        return uri
 

	
 
    return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
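
gen_color() above is a generator function, so cgenerator is a generator object; next(cgenerator) pulls the next colour from it just as cgenerator.next() did on Python 2. A trimmed-down illustration of the same pattern, not the full Kallithea code:

    from colorsys import hsv_to_rgb

    def gen_color(n=10000):
        h = 0.22717784590367374
        for _ in range(n):
            h = (h + 0.618033988749895) % 1
            yield [str(int(x * 256)) for x in hsv_to_rgb(h, 0.95, 0.95)]

    cgenerator = gen_color()
    col = next(cgenerator)    # advance the generator with the builtin
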
kallithea/lib/indexers/__init__.py
 
@@ -125,49 +125,49 @@ JOURNAL_SCHEMA = Schema(
 
    date=DATETIME(),
 
    action=TEXT(),
 
    repository=ID(),
 
    ip=TEXT(),
 
)
 

	
 

	
 
class WhooshResultWrapper(object):
 
    def __init__(self, search_type, searcher, matcher, highlight_items,
 
                 repo_location):
 
        self.search_type = search_type
 
        self.searcher = searcher
 
        self.matcher = matcher
 
        self.highlight_items = highlight_items
 
        self.fragment_size = 200
 
        self.repo_location = repo_location
 

	
 
    @LazyProperty
 
    def doc_ids(self):
 
        docs_id = []
 
        while self.matcher.is_active():
 
            docnum = self.matcher.id()
 
            chunks = [offsets for offsets in self.get_chunks()]
 
            docs_id.append([docnum, chunks])
 
-            self.matcher.next()
+            self.matcher.next()  # this looks like a py2 iterator ... but it isn't
 
        return docs_id
 

	
 
    def __str__(self):
 
        return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __len__(self):
 
        return len(self.doc_ids)
 

	
 
    def __iter__(self):
 
        """
 
        Allows Iteration over results,and lazy generate content
 

	
 
        *Requires* implementation of ``__getitem__`` method.
 
        """
 
        for docid in self.doc_ids:
 
            yield self.get_full_content(docid)
 

	
 
    def __getitem__(self, key):
 
        """
 
        Slicing of resultWrapper
 
        """
kallithea/lib/middleware/wrapper.py
 
@@ -42,56 +42,56 @@ class Meter:
 
        self._start = time.time()
 
        self._size = 0
 

	
 
    def duration(self):
 
        return time.time() - self._start
 

	
 
    def start_response(self, status, response_headers, exc_info=None):
 
        write = self._start_response(status, response_headers, exc_info)
 
        def metered_write(s):
 
            self.measure(s)
 
            write(s)
 
        return metered_write
 

	
 
    def measure(self, chunk):
 
        self._size += len(chunk)
 

	
 
    def size(self):
 
        return self._size
 

	
 

	
 
class ResultIter:
 

	
 
    def __init__(self, result, meter, description):
 
        self._result_close = getattr(result, 'close', None) or (lambda: None)
 
-        self._next = iter(result).next
+        self._next = iter(result).__next__
 
        self._meter = meter
 
        self._description = description
 

	
 
    def __iter__(self):
 
        return self
 

	
 
-    def next(self):
+    def __next__(self):
 
        chunk = self._next()
 
        self._meter.measure(chunk)
 
        return chunk
 

	
 
    def close(self):
 
        self._result_close()
 
        log.info("%s responded after %.3fs with %s bytes", self._description, self._meter.duration(), self._meter.size())
 

	
 

	
 
class RequestWrapper(object):
 

	
 
    def __init__(self, app, config):
 
        self.application = app
 
        self.config = config
 

	
 
    def __call__(self, environ, start_response):
 
        meter = Meter(start_response)
 
        description = "Request from %s for %s" % (
 
            _get_ip_addr(environ),
 
            get_path_info(environ),
 
        )
 
        try:
 
            result = self.application(environ, meter.start_response)
 
        finally:
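
Under Python 3 a class takes part in iteration by defining __iter__() and __next__(); instances are then consumed with a for loop or the next() builtin, which is what the ResultIter changes above switch to. A self-contained sketch of the same shape, with illustrative names rather than the Kallithea ones:

    class MeteredIter:
        # Wrap an iterable and count how many chunks were handed out.
        def __init__(self, result):
            self._next = iter(result).__next__   # bound __next__ of the underlying iterator
            self.count = 0

        def __iter__(self):
            return self

        def __next__(self):
            chunk = self._next()                 # propagates StopIteration when exhausted
            self.count += 1
            return chunk

    for chunk in MeteredIter([b'a', b'b']):      # or advance manually with next(...)
        pass
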
kallithea/lib/vcs/subprocessio.py
 
@@ -157,49 +157,49 @@ class BufferedGenerator(object):
 
    """
 

	
 
    def __init__(self, source, buffer_size=65536, chunk_size=4096,
 
                 starting_values=None, bottomless=False):
 
        starting_values = starting_values or []
 
        if bottomless:
 
            maxlen = int(buffer_size / chunk_size)
 
        else:
 
            maxlen = None
 

	
 
        self.data = collections.deque(starting_values, maxlen)
 
        self.worker = InputStreamChunker(source, self.data, buffer_size,
 
                                         chunk_size)
 
        if starting_values:
 
            self.worker.data_added.set()
 
        self.worker.start()
 

	
 
    ####################
 
    # Generator's methods
 
    ####################
 

	
 
    def __iter__(self):
 
        return self
 

	
 
-    def next(self):
+    def __next__(self):
 
        while not len(self.data) and not self.worker.EOF.is_set():
 
            self.worker.data_added.clear()
 
            self.worker.data_added.wait(0.2)
 
        if len(self.data):
 
            self.worker.keep_reading.set()
 
            return bytes(self.data.popleft())
 
        elif self.worker.EOF.is_set():
 
            raise StopIteration
 

	
 
    def throw(self, type, value=None, traceback=None):
 
        if not self.worker.EOF.is_set():
 
            raise type(value)
 

	
 
    def start(self):
 
        self.worker.start()
 

	
 
    def stop(self):
 
        self.worker.stop()
 

	
 
    def close(self):
 
        try:
 
            self.worker.stop()
 
            self.throw(GeneratorExit)
 
        except (GeneratorExit, StopIteration):
 
@@ -368,57 +368,57 @@ class SubprocessIOChunker(object):
 
        ): # and it failed
 
            bg_out.stop()
 
            out = b''.join(bg_out)
 
            bg_err.stop()
 
            err = b''.join(bg_err)
 
            if (err.strip() == b'fatal: The remote end hung up unexpectedly' and
 
                out.startswith(b'0034shallow ')
 
            ):
 
                # hack inspired by https://github.com/schacon/grack/pull/7
 
                bg_out = iter([out])
 
                _p = None
 
            elif err:
 
                raise EnvironmentError("Subprocess exited due to an error: %s" % err)
 
            else:
 
                raise EnvironmentError(
 
                    "Subprocess exited with non 0 ret code: %s" % returncode)
 
        self.process = _p
 
        self.output = bg_out
 
        self.error = bg_err
 
        self.inputstream = inputstream
 

	
 
    def __iter__(self):
 
        return self
 

	
 
-    def next(self):
+    def __next__(self):
 
        if self.process:
 
            returncode = self.process.poll()
 
            if (returncode is not None # process has terminated
 
                and returncode != 0
 
            ): # and it failed
 
                self.output.stop()
 
                self.error.stop()
 
                err = ''.join(self.error)
 
                raise EnvironmentError("Subprocess exited due to an error:\n" + err)
 
-        return self.output.next()
+        return next(self.output)
 

	
 
    def throw(self, type, value=None, traceback=None):
 
        if self.output.length or not self.output.done_reading:
 
            raise type(value)
 

	
 
    def close(self):
 
        try:
 
            self.process.terminate()
 
        except:
 
            pass
 
        try:
 
            self.output.close()
 
        except:
 
            pass
 
        try:
 
            self.error.close()
 
        except:
 
            pass
 
        try:
 
            os.close(self.inputstream)
 
        except:
 
            pass
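
BufferedGenerator.__next__ ends the stream by raising StopIteration once the worker has reached EOF and the buffer is drained, and SubprocessIOChunker.__next__ simply delegates with next(self.output), so exhaustion propagates to whoever iterates the chunker. A stripped-down sketch of that delegation, with illustrative names:

    class Chunker:
        def __init__(self, output):
            self.output = output            # any iterator, e.g. a BufferedGenerator
        def __iter__(self):
            return self
        def __next__(self):
            return next(self.output)        # StopIteration from the source ends this iterator too

    assert list(Chunker(iter([b'x', b'y']))) == [b'x', b'y']
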
kallithea/tests/other/test_vcs_operations.py
 
@@ -157,49 +157,49 @@ class Command(object):
 
        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv)
 
        stdout, stderr = p.communicate()
 
        if DEBUG:
 
            if stdout:
 
                print('stdout:', stdout)
 
            if stderr:
 
                print('stderr:', stderr)
 
        if not ignoreReturnCode:
 
            assert p.returncode == 0
 
        return stdout, stderr
 

	
 

	
 
def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
 
    return tempfile.mkdtemp(dir=base.TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
 

	
 

	
 
def _add_files(vcs, dest_dir, files_no=3):
 
    """
 
    Generate some files, add it to dest_dir repo and push back
 
    vcs is git or hg and defines what VCS we want to make those files for
 

	
 
    :param vcs:
 
    :param dest_dir:
 
    """
 
-    added_file = '%ssetup.py' % _RandomNameSequence().next()
+    added_file = '%ssetup.py' % next(_RandomNameSequence())
 
    open(os.path.join(dest_dir, added_file), 'a').close()
 
    Command(dest_dir).execute(vcs, 'add', added_file)
 

	
 
    email = 'me@example.com'
 
    if os.name == 'nt':
 
        author_str = 'User <%s>' % email
 
    else:
 
        author_str = 'User ǝɯɐᴎ <%s>' % email
 
    for i in xrange(files_no):
 
        cmd = """echo "added_line%s" >> %s""" % (i, added_file)
 
        Command(dest_dir).execute(cmd)
 
        if vcs == 'hg':
 
            cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
 
                i, author_str, added_file
 
            )
 
        elif vcs == 'git':
 
            cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
 
                i, author_str, added_file
 
            )
 
        # git commit needs EMAIL on some machines
 
        Command(dest_dir).execute(cmd, EMAIL=email)
 

	
 
def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3):
 
    _add_files(vt.repo_type, dest_dir, files_no=files_no)
 
@@ -242,51 +242,51 @@ def _check_proper_git_push(stdout, stder
 
    assert 'master -> master' in stderr
 

	
 

	
 
@pytest.mark.usefixtures("test_context_fixture")
 
class TestVCSOperations(base.TestController):
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        # DISABLE ANONYMOUS ACCESS
 
        set_anonymous_access(False)
 

	
 
    @pytest.fixture()
 
    def testhook_cleanup(self):
 
        yield
 
        # remove hook
 
        for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
 
            entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
 
            if entry:
 
                Session().delete(entry)
 
        Session().commit()
 

	
 
    @pytest.fixture(scope="module")
 
    def testfork(self):
 
        # create fork so the repo stays untouched
 
-        git_fork_name = u'%s_fork%s' % (base.GIT_REPO, _RandomNameSequence().next())
+        git_fork_name = u'%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence()))
 
        fixture.create_fork(base.GIT_REPO, git_fork_name)
 
-        hg_fork_name = u'%s_fork%s' % (base.HG_REPO, _RandomNameSequence().next())
+        hg_fork_name = u'%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence()))
 
        fixture.create_fork(base.HG_REPO, hg_fork_name)
 
        return {'git': git_fork_name, 'hg': hg_fork_name}
 

	
 
    @parametrize_vcs_test
 
    def test_clone_repo_by_admin(self, webserver, vt):
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
 

	
 
        if vt.repo_type == 'git':
 
            assert 'Cloning into' in stdout + stderr
 
            assert stderr == '' or stdout == ''
 
        elif vt.repo_type == 'hg':
 
            assert 'requesting all changes' in stdout
 
            assert 'adding changesets' in stdout
 
            assert 'adding manifests' in stdout
 
            assert 'adding file changes' in stdout
 
            assert stderr == ''
 

	
 
    @parametrize_vcs_test_http
 
    def test_clone_wrong_credentials(self, webserver, vt):
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
 
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        if vt.repo_type == 'git':
 
            assert 'fatal: Authentication failed' in stderr
 
@@ -298,49 +298,49 @@ class TestVCSOperations(base.TestControl
 
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
 

	
 
    def test_clone_hg_repo_as_git(self, webserver):
 
        clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO)
 
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        assert 'not found' in stderr
 

	
 
    @parametrize_vcs_test
 
    def test_clone_non_existing_path(self, webserver, vt):
 
        clone_url = vt.repo_url_param(webserver, 'trololo')
 
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        if vt.repo_type == 'git':
 
            assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout
 

	
 
    @parametrize_vcs_test
 
    def test_push_new_repo(self, webserver, vt):
 
        # Clear the log so we know what is added
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
        # Create an empty server repo using the API
 
-        repo_name = u'new_%s_%s' % (vt.repo_type, _RandomNameSequence().next())
+        repo_name = u'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
 
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        params = {
 
            "id": 7,
 
            "api_key": usr.api_key,
 
            "method": 'create_repo',
 
            "args": dict(repo_name=repo_name,
 
                         owner=base.TEST_USER_ADMIN_LOGIN,
 
                         repo_type=vt.repo_type),
 
        }
 
        req = urllib2.Request(
 
            'http://%s:%s/_admin/api' % webserver.server_address,
 
            data=ascii_bytes(json.dumps(params)),
 
            headers={'content-type': 'application/json'})
 
        response = urllib2.urlopen(req)
 
        result = json.loads(response.read())
 
        # Expect something like:
 
        # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
 
        assert result[u'result'][u'success']
 

	
 
        # Create local clone of the empty server repo
 
        local_clone_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, repo_name)
 
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
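
The test helpers draw unique suffixes from tempfile._RandomNameSequence, a private stdlib iterator over short random strings, so under Python 3 it has to be advanced with the next() builtin. Illustration only, not part of the changeset:

    import tempfile

    names = tempfile._RandomNameSequence()
    suffix = next(names)                    # e.g. 'k3j9a2q1'
    repo_name = u'new_git_%s' % suffix
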
 

	
kallithea/tests/scripts/manual_test_concurrency.py
 
@@ -181,35 +181,35 @@ def test_clone_with_credentials(no_error
 
        if not no_errors:
 
            if backend == 'hg':
 
                assert """adding file changes""" in stdout, 'no messages about cloning'
 
                assert """abort""" not in stderr, 'got error from clone'
 
            elif backend == 'git':
 
                assert """Cloning into""" in stdout, 'no messages about cloning'
 

	
 

	
 
if __name__ == '__main__':
 
    try:
 
        create_test_user(force=False)
 
        import time
 

	
 
        try:
 
            METHOD = sys.argv[3]
 
        except IndexError:
 
            pass
 

	
 
        try:
 
            backend = sys.argv[4]
 
        except IndexError:
 
            backend = 'hg'
 

	
 
        if METHOD == 'pull':
 
-            seq = tempfile._RandomNameSequence().next()
+            seq = next(tempfile._RandomNameSequence())
 
            test_clone_with_credentials(repo=sys.argv[1], method='clone',
 
                                        backend=backend)
 
        s = time.time()
 
        for i in range(1, int(sys.argv[2]) + 1):
 
            print('take', i)
 
            test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
 
                                        backend=backend)
 
        print('time taken %.3f' % (time.time() - s))
 
    except Exception as e:
 
        sys.exit('stop on %s' % e)