diff --git a/.flake8 b/.flake8 index 58538de2c..ced5ca7ac 100644 --- a/.flake8 +++ b/.flake8 @@ -5,12 +5,11 @@ # C: complexity # F401: module imported but unused # F403: import * -# F811: redefinition of unused `name` from line `N` # F841: local variable assigned but never used # E402: module level import not at top of file # I100: Import statements are in the wrong order # I101: Imported names are in the wrong order. Should be -ignore = E, C, W, F401, F403, F811, F841, E402, I100, I101, D400 +ignore = E, C, W, F403, F841, E402, I100, I101, D400 builtins = c, get_config exclude = .cache, diff --git a/docs/source/conf.py b/docs/source/conf.py index 55a023b80..40a830549 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -14,15 +14,10 @@ # # All configuration values have a default; values that are commented out # serve to show the default. -import os -import shlex -import sys - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) - # We load the ipython release info into a dict by explicit execution iprelease = {} exec( diff --git a/docs/source/examples/Using Dill.ipynb b/docs/source/examples/Using Dill.ipynb index 2ea66cc92..7466dd1c0 100644 --- a/docs/source/examples/Using Dill.ipynb +++ b/docs/source/examples/Using Dill.ipynb @@ -495,7 +495,7 @@ "import ipyparallel as ipp\n", "\n", "@interactive\n", - "class C(object):\n", + "class C:\n", " a = 5\n", "\n", "@ipp.interactive\n", diff --git a/docs/source/examples/customresults.py b/docs/source/examples/customresults.py index 22325106e..f24d85824 100644 --- a/docs/source/examples/customresults.py +++ b/docs/source/examples/customresults.py @@ -10,12 +10,11 @@ * MinRK """ import random -import time -import ipyparallel as parallel +import ipyparallel as ipp # create client & views -rc = parallel.Client() +rc = ipp.Client() dv = rc[:] v = rc.load_balanced_view() @@ -41,7 +40,7 @@ def sleep_here(count, t): while pending: try: rc.wait(pending, 1e-3) - except parallel.TimeoutError: + except TimeoutError: # ignore timeouterrors, since they only mean that at least one isn't done pass # finished is the set of msg_ids that are complete diff --git a/docs/source/examples/daVinci Word Count/pwordfreq.py b/docs/source/examples/daVinci Word Count/pwordfreq.py index dfa3da9dd..ef8ea551f 100644 --- a/docs/source/examples/daVinci Word Count/pwordfreq.py +++ b/docs/source/examples/daVinci Word Count/pwordfreq.py @@ -8,7 +8,6 @@ import io import os import time -import urllib from itertools import repeat import requests diff --git a/docs/source/examples/daVinci Word Count/wordfreq.py b/docs/source/examples/daVinci Word Count/wordfreq.py index f0dddb3ec..443c8107e 100644 --- a/docs/source/examples/daVinci Word Count/wordfreq.py +++ b/docs/source/examples/daVinci Word Count/wordfreq.py @@ -1,8 +1,4 @@ """Count the frequencies of words in a string""" -from __future__ import division -from __future__ import print_function - -import cmath as math def wordfreq(text, is_filename=False): diff --git a/docs/source/examples/dagdeps.py b/docs/source/examples/dagdeps.py index 97194b614..482fb1390 100644 --- a/docs/source/examples/dagdeps.py +++ b/docs/source/examples/dagdeps.py @@ -7,7 +7,6 @@ * MinRK """ from random import randint -from random import random import networkx as nx diff --git a/docs/source/examples/fetchparse.py 
b/docs/source/examples/fetchparse.py index b351b5e07..e895205e9 100644 --- a/docs/source/examples/fetchparse.py +++ b/docs/source/examples/fetchparse.py @@ -10,29 +10,18 @@ ipcluster start -n 4 """ -from __future__ import print_function - import sys import time -import bs4 # this isn't necessary, but it helps throw the dependency error earlier +import bs4 # noqa this isn't necessary, but it helps throw the dependency error earlier import ipyparallel as ipp -try: - raw_input -except NameError: - raw_input = input - def fetchAndParse(url, data=None): import requests - - try: - from urllib.parse import urljoin - except ImportError: - from urlparse import urljoin - import bs4 + from urllib.parse import urljoin + import bs4 # noqa links = [] r = requests.get(url, data=data) @@ -46,7 +35,7 @@ def fetchAndParse(url, data=None): return links -class DistributedSpider(object): +class DistributedSpider: # Time to wait between polling for task results. pollingDelay = 0.5 @@ -103,7 +92,7 @@ def main(): if len(sys.argv) > 1: site = sys.argv[1] else: - site = raw_input('Enter site to crawl: ') + site = input('Enter site to crawl: ') distributedSpider = DistributedSpider(site) distributedSpider.run() diff --git a/docs/source/examples/interengine/bintree.py b/docs/source/examples/interengine/bintree.py index 4c9811224..665bbeac7 100644 --- a/docs/source/examples/interengine/bintree.py +++ b/docs/source/examples/interengine/bintree.py @@ -114,7 +114,7 @@ def disambiguate_dns_url(url, location): return disambiguate_url(url, location) -class BinaryTreeCommunicator(object): +class BinaryTreeCommunicator: id = None pub = None diff --git a/docs/source/examples/interengine/communicator.py b/docs/source/examples/interengine/communicator.py index 008cee42a..d8ea2126e 100644 --- a/docs/source/examples/interengine/communicator.py +++ b/docs/source/examples/interengine/communicator.py @@ -6,7 +6,7 @@ from ipyparallel.util import disambiguate_url -class EngineCommunicator(object): +class EngineCommunicator: def __init__(self, interface='tcp://*', identity=None): self._ctx = zmq.Context() self.socket = self._ctx.socket(zmq.XREP) diff --git a/docs/source/examples/interengine/interengine.py b/docs/source/examples/interengine/interengine.py index fda58f864..82348822a 100644 --- a/docs/source/examples/interengine/interengine.py +++ b/docs/source/examples/interengine/interengine.py @@ -1,5 +1,3 @@ -import sys - import ipyparallel as ipp diff --git a/docs/source/examples/phistogram.py b/docs/source/examples/phistogram.py index ee108603f..d39e2cdba 100644 --- a/docs/source/examples/phistogram.py +++ b/docs/source/examples/phistogram.py @@ -1,6 +1,4 @@ """Parallel histogram function""" -import numpy - from ipyparallel import Reference diff --git a/docs/source/examples/pi/parallelpi.py b/docs/source/examples/pi/parallelpi.py index 87649c955..73155fdef 100644 --- a/docs/source/examples/pi/parallelpi.py +++ b/docs/source/examples/pi/parallelpi.py @@ -19,7 +19,6 @@ from timeit import default_timer as clock -import numpy as np from matplotlib import pyplot as plt from pidigits import compute_two_digit_freqs from pidigits import fetch_pi_file diff --git a/docs/source/examples/pi/pidigits.py b/docs/source/examples/pi/pidigits.py index 77140c464..dc802c6c4 100644 --- a/docs/source/examples/pi/pidigits.py +++ b/docs/source/examples/pi/pidigits.py @@ -14,18 +14,12 @@ If the digits of pi are truly random, these frequencies should be equal. 
""" -# Import statements -from __future__ import division -from __future__ import with_statement +import os +from urllib.request import urlretrieve import numpy as np from matplotlib import pyplot as plt -try: # python2 - from urllib import urlretrieve -except ImportError: # python3 - from urllib.request import urlretrieve - # Top-level functions @@ -33,7 +27,6 @@ def fetch_pi_file(filename): """This will download a segment of pi from super-computing.org if the file is not already present. """ - import os, urllib ftpdir = "ftp://pi.super-computing.org/.2/pi200m/" if os.path.exists(filename): diff --git a/docs/source/examples/task_profiler.py b/docs/source/examples/task_profiler.py index 8e749a4aa..26f326b5b 100644 --- a/docs/source/examples/task_profiler.py +++ b/docs/source/examples/task_profiler.py @@ -16,11 +16,9 @@ overhead of a single task is about 0.001-0.01 seconds. """ import random -import sys +import time from optparse import OptionParser -from IPython.utils.timing import time - import ipyparallel as ipp @@ -54,8 +52,6 @@ def main(): print(view) rc.block = True nengines = len(rc.ids) - with rc[:].sync_imports(): - from IPython.utils.timing import time # the jobs should take a random time within a range times = [ @@ -68,10 +64,10 @@ def main(): % (opts.n, stime, nengines) ) time.sleep(1) - start = time.time() + start = time.perf_counter() amr = view.map(time.sleep, times) amr.get() - stop = time.time() + stop = time.perf_counter() ptime = stop - start scale = stime / ptime diff --git a/docs/source/examples/wave2D/RectPartitioner.py b/docs/source/examples/wave2D/RectPartitioner.py index 9938703e2..abdb13928 100755 --- a/docs/source/examples/wave2D/RectPartitioner.py +++ b/docs/source/examples/wave2D/RectPartitioner.py @@ -17,8 +17,6 @@ """ from __future__ import print_function -import time - from numpy import ascontiguousarray from numpy import frombuffer from numpy import zeros diff --git a/docs/source/examples/wave2D/communicator.py b/docs/source/examples/wave2D/communicator.py index b86ef513a..49619e80f 100644 --- a/docs/source/examples/wave2D/communicator.py +++ b/docs/source/examples/wave2D/communicator.py @@ -7,7 +7,7 @@ from ipyparallel.util import disambiguate_url -class EngineCommunicator(object): +class EngineCommunicator: """An object that connects Engines to each other. north and east sockets listen, while south and west sockets connect. 
diff --git a/docs/source/examples/wave2D/parallelwave-mpi.py b/docs/source/examples/wave2D/parallelwave-mpi.py index 2fdcdb7bd..5d1b9fae1 100755 --- a/docs/source/examples/wave2D/parallelwave-mpi.py +++ b/docs/source/examples/wave2D/parallelwave-mpi.py @@ -21,14 +21,10 @@ * Min Ragan-Kelley """ -import sys +import argparse import time -from IPython.external import argparse -from numpy import exp -from numpy import newaxis from numpy import sqrt -from numpy import zeros import ipyparallel as ipp diff --git a/docs/source/examples/wave2D/parallelwave.py b/docs/source/examples/wave2D/parallelwave.py index 039d56a95..1835f8e20 100755 --- a/docs/source/examples/wave2D/parallelwave.py +++ b/docs/source/examples/wave2D/parallelwave.py @@ -21,15 +21,10 @@ * Min Ragan-Kelley """ -# -import sys +import argparse import time -from IPython.external import argparse -from numpy import exp -from numpy import newaxis from numpy import sqrt -from numpy import zeros import ipyparallel as ipp diff --git a/docs/source/examples/wave2D/wavesolver.py b/docs/source/examples/wave2D/wavesolver.py index 8811493f0..096e5100b 100755 --- a/docs/source/examples/wave2D/wavesolver.py +++ b/docs/source/examples/wave2D/wavesolver.py @@ -13,7 +13,6 @@ import time from numpy import arange -from numpy import exp from numpy import newaxis from numpy import sqrt from numpy import zeros @@ -32,7 +31,7 @@ def iseq(start=0, stop=None, inc=1): return arange(start, stop + inc, inc) -class WaveSolver(object): +class WaveSolver: """ Solve the 2D wave equation u_tt = u_xx + u_yy + f(x,y,t) with u = bc(x,y,t) on the boundary and initial condition du/dt = 0. diff --git a/ipyparallel/__init__.py b/ipyparallel/__init__.py index ac9b2bd20..a58ae8079 100644 --- a/ipyparallel/__init__.py +++ b/ipyparallel/__init__.py @@ -2,24 +2,20 @@ """The IPython ZMQ-based parallel computing interface.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
-import os -import warnings - -import zmq from traitlets.config.configurable import MultipleInstanceError -from ._version import __version__ -from ._version import version_info -from .client.asyncresult import * -from .client.client import Client -from .client.remotefunction import * -from .client.view import * -from .cluster import Cluster -from .cluster import ClusterManager -from .controller.dependency import * -from .error import * -from .serialize import * -from .util import interactive +from ._version import __version__ # noqa +from ._version import version_info # noqa +from .client.asyncresult import * # noqa +from .client.client import Client # noqa +from .client.remotefunction import * # noqa +from .client.view import * # noqa +from .cluster import Cluster # noqa +from .cluster import ClusterManager # noqa +from .controller.dependency import * # noqa +from .error import * # noqa +from .serialize import * # noqa +from .util import interactive # noqa # ----------------------------------------------------------------------------- # Functions diff --git a/ipyparallel/apps/baseapp.py b/ipyparallel/apps/baseapp.py index fd6b58dc2..11c907fa5 100644 --- a/ipyparallel/apps/baseapp.py +++ b/ipyparallel/apps/baseapp.py @@ -11,9 +11,6 @@ from IPython.core.application import base_flags as base_ip_flags from IPython.core.application import BaseIPythonApplication from IPython.utils.path import expand_path -from IPython.utils.process import check_pid -from ipython_genutils import py3compat -from ipython_genutils.py3compat import unicode_type from jupyter_client.session import Session from tornado.ioloop import IOLoop from traitlets import Bool @@ -87,12 +84,12 @@ def _log_format_default(self): return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s" work_dir = Unicode( - py3compat.getcwd(), config=True, help='Set the working dir for the process.' + os.getcwd(), config=True, help='Set the working dir for the process.' ) @observe('work_dir') def _work_dir_changed(self, change): - self.work_dir = unicode_type(expand_path(change['new'])) + self.work_dir = str(expand_path(change['new'])) log_to_file = Bool(config=True, help="whether to log to a file") @@ -170,7 +167,7 @@ def init_deprecated_config(self): def to_work_dir(self): wd = self.work_dir - if unicode_type(wd) != py3compat.getcwd(): + if wd != os.getcwd(): os.chdir(wd) self.log.info("Changing to working dir: %s" % wd) # This is the working dir by now. diff --git a/ipyparallel/apps/ipclusterapp.py b/ipyparallel/apps/ipclusterapp.py index c9d29578c..03f1e4e04 100644 --- a/ipyparallel/apps/ipclusterapp.py +++ b/ipyparallel/apps/ipclusterapp.py @@ -2,7 +2,7 @@ warnings.warn(f"{__name__} is deprecated in ipyparallel 7. Use ipyparallel.cluster") -from ipyparallel.cluster.app import IPCluster, IPClusterStart, main +from ipyparallel.cluster.app import IPCluster, IPClusterStart, main # noqa IPClusterApp = IPCluster launch_new_instance = main diff --git a/ipyparallel/apps/iploggerapp.py b/ipyparallel/apps/iploggerapp.py index 91ce759e3..7ac997d8d 100755 --- a/ipyparallel/apps/iploggerapp.py +++ b/ipyparallel/apps/iploggerapp.py @@ -2,29 +2,9 @@ # encoding: utf-8 """ A simple IPython logger application - -Authors: - -* MinRK - """ -# ----------------------------------------------------------------------------- -# Copyright (C) 2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-# ----------------------------------------------------------------------------- -# ----------------------------------------------------------------------------- -# Imports -# ----------------------------------------------------------------------------- -import os -import sys - -import zmq from IPython.core.profiledir import ProfileDir -from traitlets import Bool from traitlets import Dict -from traitlets import Unicode from ipyparallel.apps.baseapp import base_aliases from ipyparallel.apps.baseapp import BaseParallelApplication diff --git a/ipyparallel/apps/logwatcher.py b/ipyparallel/apps/logwatcher.py index 74575e2d1..30bc24251 100644 --- a/ipyparallel/apps/logwatcher.py +++ b/ipyparallel/apps/logwatcher.py @@ -1,37 +1,17 @@ """ -A simple logger object that consolidates messages incoming from ipcluster processes. - -Authors: - -* MinRK - +A logger object that consolidates messages incoming from ipcluster processes. """ -# ----------------------------------------------------------------------------- -# Copyright (C) 2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -# ----------------------------------------------------------------------------- -# ----------------------------------------------------------------------------- -# Imports -# ----------------------------------------------------------------------------- import logging -import sys import zmq from jupyter_client.localinterfaces import localhost from traitlets import Instance -from traitlets import Int from traitlets import List from traitlets import Unicode from traitlets.config.configurable import LoggingConfigurable from zmq.eventloop import ioloop from zmq.eventloop import zmqstream -# ----------------------------------------------------------------------------- -# Classes -# ----------------------------------------------------------------------------- - class LogWatcher(LoggingConfigurable): """A simple class that receives messages on a SUB socket, as published diff --git a/ipyparallel/client/_joblib.py b/ipyparallel/client/_joblib.py index ae59f69cc..b1c24619c 100644 --- a/ipyparallel/client/_joblib.py +++ b/ipyparallel/client/_joblib.py @@ -19,10 +19,10 @@ def __init__(self, view=None, **kwargs): # use cloudpickle or dill for closures, if available. # joblib tends to create closures default pickle can't handle. 
try: - import cloudpickle + import cloudpickle # noqa except ImportError: try: - import dill + import dill # noqa except ImportError: pass else: diff --git a/ipyparallel/client/asyncresult.py b/ipyparallel/client/asyncresult.py index 79b71e016..a61cfe51f 100644 --- a/ipyparallel/client/asyncresult.py +++ b/ipyparallel/client/asyncresult.py @@ -18,7 +18,6 @@ from IPython import get_ipython from IPython.display import display from IPython.display import display_pretty -from ipython_genutils.py3compat import string_types from .futures import MessageFuture from .futures import multi_future @@ -84,7 +83,7 @@ def __init__( self._return_exceptions = return_exceptions - if isinstance(children[0], string_types): + if isinstance(children[0], str): self.msg_ids = children self._children = [] else: @@ -459,7 +458,7 @@ def __getitem__(self, key): elif isinstance(key, slice): self._check_ready() return self._collect_exceptions(self.result()[key]) - elif isinstance(key, string_types): + elif isinstance(key, str): # metadata proxy *does not* require that results are done self.wait(0) self.wait_for_output(0) diff --git a/ipyparallel/client/client.py b/ipyparallel/client/client.py index adc8a2b4d..2553bfc08 100644 --- a/ipyparallel/client/client.py +++ b/ipyparallel/client/client.py @@ -29,10 +29,6 @@ from IPython.utils.capture import RichOutput from IPython.utils.coloransi import TermColors from IPython.utils.path import compress_user -from ipython_genutils.py3compat import cast_bytes -from ipython_genutils.py3compat import iteritems -from ipython_genutils.py3compat import string_types -from ipython_genutils.py3compat import xrange from jupyter_client.localinterfaces import is_local_ip from jupyter_client.localinterfaces import localhost from jupyter_client.session import Session @@ -544,7 +540,7 @@ def __init__( try: extra_args['packer'] = cfg['pack'] extra_args['unpacker'] = cfg['unpack'] - extra_args['key'] = cast_bytes(cfg['key']) + extra_args['key'] = cfg['key'].encode("utf8") extra_args['signature_scheme'] = cfg['signature_scheme'] except KeyError as exc: msg = '\n'.join( @@ -639,7 +635,7 @@ def __exit__(self, exc_type, exc_value, traceback): def _update_engines(self, engines): """Update our engines dict and _ids from a dict of the form: {id:uuid}.""" - for k, v in iteritems(engines): + for k, v in engines.items(): eid = int(k) if eid not in self._engines: self._ids.append(eid) @@ -682,7 +678,7 @@ def _build_targets(self, targets): if targets is None: targets = self._ids - elif isinstance(targets, string_types): + elif isinstance(targets, str): if targets.lower() == 'all': targets = self._ids else: @@ -699,12 +695,12 @@ def _build_targets(self, targets): ids = self.ids targets = [ids[i] for i in indices] - if not isinstance(targets, (tuple, list, xrange)): + if not isinstance(targets, (tuple, list, range)): raise TypeError( "targets by int/slice/collection of ints only, not %s" % (type(targets)) ) - return [cast_bytes(self._engines[t]) for t in targets], list(targets) + return [self._engines[t].encode("utf8") for t in targets], list(targets) def _connect(self, sshserver, ssh_kwargs, timeout): """setup all our socket connections to the cluster. 
This is called from @@ -1213,8 +1209,8 @@ def __len__(self): def __getitem__(self, key): """index access returns DirectView multiplexer objects - Must be int, slice, or list/tuple/xrange of ints""" - if not isinstance(key, (int, slice, tuple, list, xrange)): + Must be int, slice, or list/tuple/range of ints""" + if not isinstance(key, (int, slice, tuple, list, range)): raise TypeError( "key by int/slice/iterable of ints only, not %s" % (type(key)) ) @@ -1458,7 +1454,7 @@ def wait(self, jobs=None, timeout=-1): # make a copy, so that we aren't passing a mutable collection to _futures_for_msgs theids = set(self.outstanding) else: - if isinstance(jobs, string_types + (int, AsyncResult)) or not isinstance( + if isinstance(jobs, (str, int, AsyncResult)) or not isinstance( jobs, Iterable ): jobs = [jobs] @@ -1565,11 +1561,9 @@ def abort(self, jobs=None, targets=None, block=None): jobs = jobs if jobs is not None else list(self.outstanding) msg_ids = [] - if isinstance(jobs, string_types + (AsyncResult,)): + if isinstance(jobs, (str, AsyncResult)): jobs = [jobs] - bad_ids = [ - obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,)) - ] + bad_ids = [obj for obj in jobs if not isinstance(obj, (str, AsyncResult))] if bad_ids: raise TypeError( "Invalid msg_id type %r, expected str or AsyncResult" % bad_ids[0] @@ -1818,7 +1812,7 @@ def send_execute_request( metadata = metadata if metadata is not None else {} # validate arguments - if not isinstance(code, string_types): + if not isinstance(code, str): raise TypeError("code must be text, not %s" % type(code)) if not isinstance(metadata, dict): raise TypeError("metadata must be dict, not %s" % type(metadata)) @@ -2171,7 +2165,7 @@ def _msg_ids_from_jobs(self, jobs=None): for job in jobs: if isinstance(job, int): msg_ids.append(self.history[job]) - elif isinstance(job, string_types): + elif isinstance(job, str): msg_ids.append(job) elif isinstance(job, AsyncResult): msg_ids.extend(job.msg_ids) @@ -2191,7 +2185,7 @@ def _asyncresult_from_jobs(self, jobs=None, owner=False): for job in jobs: if isinstance(job, int): job = self.history[job] - if isinstance(job, string_types): + if isinstance(job, str): if job in self._futures: futures.append(job) elif job in self.results: @@ -2377,7 +2371,7 @@ def db_query(self, query, keys=None): The subset of keys to be returned. The default is to fetch everything but buffers. 'msg_id' will *always* be included. 
""" - if isinstance(keys, string_types): + if isinstance(keys, str): keys = [keys] content = dict(query=query, keys=keys) reply = self._send_recv(self._query_stream, "db_request", content=content) diff --git a/ipyparallel/client/magics.py b/ipyparallel/client/magics.py index 82a6976a8..244ce93eb 100644 --- a/ipyparallel/client/magics.py +++ b/ipyparallel/client/magics.py @@ -26,9 +26,6 @@ {CONFIG_DOC} """ -from __future__ import print_function - -import ast from contextlib import contextmanager # Python 3.6 doesn't have nullcontext, so we define our own @@ -46,11 +43,11 @@ def nullcontext(): import inspect import re +from textwrap import dedent from IPython.core.error import UsageError from IPython.core.magic import Magics from IPython.core import magic_arguments -from ipython_genutils.text import dedent # ----------------------------------------------------------------------------- # Definitions of magic functions for use with IPython diff --git a/ipyparallel/client/map.py b/ipyparallel/client/map.py index 6d26f3390..60d9d7e5a 100644 --- a/ipyparallel/client/map.py +++ b/ipyparallel/client/map.py @@ -29,7 +29,7 @@ def is_array(obj): return isinstance(obj, numpy.ndarray) -class Map(object): +class Map: """A class for partitioning a sequence using a map.""" def getPartition(self, seq, p, q, n=None): diff --git a/ipyparallel/client/remotefunction.py b/ipyparallel/client/remotefunction.py index e95e15d62..26197291a 100644 --- a/ipyparallel/client/remotefunction.py +++ b/ipyparallel/client/remotefunction.py @@ -5,6 +5,7 @@ import sys import warnings +from inspect import signature from decorator import decorator @@ -12,11 +13,6 @@ from ..serialize import PrePickled from .asyncresult import AsyncMapResult -try: - from inspect import signature -except ImportError: # py2 - from IPython.utils.signatures import signature - # ----------------------------------------------------------------------------- # Functions and Decorators # ----------------------------------------------------------------------------- @@ -99,7 +95,7 @@ def sync_view_results(f, self, *args, **kwargs): # -------------------------------------------------------------------------- -class RemoteFunction(object): +class RemoteFunction: """Turn an existing function into a remote function. Parameters diff --git a/ipyparallel/client/view.py b/ipyparallel/client/view.py index 5fac07f4c..3d7f374bf 100644 --- a/ipyparallel/client/view.py +++ b/ipyparallel/client/view.py @@ -1,6 +1,7 @@ """Views of remote engines.""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. +import builtins import concurrent.futures import inspect import warnings @@ -9,8 +10,6 @@ from decorator import decorator from IPython import get_ipython -from ipython_genutils.py3compat import iteritems -from ipython_genutils.py3compat import string_types from traitlets import Any from traitlets import Bool from traitlets import CFloat @@ -157,7 +156,7 @@ def set_flags(self, **kwargs): safely edit after arrays and buffers during non-copying sends. 
""" - for name, value in iteritems(kwargs): + for name, value in kwargs.items(): if name not in self._flag_names: raise KeyError("Invalid name: %r" % name) else: @@ -428,9 +427,8 @@ def sync_imports(self, local=True, quiet=False): importing recarray from numpy on engine(s) """ - from ipython_genutils.py3compat import builtin_mod - local_import = builtin_mod.__import__ + local_import = builtins.__import__ modules = set() results = [] @@ -458,8 +456,8 @@ def view_import(name, globals={}, locals={}, fromlist=[], level=0): locally as well. """ # don't override nested imports - save_import = builtin_mod.__import__ - builtin_mod.__import__ = local_import + save_import = builtins.__import__ + builtins.__import__ = local_import import_frame = inspect.getouterframes(inspect.currentframe())[1].frame if import_frame is not context_frame: @@ -486,12 +484,12 @@ def view_import(name, globals={}, locals={}, fromlist=[], level=0): print("importing %s on engine(s)" % name) results.append(self.apply_async(remote_import, name, fromlist, level)) # restore override - builtin_mod.__import__ = save_import + builtins.__import__ = save_import return mod # override __import__ - builtin_mod.__import__ = view_import + builtins.__import__ = view_import try: # enter the block yield @@ -503,7 +501,7 @@ def view_import(name, globals={}, locals={}, fromlist=[], level=0): pass finally: # always restore __import__ - builtin_mod.__import__ = local_import + builtins.__import__ = local_import for r in results: # raise possible remote ImportErrors here @@ -751,11 +749,11 @@ def pull(self, names, targets=None, block=None): """ block = block if block is not None else self.block targets = targets if targets is not None else self.targets - if isinstance(names, string_types): + if isinstance(names, str): pass elif isinstance(names, (list, tuple, set)): for key in names: - if not isinstance(key, string_types): + if not isinstance(key, str): raise TypeError("keys must be str, not type %r" % type(key)) else: raise TypeError("names must be strs, not %r" % names) @@ -1045,11 +1043,11 @@ def _validate_dependency(self, dep): For use in `set_flags`. """ - if dep is None or isinstance(dep, string_types + (AsyncResult, Dependency)): + if dep is None or isinstance(dep, (str, AsyncResult, Dependency)): return True elif isinstance(dep, (list, set, tuple)): for d in dep: - if not isinstance(d, string_types + (AsyncResult,)): + if not isinstance(d, (str, AsyncResult)): return False elif isinstance(dep, dict): if set(dep.keys()) != set(Dependency().as_dict().keys()): @@ -1057,7 +1055,7 @@ def _validate_dependency(self, dep): if not isinstance(dep['msg_ids'], list): return False for d in dep['msg_ids']: - if not isinstance(d, string_types): + if not isinstance(d, str): return False else: return False diff --git a/ipyparallel/cluster/__init__.py b/ipyparallel/cluster/__init__.py index ee6b7885f..83a1ba970 100644 --- a/ipyparallel/cluster/__init__.py +++ b/ipyparallel/cluster/__init__.py @@ -1 +1 @@ -from .cluster import * +from .cluster import * # noqa diff --git a/ipyparallel/cluster/_winhpcjob.py b/ipyparallel/cluster/_winhpcjob.py index 6d0ca9d7a..ffcf9c7d8 100644 --- a/ipyparallel/cluster/_winhpcjob.py +++ b/ipyparallel/cluster/_winhpcjob.py @@ -2,28 +2,12 @@ """ Job and task components for writing .xml files that the Windows HPC Server 2008 can use to start jobs. 
- -Authors: - -* Brian Granger -* MinRK - """ -# ----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -# ----------------------------------------------------------------------------- -# ----------------------------------------------------------------------------- -# Imports -# ----------------------------------------------------------------------------- import os import re import uuid from xml.etree import ElementTree as ET -from ipython_genutils.py3compat import iteritems from traitlets import Bool from traitlets import Enum from traitlets import Instance @@ -217,7 +201,7 @@ def as_element(self): def get_env_vars(self): env_vars = ET.Element('EnvironmentVariables') - for k, v in iteritems(self.environment_variables): + for k, v in self.environment_variables.items(): variable = ET.SubElement(env_vars, "Variable") name = ET.SubElement(variable, "Name") name.text = k diff --git a/ipyparallel/cluster/cluster.py b/ipyparallel/cluster/cluster.py index 4bc685729..6da6e4feb 100644 --- a/ipyparallel/cluster/cluster.py +++ b/ipyparallel/cluster/cluster.py @@ -12,7 +12,6 @@ import logging import os import random -import socket import string import sys import time diff --git a/ipyparallel/cluster/launcher.py b/ipyparallel/cluster/launcher.py index ef37ba9eb..0959b6aed 100644 --- a/ipyparallel/cluster/launcher.py +++ b/ipyparallel/cluster/launcher.py @@ -31,16 +31,12 @@ from IPython.utils.path import ensure_dir_exists from IPython.utils.path import get_home_dir from IPython.utils.text import EvalFormatter -from ipython_genutils.encoding import DEFAULT_ENCODING -from ipython_genutils.py3compat import iteritems -from ipython_genutils.py3compat import itervalues from tornado import ioloop from traitlets import Any from traitlets import CRegExp from traitlets import default from traitlets import Dict from traitlets import Float -from traitlets import import_item from traitlets import Instance from traitlets import Integer from traitlets import List @@ -867,7 +863,7 @@ def start(self, n): # deprecated MPIExec names -class DeprecatedMPILauncher(object): +class DeprecatedMPILauncher: def warn(self): oldname = self.__class__.__name__ newname = oldname.replace('MPIExec', 'MPI') @@ -1139,7 +1135,7 @@ def _fetch_file(self, remote, local, wait=True): + [self.location, 'test -e', remote, "&& echo 'yes' || echo 'no'"], input=None, ) - check = check.decode(DEFAULT_ENCODING, 'replace').strip() + check = check.decode("utf8", 'replace').strip() if check == u'no': time.sleep(1) elif check == u'yes': @@ -1378,7 +1374,7 @@ def start(self, n): requested_n = n started_n = 0 - for host, n_or_config in iteritems(self.engines): + for host, n_or_config in self.engines.items(): if isinstance(n_or_config, dict): overrides = n_or_config n = overrides.pop("n", 1) @@ -1536,7 +1532,7 @@ def start(self, n): output = check_output( [self.job_cmd] + args, env=os.environ, cwd=self.work_dir, stderr=STDOUT ) - output = output.decode(DEFAULT_ENCODING, 'replace') + output = output.decode("utf8", 'replace') job_id = self.parse_job_id(output) self.notify_start(job_id) return job_id @@ -1550,7 +1546,7 @@ def stop(self): output = check_output( [self.job_cmd] + args, env=os.environ, cwd=self.work_dir, stderr=STDOUT ) - output = output.decode(DEFAULT_ENCODING, 'replace') + output = output.decode("utf8", 'replace') except: output = 
u'The job already appears to be stopped: %r' % self.job_id self.notify_stop( @@ -1822,7 +1818,7 @@ def start(self, n=1): self.write_batch_script(n) output = check_output(self.args, env=os.environ) - output = output.decode(DEFAULT_ENCODING, 'replace') + output = output.decode("utf8", 'replace') self.log.debug(f"Submitted {shlex_join(self.args)}. Output: {output}") job_id = self.parse_job_id(output) @@ -1834,7 +1830,7 @@ def stop(self): output = check_output( self.delete_command + [self.job_id], stdin=None, - ).decode(DEFAULT_ENCODING, 'replace') + ).decode("utf8", 'replace') except Exception: self.log.exception( "Problem stopping cluster with command: %s" @@ -1853,7 +1849,7 @@ def signal(self, sig): output = check_output( cmd, stdin=None, - ).decode(DEFAULT_ENCODING, 'replace') + ).decode("utf8", 'replace') except Exception: self.log.exception("Problem sending signal with: {shlex_join(cmd)}") output = "" @@ -2153,7 +2149,7 @@ def start(self, n=1): self.log.debug("Starting %s: %s", self.__class__.__name__, piped_cmd) p = Popen(piped_cmd, shell=True, env=os.environ, stdout=PIPE) output, err = p.communicate() - output = output.decode(DEFAULT_ENCODING, 'replace') + output = output.decode("utf8", 'replace') job_id = self.parse_job_id(output) self.notify_start(job_id) return job_id diff --git a/ipyparallel/controller/broadcast_scheduler.py b/ipyparallel/controller/broadcast_scheduler.py index 981532f82..084fb7ba5 100644 --- a/ipyparallel/controller/broadcast_scheduler.py +++ b/ipyparallel/controller/broadcast_scheduler.py @@ -1,10 +1,8 @@ import logging -import os import zmq from traitlets import Bool from traitlets import Bytes -from traitlets import Integer from traitlets import List from ipyparallel import util diff --git a/ipyparallel/controller/dependency.py b/ipyparallel/controller/dependency.py index 19409dd0b..b665b372d 100644 --- a/ipyparallel/controller/dependency.py +++ b/ipyparallel/controller/dependency.py @@ -1,28 +1,14 @@ """Dependency utilities - -Authors: - -* Min RK """ -# ----------------------------------------------------------------------------- -# Copyright (C) 2013 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -# ----------------------------------------------------------------------------- from types import ModuleType -from ipython_genutils import py3compat -from ipython_genutils.py3compat import string_types - from ipyparallel.client.asyncresult import AsyncResult from ipyparallel.error import UnmetDependency from ipyparallel.serialize import can -from ipyparallel.serialize import uncan from ipyparallel.util import interactive -class depend(object): +class depend: """Dependency decorator, for use with tasks. `@depend` lets you define a function for engine dependencies @@ -52,7 +38,7 @@ def __call__(self, f): return dependent(f, self.f, *self.args, **self.kwargs) -class dependent(object): +class dependent: """A function that depends on another function. This is an object to prevent the closure used in traditional decorators, which are not picklable. 
@@ -61,10 +47,7 @@ class dependent(object): def __init__(self, _wrapped_f, _wrapped_df, *dargs, **dkwargs): self.f = _wrapped_f name = getattr(_wrapped_f, '__name__', 'f') - if py3compat.PY3: - self.__name__ = name - else: - self.func_name = name + self.__name__ = name self.df = _wrapped_df self.dargs = dargs self.dkwargs = dkwargs @@ -76,12 +59,6 @@ def check_dependency(self): def __call__(self, *args, **kwargs): return self.f(*args, **kwargs) - if not py3compat.PY3: - - @property - def __name__(self): - return self.func_name - @interactive def _require(*modules, **mapping): @@ -133,7 +110,7 @@ def require(*objects, **mapping): if isinstance(obj, ModuleType): obj = obj.__name__ - if isinstance(obj, string_types): + if isinstance(obj, str): names.append(obj) elif hasattr(obj, '__name__'): mapping[obj.__name__] = obj @@ -183,10 +160,10 @@ def __init__(self, dependencies=[], all=True, success=True, failure=False): ids = [] # extract ids from various sources: - if isinstance(dependencies, string_types + (AsyncResult,)): + if isinstance(dependencies, (str, AsyncResult)): dependencies = [dependencies] for d in dependencies: - if isinstance(d, string_types): + if isinstance(d, str): ids.append(d) elif isinstance(d, AsyncResult): ids.extend(d.msg_ids) diff --git a/ipyparallel/controller/dictdb.py b/ipyparallel/controller/dictdb.py index a0796f430..f2ec8ca66 100644 --- a/ipyparallel/controller/dictdb.py +++ b/ipyparallel/controller/dictdb.py @@ -36,20 +36,19 @@ # Distributed under the terms of the Modified BSD License. import copy from copy import deepcopy - -# Python can't copy memoryviews, but creating another memoryview works for us -copy._deepcopy_dispatch[memoryview] = lambda x, memo: memoryview(x) - - from datetime import datetime +from traitlets import Dict +from traitlets import Float +from traitlets import Integer +from traitlets import Unicode from traitlets.config.configurable import LoggingConfigurable -from ipython_genutils.py3compat import iteritems, itervalues -from traitlets import Dict, Unicode, Integer, Float - from ..util import ensure_timezone +# Python can't copy memoryviews, but creating another memoryview works for us +copy._deepcopy_dispatch[memoryview] = lambda x, memo: memoryview(x) + filters = { '$lt': lambda a, b: a < b, '$gt': lambda a, b: b > a, @@ -71,13 +70,13 @@ def _add_tz(obj): return obj -class CompositeFilter(object): +class CompositeFilter: """Composite filter for matching multiple properties.""" def __init__(self, dikt): self.tests = [] self.values = [] - for key, value in iteritems(dikt): + for key, value in dikt.items(): self.tests.append(_add_tz(filters[key])) self.values.append(_add_tz(value)) @@ -116,7 +115,7 @@ class DictDB(BaseDB): 1024 ** 3, config=True, help="""The maximum total size (in bytes) of the buffers stored in the db - + When the db exceeds this size, the oldest records will be culled until the total size is under size_limit * (1-cull_fraction). default: 1 GB @@ -126,7 +125,7 @@ class DictDB(BaseDB): 1024, config=True, help="""The maximum number of records in the db - + When the history exceeds this size, the first record_limit * cull_fraction records will be culled. """, @@ -135,18 +134,18 @@ class DictDB(BaseDB): 0.1, config=True, help="""The fraction by which the db should culled when one of the limits is exceeded - + In general, the db size will spend most of its time with a size in the range: - + [limit * (1-cull_fraction), limit] - + for each of size_limit and record_limit. 
""", ) def _match_one(self, rec, tests): """Check if a specific record matches tests.""" - for key, test in iteritems(tests): + for key, test in tests.items(): if not test(rec.get(key, None)): return False return True @@ -155,13 +154,13 @@ def _match(self, check): """Find all the matches for a check dict.""" matches = [] tests = {} - for k, v in iteritems(check): + for k, v in check.items(): if isinstance(v, dict): tests[k] = CompositeFilter(v) else: tests[k] = lambda o: _add_tz(o) == _add_tz(v) - for rec in itervalues(self._records): + for rec in self._records.values(): if self._match_one(rec, tests): matches.append(deepcopy(rec)) return matches diff --git a/ipyparallel/controller/heartmonitor.py b/ipyparallel/controller/heartmonitor.py index 2c705f73b..560109995 100755 --- a/ipyparallel/controller/heartmonitor.py +++ b/ipyparallel/controller/heartmonitor.py @@ -11,7 +11,6 @@ import uuid import zmq -from ipython_genutils.py3compat import str_to_bytes from tornado import ioloop from traitlets import Bool from traitlets import Dict @@ -26,7 +25,7 @@ from ipyparallel.util import log_errors -class Heart(object): +class Heart: """A basic heart object for responding to a HeartMonitor. This is a simple wrapper with defaults for the most common Device model for responding to heartbeats. @@ -161,7 +160,7 @@ def beat(self): self.responses = set() # print self.on_probation, self.hearts # self.log.debug("heartbeat::beat %.3f, %i beating hearts", self.lifetime, len(self.hearts)) - self.pingstream.send(str_to_bytes(str(self.lifetime))) + self.pingstream.send(str(self.lifetime).encode('ascii')) # flush stream to force immediate socket send self.pingstream.flush() @@ -204,8 +203,8 @@ def handle_heart_failure(self, heart): @log_errors def handle_pong(self, msg): "a heart just beat" - current = str_to_bytes(str(self.lifetime)) - last = str_to_bytes(str(self.last_ping)) + current = str(self.lifetime).encode('ascii') + last = str(self.last_ping).encode('ascii') if msg[1] == current: delta = time.time() - self.tic if self.debug: diff --git a/ipyparallel/controller/hub.py b/ipyparallel/controller/hub.py index 5416f658a..c6fdc3fed 100644 --- a/ipyparallel/controller/hub.py +++ b/ipyparallel/controller/hub.py @@ -14,10 +14,6 @@ import time from datetime import datetime -from ipython_genutils.py3compat import buffer_to_bytes_py2 -from ipython_genutils.py3compat import cast_bytes -from ipython_genutils.py3compat import iteritems -from ipython_genutils.py3compat import unicode_type from jupyter_client.jsonutil import parse_date from jupyter_client.session import Session from tornado import ioloop @@ -277,14 +273,14 @@ def _validate_targets(self, targets): # default to all return self.ids - if isinstance(targets, (int, str, unicode_type)): + if isinstance(targets, (int, str)): # only one target specified targets = [targets] _targets = [] for t in targets: # map raw identities to ids - if isinstance(t, (str, unicode_type)): - t = self.by_ident.get(cast_bytes(t), t) + if isinstance(t, str): + t = self.by_ident.get(t.encode("utf8", "replace"), t) _targets.append(t) targets = _targets bad_targets = [t for t in targets if t not in self.ids] @@ -432,7 +428,7 @@ def save_queue_request(self, idents, msg): try: # it's posible iopub arrived first: existing = self.db.get_record(msg_id) - for key, evalue in iteritems(existing): + for key, evalue in existing.items(): rvalue = record.get(key, None) if evalue and rvalue and evalue != rvalue: self.log.warn( @@ -559,7 +555,7 @@ def save_broadcast_result(self, idents, msg): header = 
msg['header'] md = msg['metadata'] engine_uuid = md.get('engine', u'') - eid = self.by_ident.get(cast_bytes(engine_uuid), None) + eid = self.by_ident.get(engine_uuid.encode("utf8"), None) status = md.get('status', None) if msg_id in self.pending: @@ -627,7 +623,7 @@ def save_task_request(self, idents, msg): # still check content,header which should not change # but are not expensive to compare as buffers - for key, evalue in iteritems(existing): + for key, evalue in existing.items(): if key.endswith('buffers'): # don't compare buffers continue @@ -680,7 +676,7 @@ def save_task_result(self, idents, msg): header = msg['header'] md = msg['metadata'] engine_uuid = md.get('engine', u'') - eid = self.by_ident.get(cast_bytes(engine_uuid), None) + eid = self.by_ident.get(engine_uuid.encode("utf8"), None) status = md.get('status', None) @@ -725,7 +721,7 @@ def save_task_destination(self, idents, msg): # print (content) msg_id = content['msg_id'] engine_uuid = content['engine_id'] - eid = self.by_ident[cast_bytes(engine_uuid)] + eid = self.by_ident[engine_uuid.encode("utf8")] self.log.info("task::task %r arrived on %r", msg_id, eid) if msg_id in self.unassigned: @@ -855,14 +851,14 @@ def register_engine(self, reg, msg): connection_info=self.engine_info, ) # check if requesting available IDs: - if cast_bytes(uuid) in self.by_ident: + if uuid.encode("utf8") in self.by_ident: try: raise KeyError("uuid %r in use" % uuid) except: content = error.wrap_exception() self.log.error("uuid %r in use", uuid, exc_info=True) else: - for h, ec in iteritems(self.incoming_registrations): + for h, ec in self.incoming_registrations.items(): if uuid == h: try: raise KeyError("heart_id %r in use" % uuid) @@ -882,7 +878,7 @@ def register_engine(self, reg, msg): self.query, "registration_reply", content=content, ident=reg ) - heart = cast_bytes(uuid) + heart = uuid.encode("utf8") if content['status'] == 'ok': if heart in self.heartmonitor.hearts: @@ -1062,7 +1058,7 @@ def _load_engine_state(self): save_notifier = self.notifier self.notifier = None - for eid, uuid in iteritems(state['engines']): + for eid, uuid in state['engines'].items(): heart = uuid.encode('ascii') # start with this heart as current and beating: self.heartmonitor.responses.add(heart) @@ -1298,7 +1294,7 @@ def finish(reply): finish(dict(status='ok', resubmitted=resubmitted)) # store the new IDs in the Task DB - for msg_id, resubmit_id in iteritems(resubmitted): + for msg_id, resubmit_id in resubmitted.items(): try: self.db.update_record(msg_id, {'resubmitted': resubmit_id}) except Exception: @@ -1321,7 +1317,7 @@ def _extract_record(self, rec): 'io': io_dict, } if rec['result_buffers']: - buffers = list(map(buffer_to_bytes_py2, rec['result_buffers'])) + buffers = list(rec['result_buffers']) else: buffers = [] diff --git a/ipyparallel/controller/mongodb.py b/ipyparallel/controller/mongodb.py index 4af7ee6d0..9c55ea2b1 100644 --- a/ipyparallel/controller/mongodb.py +++ b/ipyparallel/controller/mongodb.py @@ -1,15 +1,5 @@ """A TaskRecord backend using mongodb - -Authors: - -* Min RK """ -# ----------------------------------------------------------------------------- -# Copyright (C) 2010-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-# ----------------------------------------------------------------------------- try: from pymongo import MongoClient diff --git a/ipyparallel/controller/scheduler.py b/ipyparallel/controller/scheduler.py index d57da1ad3..36bf28bc3 100644 --- a/ipyparallel/controller/scheduler.py +++ b/ipyparallel/controller/scheduler.py @@ -12,7 +12,6 @@ import traitlets.log import zmq from decorator import decorator -from ipython_genutils.py3compat import cast_bytes from tornado import ioloop from traitlets import Bytes from traitlets import default @@ -100,7 +99,9 @@ def dispatch_submission(self, raw_msg): raise NotImplementedError("Implement in subclasses") def append_new_msg_id_to_msg(self, new_id, target_id, idents, msg): - new_idents = [cast_bytes(target_id)] + idents + if isinstance(target_id, str): + target_id = target_id.encode("utf8") + new_idents = [target_id] + idents msg['header']['msg_id'] = new_id new_msg_list = self.session.serialize(msg, ident=new_idents) new_msg_list.extend(msg['buffers']) diff --git a/ipyparallel/controller/sqlitedb.py b/ipyparallel/controller/sqlitedb.py index dd4d5eb8d..d2298641a 100644 --- a/ipyparallel/controller/sqlitedb.py +++ b/ipyparallel/controller/sqlitedb.py @@ -19,8 +19,7 @@ from tornado import ioloop from traitlets import Unicode, Instance, List, Dict -from jupyter_client.jsonutil import date_default, squash_dates -from ipython_genutils.py3compat import iteritems, buffer_to_bytes +from jupyter_client.jsonutil import date_default from .dictdb import BaseDB from ..util import ensure_timezone, extract_dates @@ -29,12 +28,6 @@ # SQLite operators, adapters, and converters # ----------------------------------------------------------------------------- -try: - buffer -except NameError: - # py3k - buffer = memoryview - operators = { '$lt': "<", '$gt': ">", @@ -72,8 +65,10 @@ def _convert_dict(ds): def _adapt_bufs(bufs): # this is *horrible* # copy buffers into single list and pickle it: - if bufs and isinstance(bufs[0], (bytes, buffer, memoryview)): - return sqlite3.Binary(pickle.dumps(list(map(buffer_to_bytes, bufs)), -1)) + if bufs and isinstance(bufs[0], (bytes, memoryview)): + return sqlite3.Binary( + pickle.dumps([memoryview(buf).tobytes() for buf in bufs], -1) + ) elif bufs: return bufs else: @@ -334,9 +329,9 @@ def _render_expression(self, check): if skeys: raise KeyError("Illegal testing key(s): %s" % skeys) - for name, sub_check in iteritems(check): + for name, sub_check in check.items(): if isinstance(sub_check, dict): - for test, value in iteritems(sub_check): + for test, value in sub_check.items(): try: op = operators[test] except KeyError: diff --git a/ipyparallel/controller/task_scheduler.py b/ipyparallel/controller/task_scheduler.py index fcc05178a..361ee1c59 100644 --- a/ipyparallel/controller/task_scheduler.py +++ b/ipyparallel/controller/task_scheduler.py @@ -5,7 +5,6 @@ from types import FunctionType import zmq -from ipython_genutils.py3compat import cast_bytes from traitlets import Dict from traitlets import Enum from traitlets import Instance @@ -98,7 +97,7 @@ def leastload(loads): MET = Dependency([]) -class Job(object): +class Job: """Simple container for a job""" def __init__( @@ -153,7 +152,7 @@ class TaskScheduler(Scheduler): help="""specify the High Water Mark (HWM) for the downstream socket in the Task scheduler. This is the maximum number of allowed outstanding tasks on each engine. - + The default (1) means that only one task can be outstanding on each engine. 
Setting TaskScheduler.hwm=0 means there is no limit, and the engines continue to be assigned tasks while they are working,
@@ -173,11 +172,6 @@ class TaskScheduler(Scheduler):
         Options are: 'pure', 'lru', 'plainrandom', 'weighted', 'twobin','leastload'""",
     )

-    @observe('scheme_name')
-    def _scheme_name_changed(self, change):
-        self.log.debug("Using scheme %r" % change['new'])
-        self.scheme = globals()[change['new']]
-
     # input arguments:
     scheme = Instance(FunctionType)  # function for determining the destination
@@ -243,7 +237,7 @@ def dispatch_query_reply(self, msg):
         content = msg['content']
         for uuid in content.get('engines', {}).values():
-            self._register_engine(cast_bytes(uuid))
+            self._register_engine(uuid.encode("utf8"))
 
     @util.log_errors
     def dispatch_notification(self, msg):
@@ -266,7 +260,7 @@ def dispatch_notification(self, msg):
             self.log.error("Unhandled message type: %r" % msg_type)
         else:
             try:
-                handler(cast_bytes(msg['content']['uuid']))
+                handler(msg['content']['uuid'].encode("utf8"))
             except Exception:
                 self.log.error("task::Invalid notification msg: %r", msg, exc_info=True)
@@ -374,7 +368,7 @@ def dispatch_submission(self, raw_msg):
         # get targets as a set of bytes objects
         # from a list of unicode objects
         targets = md.get('targets', [])
-        targets = set(map(cast_bytes, targets))
+        targets = set(t.encode("utf8", "replace") for t in targets)
 
         retries = md.get('retries', 0)
         self.retries[msg_id] = retries
diff --git a/ipyparallel/datapub.py b/ipyparallel/datapub.py
index c09d12154..efa80857c 100644
--- a/ipyparallel/datapub.py
+++ b/ipyparallel/datapub.py
@@ -1 +1 @@
-from .engine.datapub import publish_data
+from .engine.datapub import publish_data  # noqa: F401
diff --git a/ipyparallel/engine/app.py b/ipyparallel/engine/app.py
index c7fd9a850..c795db82d 100755
--- a/ipyparallel/engine/app.py
+++ b/ipyparallel/engine/app.py
@@ -19,7 +19,6 @@
 from ipykernel.kernelapp import IPKernelApp
 from ipykernel.zmqshell import ZMQInteractiveShell
 from IPython.core.profiledir import ProfileDir
-from ipython_genutils.py3compat import cast_bytes
 from jupyter_client.localinterfaces import localhost
 from jupyter_client.session import Session
 from jupyter_client.session import session_aliases
@@ -330,7 +329,7 @@ def load_connector_file(self):
         # DO NOT allow override of basic URLs, serialization, or key
         # JSON file takes top priority there
-        config.Session.key = cast_bytes(d['key'])
+        config.Session.key = d['key'].encode('utf8')
         config.Session.signature_scheme = d['signature_scheme']
 
         self.registration_url = d['interface'] + ':%i' % d['registration']
@@ -551,11 +550,11 @@ def urls(key):
                 sys.stdout = self.out_stream_factory(
                     self.session, iopub_socket, u'stdout'
                 )
-                sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
+                sys.stdout.topic = f"engine.{self.id}.stdout".encode("ascii")
                 sys.stderr = self.out_stream_factory(
                     self.session, iopub_socket, u'stderr'
                 )
-                sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
+                sys.stderr.topic = f"engine.{self.id}.stderr".encode("ascii")
 
                 # copied from ipykernel 6, which captures sys.__stderr__ at the FD-level
                 if getattr(sys.stderr, "_original_stdstream_copy", None) is not None:
@@ -572,7 +571,9 @@ def urls(key):
             )
         if self.display_hook_factory:
             sys.displayhook = self.display_hook_factory(self.session, iopub_socket)
-            sys.displayhook.topic = cast_bytes('engine.%i.execute_result' % self.id)
+            sys.displayhook.topic = f"engine.{self.id}.execute_result".encode(
+                "ascii"
+            )
 
         self.kernel = Kernel(
             parent=self,
@@ -587,8 +588,8 @@ def urls(key):
             log=self.log,
         )
 
-        self.kernel.shell.display_pub.topic = cast_bytes(
-            'engine.%i.displaypub' % self.id
+        self.kernel.shell.display_pub.topic = f"engine.{self.id}.displaypub".encode(
+            "ascii"
         )
 
         # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
diff --git a/ipyparallel/engine/kernel.py b/ipyparallel/engine/kernel.py
index 0a787ac9f..5c95e3dcd 100644
--- a/ipyparallel/engine/kernel.py
+++ b/ipyparallel/engine/kernel.py
@@ -2,10 +2,6 @@
 import sys
 
 from ipykernel.ipkernel import IPythonKernel
-from ipython_genutils.py3compat import cast_bytes
-from ipython_genutils.py3compat import safe_unicode
-from ipython_genutils.py3compat import string_types
-from ipython_genutils.py3compat import unicode_type
 from traitlets import Integer
 from traitlets import Type
@@ -31,7 +27,7 @@ def _topic(self, topic):
         """prefixed topic for IOPub messages"""
         base = "engine.%s" % self.engine_id
 
-        return cast_bytes("%s.%s" % (base, topic))
+        return f"{base}.{topic}".encode("utf8")
 
     def __init__(self, **kwargs):
         super(IPythonParallelKernel, self).__init__(**kwargs)
@@ -154,10 +150,14 @@ def do_apply(self, content, bufs, msg_id, reply_metadata):
         except BaseException as e:
             # invoke IPython traceback formatting
             shell.showtraceback()
+            try:
+                str_evalue = str(e)
+            except Exception as str_error:
+                str_evalue = f"Failed to cast exception to string: {str_error}"
             reply_content = {
                 'traceback': [],
-                'ename': unicode_type(type(e).__name__),
-                'evalue': safe_unicode(e),
+                'ename': str(type(e).__name__),
+                'evalue': str_evalue,
             }
             # get formatted traceback, which ipykernel recorded
             if hasattr(shell, '_last_traceback'):
@@ -191,7 +191,7 @@ def do_apply(self, content, bufs, msg_id, reply_metadata):
     def abort_request(self, stream, ident, parent):
         """abort a specific msg by id"""
         msg_ids = parent['content'].get('msg_ids', None)
-        if isinstance(msg_ids, string_types):
+        if isinstance(msg_ids, str):
             msg_ids = [msg_ids]
         if not msg_ids:
             self._abort_queues()
diff --git a/ipyparallel/engine/log.py b/ipyparallel/engine/log.py
index bc42f2d4b..c45dc286e 100644
--- a/ipyparallel/engine/log.py
+++ b/ipyparallel/engine/log.py
@@ -1,9 +1,3 @@
-from logging import DEBUG
-from logging import ERROR
-from logging import FATAL
-from logging import INFO
-from logging import WARN
-
 from zmq.log.handlers import PUBHandler
diff --git a/ipyparallel/error.py b/ipyparallel/error.py
index 0735553d2..61bc6b35c 100644
--- a/ipyparallel/error.py
+++ b/ipyparallel/error.py
@@ -5,34 +5,15 @@
 
 .. inheritance-diagram:: ipyparallel.error
    :parts: 3
-
-Authors:
-
-* Brian Granger
-* Min RK
 """
 from __future__ import print_function
 
 import sys
 import traceback
 
-from ipython_genutils.py3compat import unicode_type
-
 __docformat__ = "restructuredtext en"
-
-# Tell nose to skip this module
-__test__ = {}
-
-# -------------------------------------------------------------------------------
-# Copyright (C) 2008-2011 The IPython Development Team
-#
-# Distributed under the terms of the BSD License.  The full license is in
-# the file COPYING, distributed as part of this software.
-# -------------------------------------------------------------------------------
-# -------------------------------------------------------------------------------
-# Error classes
-# -------------------------------------------------------------------------------
 
 
 class IPythonError(Exception):
     """Base exception that all of our exceptions inherit from.
@@ -255,8 +236,8 @@ def wrap_exception(engine_info={}):
     exc_content = {
         'status': 'error',
         'traceback': stb,
-        'ename': unicode_type(etype.__name__),
-        'evalue': unicode_type(evalue),
+        'ename': etype.__name__,
+        'evalue': str(evalue),
         'engine_info': engine_info,
     }
     return exc_content
diff --git a/ipyparallel/serialize/canning.py b/ipyparallel/serialize/canning.py
index b83ef98e4..4d172a3c7 100644
--- a/ipyparallel/serialize/canning.py
+++ b/ipyparallel/serialize/canning.py
@@ -7,23 +7,10 @@
 import sys
 from types import FunctionType
 
-from ipython_genutils import py3compat
-from ipython_genutils.importstring import import_item
-from ipython_genutils.py3compat import buffer_to_bytes
-from ipython_genutils.py3compat import buffer_to_bytes_py2
-from ipython_genutils.py3compat import iteritems
-from ipython_genutils.py3compat import string_types
+from traitlets import import_item
 from traitlets.log import get_logger
 
-from . import codeutil  # This registers a hook when it's imported
-
-if py3compat.PY3:
-    buffer = memoryview
-    class_type = type
-else:
-    from types import ClassType
-
-    class_type = (type, ClassType)
+from . import codeutil  # noqa This registers a hook when it's imported
@@ -34,7 +21,7 @@ def _get_cell_type(a=None):
     def inner():
         return a
 
-    return type(py3compat.get_closure(inner)[0])
+    return type(inner.__closure__[0])
 
 
 cell_type = _get_cell_type()
@@ -113,7 +100,7 @@ def use_pickle():
 # -------------------------------------------------------------------------------
 
 
-class CannedObject(object):
+class CannedObject:
     def __init__(self, obj, keys=[], hook=None):
         """can an object for safe pickling
@@ -154,7 +141,7 @@ class Reference(CannedObject):
     """object for wrapping a remote reference by name."""
 
     def __init__(self, name):
-        if not isinstance(name, string_types):
+        if not isinstance(name, str):
             raise TypeError("illegal name: %r" % name)
         self.name = name
         self.buffers = []
@@ -181,7 +168,7 @@ def get_object(self, g=None):
         def inner():
             return cell_contents
 
-        return py3compat.get_closure(inner)[0]
+        return inner.__closure__[0]
 
 
 class CannedFunction(CannedObject):
@@ -203,7 +190,7 @@ def __init__(self, f):
         else:
             self.annotations = None
 
-        closure = py3compat.get_closure(f)
+        closure = f.__closure__
         if closure:
             self.closure = tuple(can(cell) for cell in closure)
         else:
@@ -320,7 +307,7 @@ def __init__(self, cls):
         self.buffers = []
 
     def _check_type(self, obj):
-        assert isinstance(obj, class_type), "Not a class type"
+        assert isinstance(obj, type), "Not a class type"
 
     def get_object(self, g=None):
         parents = tuple(uncan(p, g) for p in self.parents)
@@ -351,7 +338,7 @@ def __init__(self, obj):
         else:
             # ensure contiguous
             obj = ascontiguousarray(obj, dtype=None)
-        self.buffers = [buffer(obj)]
+        self.buffers = [memoryview(obj)]
 
     def get_object(self, g=None):
         from numpy import frombuffer
@@ -361,18 +348,12 @@ def get_object(self, g=None):
             from . import serialize
 
             # we just pickled it
-            return serialize.pickle.loads(buffer_to_bytes_py2(data))
+            return serialize.pickle.loads(data)
         else:
-            if not py3compat.PY3 and isinstance(data, memoryview):
-                # frombuffer doesn't accept memoryviews on Python 2,
-                # so cast to old-style buffer
-                data = buffer(data.tobytes())
             return frombuffer(data, dtype=self.dtype).reshape(self.shape)
 
 
 class CannedBytes(CannedObject):
-    wrap = staticmethod(buffer_to_bytes)
-
     def __init__(self, obj):
         self.buffers = [obj]
@@ -380,15 +361,19 @@ def get_object(self, g=None):
         data = self.buffers[0]
         return self.wrap(data)
 
-
-class CannedBuffer(CannedBytes):
-    wrap = buffer
+    @staticmethod
+    def wrap(data):
+        if isinstance(data, bytes):
+            return data
+        else:
+            return memoryview(data).tobytes()
 
 
 class CannedMemoryView(CannedBytes):
     wrap = memoryview
 
 
+CannedBuffer = CannedMemoryView
 # -------------------------------------------------------------------------------
 # Functions
 # -------------------------------------------------------------------------------
@@ -399,7 +384,7 @@ def _import_mapping(mapping, original=None):
     log = get_logger()
     log.debug("Importing canning map")
     for key, value in list(mapping.items()):
-        if isinstance(key, string_types):
+        if isinstance(key, str):
             try:
                 cls = import_item(key)
             except Exception:
@@ -430,8 +415,8 @@ def can(obj):
 
     import_needed = False
 
-    for cls, canner in iteritems(can_map):
-        if isinstance(cls, string_types):
+    for cls, canner in can_map.items():
+        if isinstance(cls, str):
             import_needed = True
             break
         elif istype(obj, cls):
@@ -447,7 +432,7 @@ def can(obj):
 
 
 def can_class(obj):
-    if isinstance(obj, class_type) and obj.__module__ == '__main__':
+    if isinstance(obj, type) and obj.__module__ == '__main__':
         return CannedClass(obj)
     else:
         return obj
@@ -457,7 +442,7 @@ def can_dict(obj):
     """can the *values* of a dict"""
     if istype(obj, dict):
         newobj = {}
-        for k, v in iteritems(obj):
+        for k, v in obj.items():
             newobj[k] = can(v)
         return newobj
     else:
@@ -480,8 +465,8 @@ def uncan(obj, g=None):
     """invert canning"""
 
     import_needed = False
-    for cls, uncanner in iteritems(uncan_map):
-        if isinstance(cls, string_types):
+    for cls, uncanner in uncan_map.items():
+        if isinstance(cls, str):
             import_needed = True
             break
         elif isinstance(obj, cls):
@@ -499,7 +484,7 @@ def uncan_dict(obj, g=None):
     if istype(obj, dict):
         newobj = {}
-        for k, v in iteritems(obj):
+        for k, v in obj.items():
             newobj[k] = uncan(v, g)
         return newobj
     else:
@@ -535,11 +520,9 @@ def can_dependent(obj):
     bytes: CannedBytes,
     memoryview: CannedMemoryView,
     cell_type: CannedCell,
-    class_type: can_class,
+    type: can_class,
     'ipyparallel.dependent': can_dependent,
 }
-if buffer is not memoryview:
-    can_map[buffer] = CannedBuffer
 
 uncan_map = {
     CannedObject: lambda obj, g: obj.get_object(g),
diff --git a/ipyparallel/serialize/serialize.py b/ipyparallel/serialize/serialize.py
index c7ced793d..297311c6e 100644
--- a/ipyparallel/serialize/serialize.py
+++ b/ipyparallel/serialize/serialize.py
@@ -18,7 +18,6 @@
 
 from itertools import chain
 
-from ipython_genutils.py3compat import PY3, buffer_to_bytes_py2
 from .canning import (
     can,
     uncan,
@@ -31,15 +30,12 @@
 
 from jupyter_client.session import MAX_ITEMS, MAX_BYTES
 
-if PY3:
-    buffer = memoryview
-
 # -----------------------------------------------------------------------------
 # Serialization Functions
 # -----------------------------------------------------------------------------
 
 
-class PrePickled(object):
+class PrePickled:
     """Wrapper for a pre-pickled object
 
     Used for pre-emptively pickling re-used objects
@@ -53,17 +49,9 @@ def __init__(self, obj):
 
 def _nbytes(buf):
     """Return byte-size of a memoryview or buffer"""
     if isinstance(buf, memoryview):
-        if PY3:
-            # py3 introduces nbytes attribute
-            return buf.nbytes
-        else:
-            # compute nbytes on py2
-            size = buf.itemsize
-            for dim in buf.shape:
-                size *= dim
-            return size
+        return buf.nbytes
     else:
-        # not a memoryview, raw bytes/ py2 buffer
+        # not a memoryview, raw bytes
         return len(buf)
@@ -81,8 +69,6 @@ def _extract_buffers(obj, threshold=MAX_BYTES):
         # because pickling buffer objects just results in broken pointers
         elif isinstance(buf, memoryview):
             obj.buffers[i] = buf.tobytes()
-        elif isinstance(buf, buffer):
-            obj.buffers[i] = bytes(buf)
     return buffers
@@ -147,7 +133,7 @@ def deserialize_object(buffers, g=None):
     (newobj, bufs) : unpacked object, and the list of remaining unused buffers.
     """
     bufs = list(buffers)
-    pobj = buffer_to_bytes_py2(bufs.pop(0))
+    pobj = bufs.pop(0)
     canned = pickle.loads(pobj)
     if istype(canned, sequence_types) and len(canned) < MAX_ITEMS:
         for c in canned:
@@ -213,7 +199,7 @@ def unpack_apply_message(bufs, g=None, copy=True):
     bufs = list(bufs)  # allow us to pop
     assert len(bufs) >= 2, "not enough buffers!"
     f, bufs = deserialize_object(bufs, g)
-    pinfo = buffer_to_bytes_py2(bufs.pop(0))
+    pinfo = bufs.pop(0)
     info = pickle.loads(pinfo)
     arg_bufs, kwarg_bufs = bufs[: info['narg_bufs']], bufs[info['narg_bufs'] :]
diff --git a/ipyparallel/tests/test_asyncresult.py b/ipyparallel/tests/test_asyncresult.py
index 819167ab2..2bc1f1023 100644
--- a/ipyparallel/tests/test_asyncresult.py
+++ b/ipyparallel/tests/test_asyncresult.py
@@ -9,10 +9,8 @@
 import ipyparallel as ipp
 from .clienttest import ClusterTestCase
 from .clienttest import raises_remote
-from ipyparallel import Client
 from ipyparallel import error
 from ipyparallel.error import TimeoutError
-from ipyparallel.tests import add_engines
 
 
 def wait(n):
diff --git a/ipyparallel/tests/test_db.py b/ipyparallel/tests/test_db.py
index 178f04515..eade744db 100644
--- a/ipyparallel/tests/test_db.py
+++ b/ipyparallel/tests/test_db.py
@@ -11,7 +11,6 @@
 from datetime import timedelta
 from unittest import TestCase
 
-import pytest
 from jupyter_client.session import Session
 from tornado.ioloop import IOLoop
diff --git a/ipyparallel/tests/test_dependency.py b/ipyparallel/tests/test_dependency.py
index 081a9b1e1..32704c18c 100644
--- a/ipyparallel/tests/test_dependency.py
+++ b/ipyparallel/tests/test_dependency.py
@@ -1,13 +1,10 @@
 """Tests for dependency.py"""
 # Copyright (c) IPython Development Team.
 # Distributed under the terms of the Modified BSD License.
-import os
-
 import ipyparallel as ipp
 from .clienttest import ClusterTestCase
 from ipyparallel.serialize import can
 from ipyparallel.serialize import uncan
-from ipyparallel.tests import add_engines
 from ipyparallel.util import interactive
diff --git a/ipyparallel/tests/test_executor.py b/ipyparallel/tests/test_executor.py
index 551cec58c..a921fbfdd 100644
--- a/ipyparallel/tests/test_executor.py
+++ b/ipyparallel/tests/test_executor.py
@@ -8,7 +8,6 @@
 
 from .clienttest import ClusterTestCase
 from ipyparallel.client.view import LoadBalancedView
-from ipyparallel.tests import add_engines
 
 
 def wait(n):
diff --git a/ipyparallel/tests/test_joblib.py b/ipyparallel/tests/test_joblib.py
index 24c9854ee..aba6e0c5d 100644
--- a/ipyparallel/tests/test_joblib.py
+++ b/ipyparallel/tests/test_joblib.py
@@ -7,9 +7,8 @@
 from .clienttest import ClusterTestCase
 
 try:
-    import joblib
     from joblib import Parallel, delayed
-    from ipyparallel.client._joblib import IPythonParallelBackend
+    from ipyparallel.client._joblib import IPythonParallelBackend  # noqa
 except (ImportError, TypeError):
     have_joblib = False
 else:
diff --git a/ipyparallel/tests/test_remotefunction.py b/ipyparallel/tests/test_remotefunction.py
index cbc14a0eb..905764283 100644
--- a/ipyparallel/tests/test_remotefunction.py
+++ b/ipyparallel/tests/test_remotefunction.py
@@ -4,7 +4,6 @@
 from __future__ import division
 
 import ipyparallel as ipp
-from .clienttest import add_engines
 from .clienttest import ClusterTestCase
diff --git a/ipyparallel/tests/test_serialize.py b/ipyparallel/tests/test_serialize.py
index 405607038..74e43dc28 100644
--- a/ipyparallel/tests/test_serialize.py
+++ b/ipyparallel/tests/test_serialize.py
@@ -151,7 +151,7 @@ def test_numpy_in_dict():
 
 def test_class():
     @interactive
-    class C(object):
+    class C:
         a = 5
 
     bufs = serialize_object(dict(C=C))
@@ -208,7 +208,7 @@ def test_list():
 
 def test_class_inheritance():
     @interactive
-    class C(object):
+    class C:
         a = 5
 
     @interactive
diff --git a/ipyparallel/tests/test_view.py b/ipyparallel/tests/test_view.py
index c677eff16..d58903538 100644
--- a/ipyparallel/tests/test_view.py
+++ b/ipyparallel/tests/test_view.py
@@ -13,7 +13,6 @@
 import zmq
 from IPython import get_ipython
 from IPython.utils.io import capture_output
-from ipython_genutils.py3compat import unicode_type
 
 import ipyparallel as ipp
 from .clienttest import ClusterTestCase
@@ -419,13 +418,13 @@ def test_importer(self):
         view = self.client[-1]
         view.clear(block=True)
         with view.importer:
-            import re
+            import re  # noqa: F401
 
         @interactive
         def findall(pat, s):
             # this globals() step isn't necessary in real code
             # only to prevent a closure in the test
-            re = globals()['re']
+            re = globals()['re']  # noqa: F811
             return re.findall(pat, s)
 
         self.assertEqual(
@@ -596,7 +595,7 @@ def test_execute_displaypub(self):
         view.execute("from IPython.core.display import *")
         ar = view.execute("[ display(i) for i in range(5) ]", block=True)
 
-        expected = [{u'text/plain': unicode_type(j)} for j in range(5)]
+        expected = [{u'text/plain': str(j)} for j in range(5)]
         for outputs in ar.outputs:
             mimes = [out['data'] for out in outputs]
             self.assertEqual(mimes, expected)
@@ -613,7 +612,7 @@ def publish():
         ar = view.apply_async(publish)
         ar.get(5)
         assert ar.wait_for_output(5)
-        expected = [{u'text/plain': unicode_type(j)} for j in range(5)]
+        expected = [{u'text/plain': str(j)} for j in range(5)]
         for outputs in ar.outputs:
             mimes = [out['data'] for out in outputs]
             self.assertEqual(mimes, expected)
@@ -795,7 +794,7 @@ def test_nested_getitem_setitem(self):
         view.execute(
             '\n'.join(
                 [
-                    'class A(object): pass',
+                    'class A: pass',
                     'a = A()',
                     'a.b = 128',
                 ]
@@ -835,8 +834,8 @@ def test_sync_imports(self):
         view = self.client[-1]
         with capture_output() as io:
             with view.sync_imports():
-                import IPython
-        self.assertIn("IPython", io.stdout)
+                import IPython  # noqa
+        assert "IPython" in io.stdout
 
         @interactive
         def find_ipython():
@@ -848,8 +847,8 @@ def test_sync_imports_quiet(self):
         view = self.client[-1]
         with capture_output() as io:
             with view.sync_imports(quiet=True):
-                import IPython
-        self.assertEqual(io.stdout, '')
+                import IPython  # noqa
+        assert io.stdout == ''
 
         @interactive
         def find_ipython():
diff --git a/ipyparallel/util.py b/ipyparallel/util.py
index 387e53a87..b124ed339 100644
--- a/ipyparallel/util.py
+++ b/ipyparallel/util.py
@@ -30,9 +30,6 @@
 from IPython.core.profiledir import ProfileDir
 from IPython.core.profiledir import ProfileDirError
 from IPython.paths import get_ipython_dir
-from ipython_genutils.py3compat import iteritems
-from ipython_genutils.py3compat import itervalues
-from ipython_genutils.py3compat import string_types
 from jupyter_client.localinterfaces import is_public_ip
 from jupyter_client.localinterfaces import localhost
 from jupyter_client.localinterfaces import public_ips
@@ -71,7 +68,7 @@ class ReverseDict(dict):
     def __init__(self, *args, **kwargs):
         dict.__init__(self, *args, **kwargs)
         self._reverse = dict()
-        for key, value in iteritems(self):
+        for key, value in self.items():
             self._reverse[value] = key
 
     def __getitem__(self, key):
@@ -114,8 +111,8 @@ def log_errors(f):
     def logs_errors(self, *args, **kwargs):
         try:
             result = f(self, *args, **kwargs)
-        except Exception:
-            self.log.error("Uncaught exception in {f}: {future.exception()}")
+        except Exception as e:
+            self.log.exception(f"Uncaught exception in {f}: {e}")
             return
 
         if inspect.isawaitable(result):
@@ -124,7 +121,7 @@ def logs_errors(self, *args, **kwargs):
 
         def _log_error(future):
             if future.exception():
-                self.log.error("Uncaught exception in {f}: {future.exception()}")
+                self.log.error(f"Uncaught exception in {f}: {future.exception()}")
 
         future.add_done_callback(_log_error)
@@ -143,7 +140,7 @@ def is_url(url):
 
 def validate_url(url):
     """validate a url for zeromq"""
-    if not isinstance(url, string_types):
+    if not isinstance(url, str):
         raise TypeError("url must be a string, not %r" % type(url))
     url = url.lower()
@@ -180,11 +177,11 @@ def validate_url(url):
 
 def validate_url_container(container):
     """validate a potentially nested collection of urls."""
-    if isinstance(container, string_types):
+    if isinstance(container, str):
         url = container
         return validate_url(url)
     elif isinstance(container, dict):
-        container = itervalues(container)
+        container = container.values()
 
     for element in container:
         validate_url_container(element)
@@ -476,7 +473,7 @@ def int_keys(dikt):
     where there should be ints.
     """
     for k in list(dikt):
-        if isinstance(k, string_types):
+        if isinstance(k, str):
             nk = None
             try:
                 nk = int(k)
@@ -588,12 +585,12 @@ def extract_dates(obj):
     """extract ISO8601 dates from unpacked JSON"""
     if isinstance(obj, dict):
         new_obj = {}  # don't clobber
-        for k, v in iteritems(obj):
+        for k, v in obj.items():
             new_obj[k] = extract_dates(v)
         obj = new_obj
     elif isinstance(obj, (list, tuple)):
         obj = [extract_dates(o) for o in obj]
-    elif isinstance(obj, string_types):
+    elif isinstance(obj, str):
         obj = _parse_date(obj)
     return obj
diff --git a/setup.py b/setup.py
index 40b72724a..46fa4501f 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,6 @@
 
 import os
 import sys
-from glob import glob
 
 import setuptools
 from setuptools.command.bdist_egg import bdist_egg
@@ -115,7 +114,6 @@ def run(self):
     include_package_data=True,
     install_requires=[
         "entrypoints",
-        "ipython_genutils",
         "decorator",
         "pyzmq>=18",
         "traitlets>=4.3",