#
# Module providing the `SyncManager` class for dealing
# with shared objects
#
# multiprocessing/managers.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#

__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]

#
# Imports
#

import sys
import threading
import array
import queue
import time

from traceback import format_exc

from . import connection
from .context import reduction, get_spawning_popen, ProcessError
from . import pool
from . import process
from . import util
from . import get_context

#
# Register some things for pickling
#

def reduce_array(a):
    return array.array, (a.typecode, a.tobytes())
reduction.register(array.array, reduce_array)

view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
if view_types[0] is not list:       # only needed in Py3.0
    def rebuild_as_list(obj):
        return list, (list(obj),)
    for view_type in view_types:
        reduction.register(view_type, rebuild_as_list)

#
# Type for identifying shared objects
#

class Token(object):
    '''
    Type to uniquely identify a shared object
    '''
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        (self.typeid, self.address, self.id) = (typeid, address, id)

    def __getstate__(self):
        return (self.typeid, self.address, self.id)

    def __setstate__(self, state):
        (self.typeid, self.address, self.id) = state

    def __repr__(self):
        return '%s(typeid=%r, address=%r, id=%r)' % \
               (self.__class__.__name__, self.typeid, self.address, self.id)

#
# Function for communication with a manager's server process
#

def dispatch(c, id, methodname, args=(), kwds={}):
    '''
    Send a message to manager using connection `c` and return response
    '''
    c.send((id, methodname, args, kwds))
    kind, result = c.recv()
    if kind == '#RETURN':
        return result
    raise convert_to_error(kind, result)
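
# A hedged sketch of the exchange dispatch() performs (illustration only; the
# literal message below mirrors the 'dummy' ping sent by BaseManager.connect()):
# the caller sends a 4-tuple and receives a (kind, result) pair, and any kind
# other than '#RETURN' is converted into an exception by convert_to_error().
#
#     c.send((None, 'dummy', (), {}))
#     kind, result = c.recv()          # ('#RETURN', None) when all is well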

def convert_to_error(kind, result):
    if kind == '#ERROR':
        return result
    elif kind in ('#TRACEBACK', '#UNSERIALIZABLE'):
        if not isinstance(result, str):
            raise TypeError(
                "Result {0!r} (kind '{1}') type is {2}, not str".format(
                    result, kind, type(result)))
        if kind == '#UNSERIALIZABLE':
            return RemoteError('Unserializable message: %s\n' % result)
        else:
            return RemoteError(result)
    else:
        return ValueError('Unrecognized message type {!r}'.format(kind))

class RemoteError(Exception):
    def __str__(self):
        return ('\n' + '-'*75 + '\n' + str(self.args[0]) + '-'*75)

#
# Functions for finding the method names of an object
#

def all_methods(obj):
    '''
    Return a list of names of methods of `obj`
    '''
    temp = []
    for name in dir(obj):
        func = getattr(obj, name)
        if callable(func):
            temp.append(name)
    return temp

def public_methods(obj):
    '''
    Return a list of names of methods of `obj` which do not start with '_'
    '''
    return [name for name in all_methods(obj) if name[0] != '_']

#
# Server which is run in a process controlled by a manager
#

class Server(object):
    '''
    Server class which runs in a process controlled by a manager object
    '''
    public = ['shutdown', 'create', 'accept_connection', 'get_methods',
              'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']

    def __init__(self, registry, address, authkey, serializer):
        if not isinstance(authkey, bytes):
            raise TypeError(
                "Authkey {0!r} is type {1!s}, not bytes".format(
                    authkey, type(authkey)))
        self.registry = registry
        self.authkey = process.AuthenticationString(authkey)
        Listener, Client = listener_client[serializer]

        # do authentication later
        self.listener = Listener(address=address, backlog=16)
        self.address = self.listener.address

        self.id_to_obj = {'0': (None, ())}
        self.id_to_refcount = {}
        self.id_to_local_proxy_obj = {}
        self.mutex = threading.Lock()

    def serve_forever(self):
        '''
        Run the server forever
        '''
        self.stop_event = threading.Event()
        process.current_process()._manager_server = self
        try:
            accepter = threading.Thread(target=self.accepter)
            accepter.daemon = True
            accepter.start()
            try:
                while not self.stop_event.is_set():
                    self.stop_event.wait(1)
            except (KeyboardInterrupt, SystemExit):
                pass
        finally:
            if sys.stdout != sys.__stdout__: # what about stderr?
                util.debug('resetting stdout, stderr')
                sys.stdout = sys.__stdout__
                sys.stderr = sys.__stderr__
            sys.exit(0)

    def accepter(self):
        while True:
            try:
                c = self.listener.accept()
            except OSError:
                continue
            t = threading.Thread(target=self.handle_request, args=(c,))
            t.daemon = True
            t.start()

    def handle_request(self, c):
        '''
        Handle a new connection
        '''
        funcname = result = request = None
        try:
            connection.deliver_challenge(c, self.authkey)
            connection.answer_challenge(c, self.authkey)
            request = c.recv()
            ignore, funcname, args, kwds = request
            assert funcname in self.public, '%r unrecognized' % funcname
            func = getattr(self, funcname)
        except Exception:
            msg = ('#TRACEBACK', format_exc())
        else:
            try:
                result = func(c, *args, **kwds)
            except Exception:
                msg = ('#TRACEBACK', format_exc())
            else:
                msg = ('#RETURN', result)
        try:
            c.send(msg)
        except Exception as e:
            try:
                c.send(('#TRACEBACK', format_exc()))
            except Exception:
                pass
            util.info('Failure to send message: %r', msg)
            util.info(' ... request was %r', request)
            util.info(' ... exception was %r', e)

        c.close()

    def serve_client(self, conn):
        '''
        Handle requests from the proxies in a particular process/thread
        '''
        util.debug('starting server thread to service %r',
                   threading.current_thread().name)

        recv = conn.recv
        send = conn.send
        id_to_obj = self.id_to_obj

        while not self.stop_event.is_set():

            try:
                methodname = obj = None
                request = recv()
                ident, methodname, args, kwds = request
                try:
                    obj, exposed, gettypeid = id_to_obj[ident]
                except KeyError as ke:
                    try:
                        obj, exposed, gettypeid = \
                            self.id_to_local_proxy_obj[ident]
                    except KeyError as second_ke:
                        raise ke

                if methodname not in exposed:
                    raise AttributeError(
                        'method %r of %r object is not in exposed=%r' %
                        (methodname, type(obj), exposed)
                        )

                function = getattr(obj, methodname)

                try:
                    res = function(*args, **kwds)
                except Exception as e:
                    msg = ('#ERROR', e)
                else:
                    typeid = gettypeid and gettypeid.get(methodname, None)
                    if typeid:
                        rident, rexposed = self.create(conn, typeid, res)
                        token = Token(typeid, self.address, rident)
                        msg = ('#PROXY', (rexposed, token))
                    else:
                        msg = ('#RETURN', res)

            except AttributeError:
                if methodname is None:
                    msg = ('#TRACEBACK', format_exc())
                else:
                    try:
                        fallback_func = self.fallback_mapping[methodname]
                        result = fallback_func(
                            self, conn, ident, obj, *args, **kwds
                            )
                        msg = ('#RETURN', result)
                    except Exception:
                        msg = ('#TRACEBACK', format_exc())

            except EOFError:
                util.debug('got EOF -- exiting thread serving %r',
                           threading.current_thread().name)
                sys.exit(0)

            except Exception:
                msg = ('#TRACEBACK', format_exc())

            try:
                try:
                    send(msg)
                except Exception as e:
                    send(('#UNSERIALIZABLE', format_exc()))
            except Exception as e:
                util.info('exception in thread serving %r',
                        threading.current_thread().name)
                util.info(' ... message was %r', msg)
                util.info(' ... exception was %r', e)
                conn.close()
                sys.exit(1)

    def fallback_getvalue(self, conn, ident, obj):
        return obj

    def fallback_str(self, conn, ident, obj):
        return str(obj)

    def fallback_repr(self, conn, ident, obj):
        return repr(obj)

    fallback_mapping = {
        '__str__':fallback_str,
        '__repr__':fallback_repr,
        '#GETVALUE':fallback_getvalue
        }

    def dummy(self, c):
        pass

    def debug_info(self, c):
        '''
        Return some info --- useful to spot problems with refcounting
        '''
        # Perhaps include debug info about 'c'?
        with self.mutex:
            result = []
            keys = list(self.id_to_refcount.keys())
            keys.sort()
            for ident in keys:
                if ident != '0':
                    result.append('  %s:       refcount=%s\n    %s' %
                                  (ident, self.id_to_refcount[ident],
                                   str(self.id_to_obj[ident][0])[:75]))
            return '\n'.join(result)

    def number_of_objects(self, c):
        '''
        Number of shared objects
        '''
        # Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0'
        return len(self.id_to_refcount)

    def shutdown(self, c):
        '''
        Shut down this process
        '''
        try:
            util.debug('manager received shutdown message')
            c.send(('#RETURN', None))
        except:
            import traceback
            traceback.print_exc()
        finally:
            self.stop_event.set()

    def create(self, c, typeid, *args, **kwds):
        '''
        Create a new shared object and return its id
        '''
        with self.mutex:
            callable, exposed, method_to_typeid, proxytype = \
                      self.registry[typeid]

            if callable is None:
                if kwds or (len(args) != 1):
                    raise ValueError(
                        "Without callable, must have one non-keyword argument")
                obj = args[0]
            else:
                obj = callable(*args, **kwds)

            if exposed is None:
                exposed = public_methods(obj)
            if method_to_typeid is not None:
                if not isinstance(method_to_typeid, dict):
                    raise TypeError(
                        "Method_to_typeid {0!r}: type {1!s}, not dict".format(
                            method_to_typeid, type(method_to_typeid)))
                exposed = list(exposed) + list(method_to_typeid)

            ident = '%x' % id(obj)  # convert to string because xmlrpclib
                                    # only has 32 bit signed integers
            util.debug('%r callable returned object with id %r', typeid, ident)

            self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
            if ident not in self.id_to_refcount:
                self.id_to_refcount[ident] = 0

        self.incref(c, ident)
        return ident, tuple(exposed)

    def get_methods(self, c, token):
        '''
        Return the methods of the shared object indicated by token
        '''
        return tuple(self.id_to_obj[token.id][1])

    def accept_connection(self, c, name):
        '''
        Spawn a new thread to serve this connection
        '''
        threading.current_thread().name = name
        c.send(('#RETURN', None))
        self.serve_client(c)

    def incref(self, c, ident):
        with self.mutex:
            try:
                self.id_to_refcount[ident] += 1
            except KeyError as ke:
                # If no external references exist but an internal (to the
                # manager) still does and a new external reference is created
                # from it, restore the manager's tracking of it from the
                # previously stashed internal ref.
                if ident in self.id_to_local_proxy_obj:
                    self.id_to_refcount[ident] = 1
                    self.id_to_obj[ident] = \
                        self.id_to_local_proxy_obj[ident]
                    obj, exposed, gettypeid = self.id_to_obj[ident]
                    util.debug('Server re-enabled tracking & INCREF %r', ident)
                else:
                    raise ke

    def decref(self, c, ident):
        if ident not in self.id_to_refcount and \
            ident in self.id_to_local_proxy_obj:
            util.debug('Server DECREF skipping %r', ident)
            return

        with self.mutex:
            if self.id_to_refcount[ident] <= 0:
                raise AssertionError(
                    "Id {0!s} ({1!r}) has refcount {2:n}, not 1+".format(
                        ident, self.id_to_obj[ident],
                        self.id_to_refcount[ident]))
            self.id_to_refcount[ident] -= 1
            if self.id_to_refcount[ident] == 0:
                del self.id_to_refcount[ident]

        if ident not in self.id_to_refcount:
            # Two-step process in case the object turns out to contain other
            # proxy objects (e.g. a managed list of managed lists).
            # Otherwise, deleting self.id_to_obj[ident] would trigger the
            # deleting of the stored value (another managed object) which would
            # in turn attempt to acquire the mutex that is already held here.
            self.id_to_obj[ident] = (None, (), None)  # thread-safe
            util.debug('disposing of obj with id %r', ident)
            with self.mutex:
                del self.id_to_obj[ident]


#
# Class to represent state of a manager
#

class State(object):
    __slots__ = ['value']
    INITIAL = 0
    STARTED = 1
    SHUTDOWN = 2

#
# Mapping from serializer name to Listener and Client types
#

listener_client = {
    'pickle' : (connection.Listener, connection.Client),
    'xmlrpclib' : (connection.XmlListener, connection.XmlClient)
    }
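
# For illustration (a hedged sketch; the address and authkey values are made
# up): the serializer name given to BaseManager picks one of the pairs above,
# e.g.
#
#     m = BaseManager(address=('127.0.0.1', 50000), authkey=b'secret',
#                     serializer='xmlrpclib')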

#
# Definition of BaseManager
#

class BaseManager(object):
    '''
    Base class for managers
    '''
    _registry = {}
    _Server = Server

    def __init__(self, address=None, authkey=None, serializer='pickle',
                 ctx=None):
        if authkey is None:
            authkey = process.current_process().authkey
        self._address = address     # XXX not final address if eg ('', 0)
        self._authkey = process.AuthenticationString(authkey)
        self._state = State()
        self._state.value = State.INITIAL
        self._serializer = serializer
        self._Listener, self._Client = listener_client[serializer]
        self._ctx = ctx or get_context()

    def get_server(self):
        '''
        Return server object with serve_forever() method and address attribute
        '''
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return Server(self._registry, self._address,
                      self._authkey, self._serializer)

    def connect(self):
        '''
        Connect manager object to the server process
        '''
        Listener, Client = listener_client[self._serializer]
        conn = Client(self._address, authkey=self._authkey)
        dispatch(conn, None, 'dummy')
        self._state.value = State.STARTED
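
    # A hedged sketch of connecting to a manager server that is already running
    # elsewhere (the typeid 'get_queue', the address and the authkey are
    # illustrative assumptions; the server side must have registered
    # 'get_queue' with a callable returning the shared queue):
    #
    #     class QueueManager(BaseManager):
    #         pass
    #     QueueManager.register('get_queue')
    #     m = QueueManager(address=('127.0.0.1', 50000), authkey=b'abracadabra')
    #     m.connect()
    #     q = m.get_queue()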

    def start(self, initializer=None, initargs=()):
        '''
        Spawn a server process for this manager object
        '''
        if self._state.value != State.INITIAL:
            if self._state.value == State.STARTED:
                raise ProcessError("Already started server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))

        if initializer is not None and not callable(initializer):
            raise TypeError('initializer must be a callable')

        # pipe over which we will retrieve address of server
        reader, writer = connection.Pipe(duplex=False)

        # spawn process which runs a server
        self._process = self._ctx.Process(
            target=type(self)._run_server,
            args=(self._registry, self._address, self._authkey,
                  self._serializer, writer, initializer, initargs),
            )
        ident = ':'.join(str(i) for i in self._process._identity)
        self._process.name = type(self).__name__  + '-' + ident
        self._process.start()

        # get address of server
        writer.close()
        self._address = reader.recv()
        reader.close()

        # register a finalizer
        self._state.value = State.STARTED
        self.shutdown = util.Finalize(
            self, type(self)._finalize_manager,
            args=(self._process, self._address, self._authkey,
                  self._state, self._Client),
            exitpriority=0
            )

    @classmethod
    def _run_server(cls, registry, address, authkey, serializer, writer,
                    initializer=None, initargs=()):
        '''
        Create a server, report its address and run it
        '''
        if initializer is not None:
            initializer(*initargs)

        # create server
        server = cls._Server(registry, address, authkey, serializer)

        # inform parent process of the server's address
        writer.send(server.address)
        writer.close()

        # run the manager
        util.info('manager serving at %r', server.address)
        server.serve_forever()

    def _create(self, typeid, *args, **kwds):
        '''
        Create a new shared object; return the token and exposed tuple
        '''
        assert self._state.value == State.STARTED, 'server not yet started'
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            id, exposed = dispatch(conn, None, 'create', (typeid,)+args, kwds)
        finally:
            conn.close()
        return Token(typeid, self._address, id), exposed

    def join(self, timeout=None):
        '''
        Join the manager process (if it has been spawned)
        '''
        if self._process is not None:
            self._process.join(timeout)
            if not self._process.is_alive():
                self._process = None

    def _debug_info(self):
        '''
        Return some info about the server's shared objects and connections
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'debug_info')
        finally:
            conn.close()

    def _number_of_objects(self):
        '''
        Return the number of shared objects
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'number_of_objects')
        finally:
            conn.close()

    def __enter__(self):
        if self._state.value == State.INITIAL:
            self.start()
        if self._state.value != State.STARTED:
            if self._state.value == State.INITIAL:
                raise ProcessError("Unable to start server")
            elif self._state.value == State.SHUTDOWN:
                raise ProcessError("Manager has shut down")
            else:
                raise ProcessError(
                    "Unknown state {!r}".format(self._state.value))
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()

    @staticmethod
    def _finalize_manager(process, address, authkey, state, _Client):
        '''
        Shut down the manager process; will be registered as a finalizer
        '''
        if process.is_alive():
            util.info('sending shutdown message to manager')
            try:
                conn = _Client(address, authkey=authkey)
                try:
                    dispatch(conn, None, 'shutdown')
                finally:
                    conn.close()
            except Exception:
                pass

            process.join(timeout=1.0)
            if process.is_alive():
                util.info('manager still alive')
                if hasattr(process, 'terminate'):
                    util.info('trying to `terminate()` manager process')
                    process.terminate()
                    process.join(timeout=0.1)
                    if process.is_alive():
                        util.info('manager still alive after terminate')

        state.value = State.SHUTDOWN
        try:
            del BaseProxy._address_to_local[address]
        except KeyError:
            pass

    @property
    def address(self):
        return self._address

    @classmethod
    def register(cls, typeid, callable=None, proxytype=None, exposed=None,
                 method_to_typeid=None, create_method=True):
        '''
        Register a typeid with the manager type
        '''
        if '_registry' not in cls.__dict__:
            cls._registry = cls._registry.copy()

        if proxytype is None:
            proxytype = AutoProxy

        exposed = exposed or getattr(proxytype, '_exposed_', None)

        method_to_typeid = method_to_typeid or \
                           getattr(proxytype, '_method_to_typeid_', None)

        if method_to_typeid:
            for key, value in list(method_to_typeid.items()): # isinstance?
                assert type(key) is str, '%r is not a string' % key
                assert type(value) is str, '%r is not a string' % value

        cls._registry[typeid] = (
            callable, exposed, method_to_typeid, proxytype
            )

        if create_method:
            def temp(self, *args, **kwds):
                util.debug('requesting creation of a shared %r object', typeid)
                token, exp = self._create(typeid, *args, **kwds)
                proxy = proxytype(
                    token, self._serializer, manager=self,
                    authkey=self._authkey, exposed=exp
                    )
                conn = self._Client(token.address, authkey=self._authkey)
                dispatch(conn, None, 'decref', (token.id,))
                return proxy
            temp.__name__ = typeid
            setattr(cls, typeid, temp)
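
# A hedged usage sketch for BaseManager.register() (Calculator and
# CalculatorManager are invented names; with proxytype left as None an
# AutoProxy exposing the object's public methods is generated):
#
#     class Calculator:
#         def add(self, x, y):
#             return x + y
#
#     class CalculatorManager(BaseManager):
#         pass
#
#     CalculatorManager.register('Calculator', Calculator)
#
#     with CalculatorManager() as manager:
#         calc = manager.Calculator()
#         assert calc.add(4, 3) == 7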

#
# Subclass of set which gets cleared after a fork
#

class ProcessLocalSet(set):
    def __init__(self):
        util.register_after_fork(self, lambda obj: obj.clear())
    def __reduce__(self):
        return type(self), ()

#
# Definition of BaseProxy
#

class BaseProxy(object):
    '''
    A base for proxies of shared objects
    '''
    _address_to_local = {}
    _mutex = util.ForkAwareThreadLock()

    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        with BaseProxy._mutex:
            tls_idset = BaseProxy._address_to_local.get(token.address, None)
            if tls_idset is None:
                tls_idset = util.ForkAwareLocal(), ProcessLocalSet()
                BaseProxy._address_to_local[token.address] = tls_idset

        # self._tls is used to record the connection used by this
        # thread to communicate with the manager at token.address
        self._tls = tls_idset[0]

        # self._idset is used to record the identities of all shared
        # objects for which the current process owns references and
        # which are in the manager at token.address
        self._idset = tls_idset[1]

        self._token = token
        self._id = self._token.id
        self._manager = manager
        self._serializer = serializer
        self._Client = listener_client[serializer][1]

        # Should be set to True only when a proxy object is being created
        # on the manager server; primary use case: nested proxy objects.
        # RebuildProxy detects when a proxy is being created on the manager
        # and sets this value appropriately.
        self._owned_by_manager = manager_owned

        if authkey is not None:
            self._authkey = process.AuthenticationString(authkey)
        elif self._manager is not None:
            self._authkey = self._manager._authkey
        else:
            self._authkey = process.current_process().authkey

        if incref:
            self._incref()

        util.register_after_fork(self, BaseProxy._after_fork)

    def _connect(self):
        util.debug('making connection to manager')
        name = process.current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'accept_connection', (name,))
        self._tls.connection = conn

    def _callmethod(self, methodname, args=(), kwds={}):
        '''
        Try to call a method of the referent and return a copy of the result
        '''
        try:
            conn = self._tls.connection
        except AttributeError:
            util.debug('thread %r does not own a connection',
                       threading.current_thread().name)
            self._connect()
            conn = self._tls.connection

        conn.send((self._id, methodname, args, kwds))
        kind, result = conn.recv()

        if kind == '#RETURN':
            return result
        elif kind == '#PROXY':
            exposed, token = result
            proxytype = self._manager._registry[token.typeid][-1]
            token.address = self._token.address
            proxy = proxytype(
                token, self._serializer, manager=self._manager,
                authkey=self._authkey, exposed=exposed
                )
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        raise convert_to_error(kind, result)

    def _getvalue(self):
        '''
        Get a copy of the value of the referent
        '''
        return self._callmethod('#GETVALUE')

    def _incref(self):
        if self._owned_by_manager:
            util.debug('owned_by_manager skipped INCREF of %r', self._token.id)
            return

        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'incref', (self._id,))
        util.debug('INCREF %r', self._token.id)

        self._idset.add(self._id)

        state = self._manager and self._manager._state

        self._close = util.Finalize(
            self, BaseProxy._decref,
            args=(self._token, self._authkey, state,
                  self._tls, self._idset, self._Client),
            exitpriority=10
            )

    @staticmethod
    def _decref(token, authkey, state, tls, idset, _Client):
        idset.discard(token.id)

        # check whether manager is still alive
        if state is None or state.value == State.STARTED:
            # tell manager this process no longer cares about referent
            try:
                util.debug('DECREF %r', token.id)
                conn = _Client(token.address, authkey=authkey)
                dispatch(conn, None, 'decref', (token.id,))
            except Exception as e:
                util.debug('... decref failed %s', e)

        else:
            util.debug('DECREF %r -- manager already shutdown', token.id)

        # check whether we can close this thread's connection because
        # the process owns no more references to objects for this manager
        if not idset and hasattr(tls, 'connection'):
            util.debug('thread %r has no more proxies so closing conn',
                       threading.current_thread().name)
            tls.connection.close()
            del tls.connection

    def _after_fork(self):
        self._manager = None
        try:
            self._incref()
        except Exception as e:
            # the proxy may just be for a manager which has shutdown
            util.info('incref failed: %s' % e)

    def __reduce__(self):
        kwds = {}
        if get_spawning_popen() is not None:
            kwds['authkey'] = self._authkey

        if getattr(self, '_isauto', False):
            kwds['exposed'] = self._exposed_
            return (RebuildProxy,
                    (AutoProxy, self._token, self._serializer, kwds))
        else:
            return (RebuildProxy,
                    (type(self), self._token, self._serializer, kwds))

    def __deepcopy__(self, memo):
        return self._getvalue()

    def __repr__(self):
        return '<%s object, typeid %r at %#x>' % \
               (type(self).__name__, self._token.typeid, id(self))

    def __str__(self):
        '''
        Return representation of the referent (or a fall-back if that fails)
        '''
        try:
            return self._callmethod('__repr__')
        except Exception:
            return repr(self)[:-1] + "; '__str__()' failed>"

#
# Function used for unpickling
#

def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.
    '''
    server = getattr(process.current_process(), '_manager_server', None)
    if server and server.address == token.address:
        util.debug('Rebuild a proxy owned by manager, token=%r', token)
        kwds['manager_owned'] = True
        if token.id not in server.id_to_local_proxy_obj:
            server.id_to_local_proxy_obj[token.id] = \
                server.id_to_obj[token.id]
    incref = (
        kwds.pop('incref', True) and
        not getattr(process.current_process(), '_inheriting', False)
        )
    return func(token, serializer, incref=incref, **kwds)

#
# Functions to create proxies and proxy types
#

def MakeProxyType(name, exposed, _cache={}):
    '''
    Return a proxy type whose methods are given by `exposed`
    '''
    exposed = tuple(exposed)
    try:
        return _cache[(name, exposed)]
    except KeyError:
        pass

    dic = {}

    for meth in exposed:
        exec('''def %s(self, *args, **kwds):
        return self._callmethod(%r, args, kwds)''' % (meth, meth), dic)

    ProxyType = type(name, (BaseProxy,), dic)
    ProxyType._exposed_ = exposed
    _cache[(name, exposed)] = ProxyType
    return ProxyType
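
# An illustrative sketch (DequeProxy, MyManager and the use of
# collections.deque are assumptions, not part of this module): MakeProxyType
# builds a BaseProxy subclass whose listed methods forward to the referent via
# _callmethod(), suitable as the proxytype argument of BaseManager.register().
#
#     import collections
#     DequeProxy = MakeProxyType('DequeProxy',
#                                ('append', 'appendleft', 'pop', 'popleft'))
#     MyManager.register('deque', collections.deque, DequeProxy)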


def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True):
    '''
    Return an auto-proxy for `token`
    '''
    _Client = listener_client[serializer][1]

    if exposed is None:
        conn = _Client(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()

    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = process.current_process().authkey

    ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
                      incref=incref)
    proxy._isauto = True
    return proxy

#
# Types/callables which we will register with SyncManager
#

class Namespace(object):
    def __init__(self, **kwds):
        self.__dict__.update(kwds)
    def __repr__(self):
        items = list(self.__dict__.items())
        temp = []
        for name, value in items:
            if not name.startswith('_'):
                temp.append('%s=%r' % (name, value))
        temp.sort()
        return '%s(%s)' % (self.__class__.__name__, ', '.join(temp))

class Value(object):
    def __init__(self, typecode, value, lock=True):
        self._typecode = typecode
        self._value = value
    def get(self):
        return self._value
    def set(self, value):
        self._value = value
    def __repr__(self):
        return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
    value = property(get, set)

def Array(typecode, sequence, lock=True):
    return array.array(typecode, sequence)

#
# Proxy types used by SyncManager
#

class IteratorProxy(BaseProxy):
    _exposed_ = ('__next__', 'send', 'throw', 'close')
    def __iter__(self):
        return self
    def __next__(self, *args):
        return self._callmethod('__next__', args)
    def send(self, *args):
        return self._callmethod('send', args)
    def throw(self, *args):
        return self._callmethod('throw', args)
    def close(self, *args):
        return self._callmethod('close', args)


class AcquirerProxy(BaseProxy):
    _exposed_ = ('acquire', 'release')
    def acquire(self, blocking=True, timeout=None):
        args = (blocking,) if timeout is None else (blocking, timeout)
        return self._callmethod('acquire', args)
    def release(self):
        return self._callmethod('release')
    def __enter__(self):
        return self._callmethod('acquire')
    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._callmethod('release')


class ConditionProxy(AcquirerProxy):
    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def notify(self, n=1):
        return self._callmethod('notify', (n,))
    def notify_all(self):
        return self._callmethod('notify_all')
    def wait_for(self, predicate, timeout=None):
        result = predicate()
        if result:
            return result
        if timeout is not None:
            endtime = time.monotonic() + timeout
        else:
            endtime = None
            waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - time.monotonic()
                if waittime <= 0:
                    break
            self.wait(waittime)
            result = predicate()
        return result


class EventProxy(BaseProxy):
    _exposed_ = ('is_set', 'set', 'clear', 'wait')
    def is_set(self):
        return self._callmethod('is_set')
    def set(self):
        return self._callmethod('set')
    def clear(self):
        return self._callmethod('clear')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))


class BarrierProxy(BaseProxy):
    _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def abort(self):
        return self._callmethod('abort')
    def reset(self):
        return self._callmethod('reset')
    @property
    def parties(self):
        return self._callmethod('__getattribute__', ('parties',))
    @property
    def n_waiting(self):
        return self._callmethod('__getattribute__', ('n_waiting',))
    @property
    def broken(self):
        return self._callmethod('__getattribute__', ('broken',))


class NamespaceProxy(BaseProxy):
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
    def __getattr__(self, key):
        if key[0] == '_':
            return object.__getattribute__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__getattribute__', (key,))
    def __setattr__(self, key, value):
        if key[0] == '_':
            return object.__setattr__(self, key, value)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__setattr__', (key, value))
    def __delattr__(self, key):
        if key[0] == '_':
            return object.__delattr__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__delattr__', (key,))


class ValueProxy(BaseProxy):
    _exposed_ = ('get', 'set')
    def get(self):
        return self._callmethod('get')
    def set(self, value):
        return self._callmethod('set', (value,))
    value = property(get, set)


BaseListProxy = MakeProxyType('BaseListProxy', (
    '__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
    '__mul__', '__reversed__', '__rmul__', '__setitem__',
    'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
    'reverse', 'sort', '__imul__'
    ))
class ListProxy(BaseListProxy):
    def __iadd__(self, value):
        self._callmethod('extend', (value,))
        return self
    def __imul__(self, value):
        self._callmethod('__imul__', (value,))
        return self


DictProxy = MakeProxyType('DictProxy', (
    '__contains__', '__delitem__', '__getitem__', '__len__',
    '__setitem__', 'clear', 'copy', 'get', 'has_key', 'items',
    'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'
    ))


ArrayProxy = MakeProxyType('ArrayProxy', (
    '__len__', '__getitem__', '__setitem__'
    ))


BasePoolProxy = MakeProxyType('PoolProxy', (
    'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
    'map', 'map_async', 'starmap', 'starmap_async', 'terminate',
    ))
BasePoolProxy._method_to_typeid_ = {
    'apply_async': 'AsyncResult',
    'map_async': 'AsyncResult',
    'starmap_async': 'AsyncResult',
    'imap': 'Iterator',
    'imap_unordered': 'Iterator'
    }
class PoolProxy(BasePoolProxy):
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.terminate()

#
# Definition of SyncManager
#

class SyncManager(BaseManager):
    '''
    Subclass of `BaseManager` which supports a number of shared object types.

    The types registered are those intended for the synchronization
    of threads, plus `dict`, `list` and `Namespace`.

    The `multiprocessing.Manager()` function creates started instances of
    this class.
    '''

SyncManager.register('Queue', queue.Queue)
SyncManager.register('JoinableQueue', queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore,
                     AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Barrier', threading.Barrier, BarrierProxy)
SyncManager.register('Pool', pool.Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)

# types returned by methods of PoolProxy
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
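
# A brief usage sketch (normally a started SyncManager is obtained via
# multiprocessing.Manager(); the attribute names below are illustrative):
#
#     from multiprocessing import Manager
#
#     with Manager() as manager:
#         d = manager.dict()
#         lst = manager.list(range(5))
#         ns = manager.Namespace()
#         ns.answer = 42
#         # d, lst and ns are proxies; operations on them are forwarded to
#         # the corresponding objects living in the manager process.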