Skip to content

Commit

Permalink
bindings/python/cntk/distributed.py: dynamic load for libmpi.so.12, same as CNTK binary links against
Browse files Browse the repository at this point in the history
  • Loading branch information
mahilleb-msft committed Nov 3, 2016
1 parent a60692c commit b8b93d7
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions bindings/python/cntk/distributed.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,13 @@
from . import trainer
from .utils import typemap

# Preload libmpi.so for non-Windows platform to work around MPI_Init failure bug
# Preload libmpi.so.12 for non-Windows platform to work around MPI_Init failure bug
# https://xrunhprof.wordpress.com/2014/11/04/an-openmpi-python-and-dlopen-issue/
# If other OS has similar OpenMPI MPI_Init failure, add dll load to global here
import platform
import ctypes
if platform.system() == 'Linux':
ctypes.CDLL("libmpi.so", mode=ctypes.RTLD_GLOBAL)
ctypes.CDLL("libmpi.so.12", mode=ctypes.RTLD_GLOBAL)

__doc__= '''\
Distributed trainers manage trainers in distributed environment.
Expand Down Expand Up @@ -118,4 +118,4 @@ def data_parallel_distributed_trainer(communicator, use_async_buffered_parameter
if (isinstance(communicator, QuantizedCommunicator)):
return cntk_py.create_quantized_data_parallel_distributed_trainer(communicator, use_async_buffered_parameter_update)
else:
return cntk_py.create_data_parallel_distributed_trainer(communicator, use_async_buffered_parameter_update)
return cntk_py.create_data_parallel_distributed_trainer(communicator, use_async_buffered_parameter_update)

0 comments on commit b8b93d7

Please sign in to comment.