Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
.. note::
Please make sure the uploaded python environment matches the platform that the cluster
is running on and that the python version is 3.5 or higher.
.. note::
The python udf worker depends on Apache Beam (version == 2.15.0),
Pip (version >= 7.1.0) and SetupTools (version >= 37.0.0).
Please ensure that the specified environment meets the above requirements.
:param python_exec: The path of python interpreter.
:type python_exec: str
"""
self.get_configuration().set_string(DependencyManager.PYTHON_EXEC, python_exec)
def __init__(self, j_tenv, is_blink_planner, serializer=None):
    """
    Initializes the table environment wrapper.

    :param j_tenv: The Java TableEnvironment object this instance delegates to.
    :param is_blink_planner: True if the blink planner is in use.
    :param serializer: Serializer used for Python objects; defaults to a fresh
                       ``PickleSerializer`` instance when not provided.
    """
    self._j_tenv = j_tenv
    self._is_blink_planner = is_blink_planner
    # Avoid a shared mutable default argument: the original default
    # ``PickleSerializer()`` was created once at definition time and shared
    # across every instance. Create a fresh serializer per instance instead.
    self._serializer = PickleSerializer() if serializer is None else serializer
    # DependencyManager wires Python dependency configuration into the
    # underlying Java environment, then picks up settings from os.environ.
    self._dependency_manager = DependencyManager(self.get_config().get_configuration(),
                                                 self._get_j_env())
    self._dependency_manager.load_from_env(os.environ)
def get_python_executable(self):
    """
    Returns the path of the python interpreter used to run the python udf
    workers, or ``None`` when no path has been configured.

    :return: The configured python interpreter path, or None if unset.
    :rtype: str
    """
    configuration = self.get_configuration()
    return configuration.get_string(DependencyManager.PYTHON_EXEC, None)