def test_DASH(hook, workers):
"""
Test DASH (Distributed Association Scan Hammer), i.e. distributed linear regression for genetics with SMPC
"""
bob = workers["bob"]
alice = workers["alice"]
james = workers["james"]
crypto_prov = sy.VirtualWorker(hook, id="crypto_prov")
hbc_worker = sy.VirtualWorker(hook, id="hbc_worker")
###### Simulate data ######
torch.manual_seed(0) # Truncation might not always work so we set the random seed
K = 2 # Number of permanent covariates
M = 5 # Number of transient covariates
# Alice
N1 = 100
y1 = torch.randn(N1).send(alice)
X1 = torch.randn(N1, M).send(alice)
C1 = torch.randn(N1, K).send(alice)
# Bob
N2 = 200
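# Assumed continuation for Bob, mirroring Alice's block above (the original
# snippet is truncated here; the variable names simply follow its pattern):
y2 = torch.randn(N2).send(bob)
X2 = torch.randn(N2, M).send(bob)
C2 = torch.randn(N2, K).send(bob)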
def test_section_3_securing_fl(hook):
"""This tests the Udacity course content found at
https://github.com/Udacity/private-ai
"""
import syft as sy
import torch as th
# hook = sy.TorchHook(th)
from torch import nn, optim
# create a couple workers
bob = sy.VirtualWorker(hook, id="bob_udacity_3")
alice = sy.VirtualWorker(hook, id="alice_udacity_3")
secure_worker = sy.VirtualWorker(hook, id="secure_worker_udacity_3")
bob.add_workers([alice, secure_worker])
alice.add_workers([bob, secure_worker])
secure_worker.add_workers([alice, bob])
# A Toy Dataset
data = th.tensor([[0, 0], [0, 1], [1, 0], [1, 1.0]], requires_grad=True)
target = th.tensor([[0], [0], [1], [1.0]], requires_grad=True)
# get pointers to training data on each worker by
# sending some training data to bob and alice
bobs_data = data[0:2].send(bob)
bobs_target = target[0:2].send(bob)
alices_data = data[2:].send(alice)
alices_target = target[2:].send(alice)
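# A minimal sketch of where this section is headed (assumed continuation; the
# training details are illustrative, not the original test body): each worker
# trains a copy of the model, both copies move to the trusted secure_worker,
# and the weights are averaged there without exposing the raw gradients.
model = nn.Linear(2, 1)
bobs_model = model.copy().send(bob)
alices_model = model.copy().send(alice)
# ... train bobs_model on bobs_data, alices_model on alices_data ...
bobs_model.move(secure_worker)
alices_model.move(secure_worker)
model.weight.data.set_(((alices_model.weight.data + bobs_model.weight.data) / 2).get())
model.bias.data.set_(((alices_model.bias.data + bobs_model.bias.data) / 2).get())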
def test_fl_with_trainconfig(isolated_filesystem, start_remote_server_worker_only, hook):
os.chdir("advanced/Federated Learning with TrainConfig/")
notebook = "Introduction to TrainConfig.ipynb"
p_name = Path("examples/tutorials/advanced/Federated Learning with TrainConfig/")
not_excluded_notebooks.remove(p_name / notebook)
hook.local_worker.remove_worker_from_registry("alice")
kwargs = {"id": "alice", "host": "localhost", "port": 8777, "hook": hook}
data = torch.tensor([[0.0, 1.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]], requires_grad=True)
target = torch.tensor([[1.0], [1.0], [0.0], [0.0]], requires_grad=False)
dataset = sy.BaseDataset(data, target)
process_remote_worker = start_remote_server_worker_only(dataset=(dataset, "xor"), **kwargs)
res = pm.execute_notebook(notebook, "/dev/null", timeout=300)
assert isinstance(res, nbformat.notebooknode.NotebookNode)
process_remote_worker.terminate()
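# re-register a local virtual "alice" so later tests that expect her in the
# worker registry are unaffected by the removal above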
sy.VirtualWorker(id="alice", hook=hook, is_client_worker=False)
def setUp(self):
hook = sy.TorchHook(torch, verbose=True)
self.me = hook.local_worker
self.me.is_client_worker = True
instance_id = str(sy.ID_PROVIDER.pop())
bob = sy.VirtualWorker(id=f"bob{instance_id}", hook=hook, is_client_worker=False)
alice = sy.VirtualWorker(id=f"alice{instance_id}", hook=hook, is_client_worker=False)
james = sy.VirtualWorker(id=f"james{instance_id}", hook=hook, is_client_worker=False)
bob.add_workers([alice, james])
alice.add_workers([bob, james])
james.add_workers([bob, alice])
self.hook = hook
self.bob = bob
self.alice = alice
self.james = james
# A Toy Dataset
data = torch.tensor([[0, 0], [0, 1], [1, 0], [1, 1.0]], requires_grad=True)
target = torch.tensor([[0], [0], [1], [1.0]], requires_grad=True)
# get pointers to training data on each worker by
# sending some training data to bob and alice
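# (assumed continuation, mirroring the earlier Udacity snippet)
bobs_data = data[0:2].send(bob)
bobs_target = target[0:2].send(bob)
alices_data = data[2:].send(alice)
alices_target = target[2:].send(alice)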
def test_create_already_existing_worker(hook):
# Shares tensor with bob
bob = sy.VirtualWorker(hook, "bob")
x = th.tensor([1, 2, 3]).send(bob)
# Recreates bob and shares a new tensor
bob = sy.VirtualWorker(hook, "bob")
y = th.tensor([2, 2, 2]).send(bob)
# Recreates bob and shares a new tensor
bob = sy.VirtualWorker(hook, "bob")
z = th.tensor([2, 2, 10]).send(bob)
# Both workers should be the same, so the following operation should be valid
_ = x + y * z
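# hypothetical follow-up (not in the original snippet): the result still lives
# on bob, so it can be fetched and checked explicitly
# assert (x + y * z).get().tolist() == [5, 6, 23]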
def force_detail(worker: AbstractWorker, worker_tuple: tuple) -> tuple:
worker_id, _objects, auto_add = worker_tuple
worker_id = sy.serde._detail(worker, worker_id)
result = sy.VirtualWorker(sy.hook, worker_id, auto_add=auto_add)
_objects = sy.serde._detail(worker, _objects)
result._objects = _objects
# make sure they weren't accidentally double registered
for _, obj in _objects.items():
if obj.id in worker._objects:
del worker._objects[obj.id]
return result
def _maybe_create_worker(worker_name: str = 'worker', virtual_worker_id: str = 'grid'):
worker = db.get(worker_name)
if worker is None:
worker = sy.VirtualWorker(hook, virtual_worker_id, auto_add=False)
print("\t \nCREATING NEW WORKER!!")
else:
worker = sy.serde.deserialize(worker)
print("\t \nFOUND OLD WORKER!! " + str(worker._objects.keys()))
return worker
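# Hypothetical companion helper (not part of the original snippet), assuming
# the key-value store exposes a `set` that mirrors the `db.get` call above:
def _save_worker(worker, worker_name: str = "worker"):
    db.set(worker_name, sy.serde.serialize(worker))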
def __init__(self, user: UserAuthentication, active=True):
""" Handle session with User Authentication.
Args:
user (UserAuthentication) : User instance.
active (bool) : Session state.
"""
self.id = uuid.uuid5(uuid.NAMESPACE_DNS, UserSession.NAMESPACE_DNS)
self.user = user # PyGrid UserAuthentication object
self.tensor_requests = list()
# If it is the first session of this user at this node.
if user.username not in hook.local_worker._known_workers:
node_name = user.username + "_" + str(local_worker.id)
self.node = sy.VirtualWorker(hook, id=node_name)
else:
self.node = hook.local_worker._known_workers[user.username]
self.active = active
elif isinstance(obj, np.ndarray):
    return obj.ser(private=private_local, to_json=False)
# Iterables non json-serializable
elif isinstance(obj, (tuple, set, bytearray, range)):
    key = get_serialized_key(obj)
    return {key: [self.python_encode(i, private_local) for i in obj]}
# Slice
elif isinstance(obj, slice):
    key = get_serialized_key(obj)
    return {key: {"args": [obj.start, obj.stop, obj.step]}}
# Ellipsis
elif isinstance(obj, type(...)):
    return "..."
# Generator
elif isinstance(obj, types.GeneratorType):
    logging.warning("Generator args can't be transmitted")
    return []
# Worker
elif isinstance(obj, (sy.SocketWorker, sy.VirtualWorker)):
    return {"__worker__": obj.id}
# Unhandled type: raise an error instead of silently dropping the object
else:
    raise ValueError("Unhandled type", type(obj))
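# A minimal decode-side sketch (an assumption for illustration, not the
# library's actual decoder): a {"__worker__": id} entry produced above is
# resolved back to a worker object through the local worker's registry.
def python_decode_worker(encoded: dict):
    return sy.hook.local_worker.get_worker(encoded["__worker__"])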