# NOTE: Proximity, ProximityGUI and conf_specs are defined earlier in this module.
import os
import signal

import gtk
from configobj import ConfigObj
from validate import Validator

# react on ^C
signal.signal(signal.SIGINT, signal.SIG_DFL)

# read config if any
new_config = False
try:
    config = ConfigObj(os.getenv('HOME') + '/.blueproximityrc', {'create_empty': False, 'file_error': True, 'configspec': conf_specs})
except Exception:
    new_config = True
if new_config:
    config = ConfigObj(os.getenv('HOME') + '/.blueproximityrc', {'create_empty': True, 'file_error': False, 'configspec': conf_specs})
    # next line fixes a problem with creating empty strings in default values for configobj
    config['device_mac'] = ''

# validate against the configspec and persist any defaults that were filled in
vdt = Validator()
config.validate(vdt, copy=True)
config.write()

# start the proximity monitor and its GUI
p = Proximity(config)
p.start()
pGui = ProximityGUI(p, config, new_config)

# make GTK threadable
gtk.gdk.threads_init()
gtk.main()
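# The startup code above assumes a `conf_specs` list defined earlier in the module,
# which ConfigObj validates the configuration against. The entries below are only an
# illustrative sketch of what such a configspec could contain, not the full
# BlueProximity spec; option names other than device_mac are assumptions.
conf_specs_example = [
    "device_mac = string(default='')",            # MAC address of the paired phone
    "lock_distance = integer(0, 127, default=7)",
    "lock_duration = integer(0, 120, default=6)",
    "unlock_distance = integer(0, 127, default=4)",
    "unlock_duration = integer(0, 120, default=1)",
]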
def parse_changes_per_file_in(git_diff):
    files_with_changes = {}
    file_name = None
    for line in git_diff.split("\n"):
        # read ahead until we find the diff header for a file:
        new_file = maybe_new_module(line)
        if new_file:
            file_name = new_file
        # once we have the diff of a file, accumulate its changes:
        if file_name:
            change = changed_line(line)
            proximity.record_change_to(file_name, change, files_with_changes)
    return files_with_changes
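# parse_changes_per_file_in relies on two helpers that are not shown in this snippet:
# maybe_new_module, which detects the header that starts a new file's diff, and
# changed_line, which classifies a single diff line. The versions below are assumed,
# simplified implementations for illustration, not the originals.
import re

def maybe_new_module(line):
    # A "diff --git a/<path> b/<path>" header marks the start of a new file's diff.
    match = re.match(r"^diff --git a/(.+) b/(.+)$", line)
    return match.group(2) if match else None

def changed_line(line):
    # Treat added or removed lines as changes, ignoring the +++/--- file markers.
    if line.startswith("+") and not line.startswith("+++"):
        return 1
    if line.startswith("-") and not line.startswith("---"):
        return 1
    return 0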
# Excerpt from a CIFAR-10 training script; args, num_classes, use_gpu, trainset,
# transform_test, resnet, Proximity and Con_Proximity are defined earlier in it.
import torch
import torch.nn as nn
import torchvision

trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.train_batch, pin_memory=True,
                                          shuffle=True, num_workers=args.workers)
testset = torchvision.datasets.CIFAR10(root='./data/cifar10', train=False,
                                       download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(testset, batch_size=args.test_batch, pin_memory=True,
                                         shuffle=False, num_workers=args.workers)

# Loading the Model
model = resnet(num_classes=num_classes, depth=110)
if use_gpu:
    model = nn.DataParallel(model).cuda()

# Cross-entropy plus proximity / contrastive-proximity losses on the
# 1024- and 256-dimensional feature layers
criterion_xent = nn.CrossEntropyLoss()
criterion_prox_1024 = Proximity(num_classes=num_classes, feat_dim=1024, use_gpu=use_gpu)
criterion_prox_256 = Proximity(num_classes=num_classes, feat_dim=256, use_gpu=use_gpu)
criterion_conprox_1024 = Con_Proximity(num_classes=num_classes, feat_dim=1024, use_gpu=use_gpu)
criterion_conprox_256 = Con_Proximity(num_classes=num_classes, feat_dim=256, use_gpu=use_gpu)

# One optimizer for the model and one per loss module (the proximity losses
# carry their own learnable parameters)
optimizer_model = torch.optim.SGD(model.parameters(), lr=args.lr_model, weight_decay=1e-04, momentum=0.9)
optimizer_prox_1024 = torch.optim.SGD(criterion_prox_1024.parameters(), lr=args.lr_prox)
optimizer_prox_256 = torch.optim.SGD(criterion_prox_256.parameters(), lr=args.lr_prox)
optimizer_conprox_1024 = torch.optim.SGD(criterion_conprox_1024.parameters(), lr=args.lr_conprox)
optimizer_conprox_256 = torch.optim.SGD(criterion_conprox_256.parameters(), lr=args.lr_conprox)

# Warm-start from the softmax-trained baseline checkpoint
filename = 'Models_Softmax/CIFAR10_Softmax.pth.tar'
checkpoint = torch.load(filename)
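# The snippet above stops right after torch.load. What would typically follow is
# restoring the softmax-trained weights and stepping all five optimizers against a
# combined objective. This is only a rough sketch under stated assumptions: the
# checkpoint layout ({'state_dict': ...}), the model returning
# (features_256, features_1024, logits), and the loss signs/weights are all assumed
# here, not taken from the original training script.
model.load_state_dict(checkpoint['state_dict'])

optimizers = [optimizer_model, optimizer_prox_1024, optimizer_prox_256,
              optimizer_conprox_1024, optimizer_conprox_256]

for inputs, targets in trainloader:
    inputs, targets = inputs.cuda(), targets.cuda()
    feats_256, feats_1024, outputs = model(inputs)   # assumed forward signature
    loss = (criterion_xent(outputs, targets)
            + criterion_prox_1024(feats_1024, targets)
            + criterion_prox_256(feats_256, targets)
            - criterion_conprox_1024(feats_1024, targets)
            - criterion_conprox_256(feats_256, targets))
    for opt in optimizers:
        opt.zero_grad()
    loss.backward()
    for opt in optimizers:
        opt.step()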
def read_proximities_from(revision_range):
    start_rev, end_rev = revision_range
    revs = git_interactions.read_revs(start_rev, end_rev)
    proximities = []
    # walk the history pairwise: diff each revision against its successor
    for i in range(len(revs) - 1):
        first_revision = revs[i]
        revision_to_compare = revs[i + 1]
        git_diff = git_interactions.read_diff_for(first_revision, revision_to_compare)
        changes = parse_changes_per_file_in(git_diff)
        proximities.append(proximity.calc_proximity(changes))
    return proximities
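# Minimal usage sketch for read_proximities_from; the revision identifiers are
# placeholders, substitute real commit hashes or tags from the repository at hand.
if __name__ == "__main__":
    proximities = read_proximities_from(("HEAD~10", "HEAD"))
    for p in proximities:
        print(p)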