# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
n_csets = 0
if model != 0:
LOGGER.timeit()
try:
lines = stream.readlines()
except AttributeError as err:
try:
lines = stream.read().split('\n')
except AttributeError:
raise err
if not len(lines):
raise ValueError('empty PDB file or stream')
ag = _parseCIFLines(ag, lines, model, chain, subset, altloc)
if ag.numAtoms() > 0:
LOGGER.report('{0} atoms and {1} coordinate set(s) were '
'parsed in %.2fs.'.format(ag.numAtoms(),
ag.numCoordsets() - n_csets))
else:
ag = None
LOGGER.warn('Atomic data could not be parsed, please '
'check the input file.')
return ag
title_suffix = kwargs.get('title_suffix','')
atomgroup = AtomGroup(str(kwargs.get('title', 'Unknown')) + title_suffix)
atomgroup._n_atoms = n_nodes
if make_nodes:
LOGGER.info('Building coordinates from electron density map. This may take a while.')
LOGGER.timeit()
if map:
emd, atomgroup = _parseEMDLines(atomgroup, stream, cutoff=cutoff, n_nodes=n_nodes, \
num_iter=num_iter, map=map, make_nodes=make_nodes)
else:
atomgroup = _parseEMDLines(atomgroup, stream, cutoff=cutoff, n_nodes=n_nodes, \
num_iter=num_iter, map=map, make_nodes=make_nodes)
LOGGER.report('{0} atoms and {1} coordinate sets were '
'parsed in %.2fs.'.format(atomgroup.numAtoms(), atomgroup.numCoordsets()))
else:
emd = _parseEMDLines(atomgroup, stream, cutoff=cutoff, n_nodes=n_nodes, \
num_iter=num_iter, map=map, make_nodes=make_nodes)
if make_nodes:
if map:
return emd, atomgroup
else:
return atomgroup
else:
return emd
log_message = '(Dali search is queued)...'
elif html.find('Status: Running') > -1:
log_message = '(Dali search is running)...'
elif html.find('Your job') == -1 and html.find('.txt') > -1:
break
elif html.find('ERROR:') > -1:
LOGGER.warn(': Dali search reported an ERROR!')
return False
sleep = 20 if int(sleep * 1.5) >= 20 else int(sleep * 1.5)
if LOGGER.timing('_dali') > timeout:
LOGGER.warn(': Dali search has timed out. \nThe results can be obtained later using the getRecord() method.')
return False
LOGGER.sleep(int(sleep), 'to reconnect to Dali '+log_message)
LOGGER.clear()
LOGGER.clear()
LOGGER.report('Dali results were fetched in %.1fs.', '_dali')
lines = html.strip().split('\n')
file_name = re.search('=.+-90\\.txt', html).group()[1:]
file_name = file_name[:-7]
# LOGGER.info(url+file_name+self._subset+'.txt')
# data = urllib2.urlopen(url+file_name+self._subset+'.txt').read()
data = requests.get(url+file_name+self._subset+'.txt').content
if PY3K:
data = data.decode()
localfolder = kwargs.pop('localfolder', '.')
if file_name.lower().startswith('s001'):
temp_name = self._pdbId + self._chain
else:
temp_name = file_name
temp_name += self._subset + '_dali.txt'
if localfolder != '.' and not os.path.exists(localfolder):
chi_list.append(chi)
frames_list.append(j)
if(chi < max_chi):
max_chi = chi
mod_num = i
writePDB(out_pdb_file, calphas)
# extendModel(calphas, 'calphas', protein)
# writePDB('best_model.pdb', protein)
#Reset coordinates to the original values
calphas.setCoords(origCoords)
sys.stdout.write('#')
sys.stdout.flush()
sys.stdout.write("\n")
LOGGER.report('SAXS profile calculations were performed in %2fs.', '_intplt_mode')
return chi_list, frames_list
res_j33 = res_j3+3
super_element = np.outer(i2j, i2j) * (- g / dist2)
hessian[res_i3:res_i33, res_j3:res_j33] = super_element
hessian[res_j3:res_j33, res_i3:res_i33] = super_element
hessian[res_i3:res_i33, res_i3:res_i33] = \
hessian[res_i3:res_i33, res_i3:res_i33] - super_element
hessian[res_j3:res_j33, res_j3:res_j33] = \
hessian[res_j3:res_j33, res_j3:res_j33] - super_element
# hessian updates
from .bbenmtools import buildhessian
buildhessian(coords, hessian, natoms,
float(cutoff), float(gamma),)
LOGGER.report('Hessian was built in %.2fs.', label='_bbenm')
LOGGER.debug('Retrieving Pfam search results: ' + url)
xml = None
while LOGGER.timing('_pfam') < timeout:
try:
xml = openURL(url, timeout=timeout).read()
except Exception:
pass
else:
if xml not in ['PEND','RUN']:
break
if not xml:
raise IOError('Pfam search timed out or failed to parse results '
'XML, check URL: ' + url)
else:
LOGGER.report('Pfam search completed in %.2fs.', '_pfam')
if xml.find(b'There was a system error on your last request.') > 0:
LOGGER.warn('No Pfam matches found for: ' + seq)
return None
try:
root = ET.XML(xml)
except Exception as err:
raise ValueError('failed to parse results XML, check URL: ' + url)
if len(seq) >= MINSEQLEN:
try:
xml_matches = root[0][0][0][0]
except IndexError:
raise ValueError('failed to parse results XML, check URL: ' + url)
else:
membrane = array(membrane)
if len(membrane) == 0:
self._membrane = None
LOGGER.warn('no membrane is built. The protein should be transformed to the correct origin as in OPM')
return coords
else:
self._membrane = AtomGroup(title="Membrane")
self._membrane.setCoords(membrane)
self._membrane.setResnums(range(atm))
self._membrane.setResnames(["NE1" for i in range(atm)])
self._membrane.setChids(["Q" for i in range(atm)])
self._membrane.setElements(["Q1" for i in range(atm)])
self._membrane.setNames(["Q1" for i in range(atm)])
LOGGER.report('Membrane was built in %2.fs.', label='_membrane')
coords = self._combineMembraneProtein(atoms)
return coords
j += i_p1
g = gamma(dist2, i, j)
res_j3 = j*3
res_j33 = res_j3+3
super_element = np.outer(i2j, i2j) * (- g / dist2)
total_hessian[res_i3:res_i33, res_j3:res_j33] = super_element
total_hessian[res_j3:res_j33, res_i3:res_i33] = super_element
total_hessian[res_i3:res_i33, res_i3:res_i33] = total_hessian[res_i3:res_i33, res_i3:res_i33] - super_element
total_hessian[res_j3:res_j33, res_j3:res_j33] = total_hessian[res_j3:res_j33, res_j3:res_j33] - super_element
ss = total_hessian[:natoms*3, :natoms*3]
so = total_hessian[:natoms*3, natoms*3:]
os = total_hessian[natoms*3:,:natoms*3]
oo = total_hessian[natoms*3:, natoms*3:]
self._hessian = ss - np.dot(so, np.dot(linalg.inv(oo), os))
LOGGER.report('Hessian was built in %.2fs.', label='_exanm')
self._dof = self._hessian.shape[0]
coord_I=coords[pairInd_I]
for j in xrange (i+1, numCalphas):
pairInd_J=j
coord_J=coords[pairInd_J]
ind_3j=pairInd_J*3
R_ij_sup_0=[(coord_J[0]-coord_I[0]), (coord_J[1]-coord_I[1]), (coord_J[2]-coord_I[2])]
result=0.0
R_ij_sup_0_normalized_vec=R_ij_sup_0/np.linalg.norm(R_ij_sup_0)
calcSM(numCalphas, n_modes, ind_3i, ind_3j, R_ij_sup_0_normalized_vec, inv_sqrt_eigvals, eigvals, eigvecs_flat, result)
stiffness_matrix[i][j]=result
stiffness_matrix[j][i]=result
LOGGER.report('Stiffness matrix calculated in %.2lfs.', label='_sm')
return stiffness_matrix