# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def superpose(self, **kwargs):
    """Superpose the ensemble onto the reference coordinates obtained by
    :meth:`getCoords`.

    :arg trans: whether to apply translation during superposition;
        consumed by :class:`PDBEnsemble`'s ``_superpose``. Default is
        **True**
    :type trans: bool
    """
    apply_trans = kwargs.pop('trans', True)
    # Guard clauses: both reference coordinates and conformations must
    # be present before superposition can run.
    if self._coords is None:
        raise ValueError('coordinates are not set, use `setCoords`')
    confs = self._confs
    if confs is None or len(confs) == 0:
        raise ValueError('conformations are not set, use `addCoordset`')
    LOGGER.timeit('_prody_ensemble')
    # trans kwarg is used by PDBEnsemble
    self._superpose(trans=apply_trans)
    LOGGER.report('Superposition completed in %.2f seconds.',
                  '_prody_ensemble')
# NOTE(review): this fragment begins mid-function; `query`, `sequence`,
# `headers`, and `kwargs` are bound on earlier lines not visible here.
query.append(('QUERY', sequence))
query.append(('CMD', 'Put'))
# Polling interval and overall timeout in seconds (caller-overridable).
sleep = float(kwargs.pop('sleep', 2))
timeout = float(kwargs.pop('timeout', 120))
try:
    # Python 3: urlencode lives in urllib.parse and the POST body must be bytes.
    import urllib.parse
    urlencode = lambda data: bytes(urllib.parse.urlencode(data), 'utf-8')
except ImportError:
    # Python 2 fallback: urllib.urlencode returns a str directly.
    from urllib import urlencode
url = 'https://blast.ncbi.nlm.nih.gov/Blast.cgi'
data = urlencode(query)
LOGGER.timeit('_prody_blast')
LOGGER.info('Blast searching NCBI PDB database for "{0}..."'
            .format(sequence[:5]))
# Submit the query; the response HTML carries the request id ('RID =')
# needed later to poll for results.
handle = openURL(url, data=data, headers=headers)
html = handle.read()
index = html.find(b'RID =')
if index == -1:
    raise Exception('NCBI did not return expected response.')
else:
    # The RID token runs from just after 'RID =' to the end of that line.
    last = html.find(b'\n', index)
    rid = html[index + len('RID ='):last].strip()
# RTOE is NCBI's estimated time to completion, parsed the same way.
index = html.find(b'RTOE =')
if index == -1:
    rtoe = None # This is not used
else:  # NOTE(review): the else body continues beyond the visible lines.
def getRecord(self, url=None, localFile=False, **kwargs):
    """Fetch Dali search results either from a local file or from the
    Dali server.

    :arg url: path of a local results file when *localFile* is **True**,
        otherwise the URL to poll; defaults to ``self._url``
    :arg localFile: read *url* from disk instead of over HTTP.
        Default is **False**

    NOTE(review): this block appears truncated in the visible source;
    the polling loop continues beyond the last line shown here.
    """
    if localFile:
        # Results were saved previously; just read the file back.
        dali_file = open(url, 'r')
        data = dali_file.read()
        dali_file.close()
    else:
        import requests
        if url == None:
            url = self._url
        # Polling interval (seconds) and overall timeout for the search.
        sleep = 2
        timeout = kwargs.pop('timeout', 120)
        LOGGER.timeit('_dali')
        log_message = ''
        # Allow a few connection failures before giving up on retrying.
        try_error = 3
        while True:
            LOGGER.write('Connecting to Dali for search results...')
            LOGGER.clear()
            try:
                # html = urllib2.urlopen(url).read()
                html = requests.get(url).content
            except:
                # NOTE(review): bare except swallows all errors,
                # including KeyboardInterrupt — consider narrowing.
                try_error -= 1
                if try_error >= 0:
                    LOGGER.sleep(2, '. Connection error happened. Trying to reconnect...')
                    continue
                else:
                    # Retries exhausted: attempt one final fetch and let
                    # any exception propagate.
                    # html = urllib2.urlopen(url).read()
                    html = requests.get(url).content
:type turbo: bool, default is **True**
:arg hinges: Identify hinge sites after modes are computed.
:type hinges: bool, default is **True**
"""
# NOTE(review): fragment begins mid-docstring; the `def` line and the
# `n_modes`/`zeros` parameter docs are outside the visible lines.
if self._kirchhoff is None:
    raise ValueError('Kirchhoff matrix is not built or set')
# The string 'all' (any case) requests every available mode.
if str(n_modes).lower() == 'all':
    n_modes = None
assert n_modes is None or isinstance(n_modes, int) and n_modes > 0, \
    'n_modes must be a positive integer'
assert isinstance(zeros, bool), 'zeros must be a boolean'
assert isinstance(turbo, bool), 'turbo must be a boolean'
# Drop any previously computed modes before recomputing.
self._clear()
LOGGER.timeit('_gnm_calc_modes')
# Kirchhoff-based (GNM) modes are one value per node, hence is3d=False.
values, vectors, vars = solveEig(self._kirchhoff, n_modes=n_modes, zeros=zeros,
                                 turbo=turbo, is3d=False)
self._eigvals = values
self._array = vectors
self._vars = vars
self._trace = self._vars.sum()
self._n_modes = len(self._eigvals)
if hinges:
    self.calcHinges()
LOGGER.report('{0} modes were calculated in %.2fs.'
              .format(self._n_modes), label='_gnm_calc_modes')
# NOTE(review): fragment begins mid if/elif chain; `chain`, `title`,
# `title_suffix`, `subset`, `filename`, and `kwargs` are bound on
# earlier (unseen) lines.
elif len(chain) == 0:
    raise ValueError('chain must not be an empty string')
title_suffix = '_' + chain + title_suffix
# Reuse a caller-supplied AtomGroup when given, else create a fresh one.
if 'ag' in kwargs:
    ag = kwargs['ag']
    if not isinstance(ag, AtomGroup):
        raise TypeError('ag must be an AtomGroup instance')
    # Remember how many coordinate sets existed before parsing so the
    # report below counts only the newly parsed ones.
    n_csets = ag.numCoordsets()
else:
    ag = AtomGroup(title + title_suffix)
    n_csets = 0
pqr = openFile(filename, 'rt')
lines = pqr.readlines()
pqr.close()
LOGGER.timeit()
ag = _parsePDBLines(ag, lines, split=0, model=1, chain=chain,
                    subset=subset, altloc_torf=False, format='pqr')
# Return the populated AtomGroup, or None when nothing was parsed.
if ag.numAtoms() > 0:
    LOGGER.report('{0} atoms and {1} coordinate sets were '
                  'parsed in %.2fs.'.format(ag.numAtoms(),
                   ag.numCoordsets() - n_csets))
    return ag
else:
    return None
* **X** (Xaa) count is allocated to the twenty standard amino acids
* Joint probability of observing a pair of ambiguous amino acids is
  allocated to all potential combinations, e.g. probability of **XX**
  is allocated to 400 combinations of standard amino acids, similarly
  probability of **XB** is allocated to 40 combinations of *D* and *N*
  with the standard amino acids.
Selenocysteine (**U**, Sec) and pyrrolysine (**O**, Pyl) are considered
as distinct amino acids. When *ambiguity* is set **False**, all alphabet
characters as considered as distinct types. All non-alphabet characters
are considered as gaps.
"""
# NOTE(review): fragment begins mid-docstring; the `def` line is outside
# the visible lines. `getMSA`, `zeros`, and LOGGER come from module scope.
msa = getMSA(msa)
from .msatools import msapsicov
LOGGER.timeit('_psicov')
length = msa.shape[1]
# Square matrix, one row/column per MSA position, filled by the
# msapsicov extension (which also returns it).
pc = zeros((length, length), float)
pc = msapsicov(msa, pc, turbo=bool(turbo))
LOGGER.report('PC matrix was calculated in %.2fs.', '_psicov')
return pc
:type zeros: bool, default is **True**
:arg turbo: Use a memory intensive, but faster way to calculate modes.
:type turbo: bool, default is **True**
"""
# NOTE(review): fragment begins mid-docstring; the `def` line and the
# `n_modes` parameter docs are outside the visible lines.
if self._hessian is None:
    raise ValueError('Hessian matrix is not built or set')
# The string 'all' (any case) requests every available mode.
if str(n_modes).lower() == 'all':
    n_modes = None
assert n_modes is None or isinstance(n_modes, int) and n_modes > 0, \
    'n_modes must be a positive integer'
assert isinstance(zeros, bool), 'zeros must be a boolean'
assert isinstance(turbo, bool), 'turbo must be a boolean'
# Drop any previously computed modes before recomputing.
self._clear()
LOGGER.timeit('_anm_calc_modes')
# Hessian-based (ANM) modes are 3-D vectors per node, hence is3d=True.
values, vectors, vars = solveEig(self._hessian, n_modes=n_modes, zeros=zeros,
                                 turbo=turbo, is3d=True)
self._eigvals = values
self._array = vectors
self._vars = vars
self._trace = self._vars.sum()
self._n_modes = len(self._eigvals)
LOGGER.report('{0} modes were calculated in %.2fs.'
              .format(self._n_modes), label='_anm_calc_modes')
# NOTE(review): this fragment begins mid-function; `query`, `sequence`,
# `cycle`, `headers`, and `kwargs` are bound on earlier (unseen) lines.
try:
    # Python 3: urlencode lives in urllib.parse and the POST body must be bytes.
    import urllib.parse
    urlencode = lambda data: bytes(urllib.parse.urlencode(data), 'utf-8')
except ImportError:
    # Python 2 fallback: urllib.urlencode returns a str directly.
    from urllib import urlencode
# Polling interval and overall timeout in seconds (caller-overridable).
sleep = float(kwargs.pop('sleep', 2))
timeout = float(kwargs.pop('timeout', 120))
data = urlencode(query)
# submit the job
base_url = 'http://www.ebi.ac.uk/Tools/services/rest/psiblast/'
url = base_url + 'run/'
LOGGER.timeit('_prody_psi-blast')
if cycle == 0:
    LOGGER.info('PSI-Blast searching PDB database for "{0}..."'
                .format(sequence[:5]))
else:
    LOGGER.info('PSI-Blast searching PDB database, cycle={0}'
                .format(cycle))
# The EBI REST service replies with a job id used for status polling.
handle = openURL(url, data=data, headers=headers)
job_id = handle.read()
handle.close()
# check the status
url = base_url + 'status/' + job_id
handle = openURL(url)
status = handle.read()
handle.close()
def superpose(self, **kwargs):
    """Superpose the ensemble onto the reference coordinates.

    :arg ref: index of the reference coordinate. If **None**, the average
        coordinate will be assumed as the reference. Default is **None**
    :type ref: int
    """
    ref_index = kwargs.pop('ref', None)
    # Guard clauses: both reference coordinates and conformations must
    # be present before superposition can run.
    if self._coords is None:
        raise ValueError('coordinates are not set, use `setCoords`')
    conformations = self._confs
    if conformations is None or len(conformations) == 0:
        raise ValueError('conformations are not set, use `addCoordset`')
    LOGGER.timeit('_prody_ensemble')
    # ref selects which conformation serves as the superposition
    # reference (None -> average coordinates).
    self._superpose(ref=ref_index)
    LOGGER.report('Superposition completed in %.2f seconds.',
                  '_prody_ensemble')