def download_and_save_mesh(self, region, obj_path):
    print(f"Downloading mesh data for {region}")
    path = self.structures.loc[self.structures.name == region].file.values[0]
    if region != "root" and not path:
        # `not path` already covers None, so a single check suffices
        print(f"Could not find mesh for {region}")
        return None
    if region == "root":
        complete_url = f"{self._base_url}/{self._url_paths['brain']}"
        req = request(complete_url).json()
        fp = req["brain"]["outline"]["file"].replace("\\", "/")
        url = f"{self._base_url}/{fp}"
    else:
        url = f"{self._base_url}/{path}".replace("\\", "/")
    req = request(url)

    # download and parse the mesh vertex data
    data = [float(v) for v in req.content.decode("utf-8").split("\n") if v]
    data = np.array(data).reshape((-1, 3))  # x, y, z coordinates of each vertex
    data = pd.DataFrame(dict(
        x=data[:, 0],
        y=data[:, 1],
        z=data[:, 2],
    ))
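# Standalone sketch of the parsing step above: a newline-separated string of
# floats is reshaped into one row per vertex with x/y/z columns. The sample
# string below is made up for illustration.
import numpy as np
import pandas as pd

raw = "0.0\n1.0\n2.0\n3.0\n4.0\n5.0"
vals = np.array([float(v) for v in raw.split("\n") if v]).reshape((-1, 3))
df = pd.DataFrame(dict(x=vals[:, 0], y=vals[:, 1], z=vals[:, 2]))
print(df)  # two vertices: (0, 1, 2) and (3, 4, 5)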
def download_and_write_mesh(self, acronym, obj_path):
    print(f"Downloading mesh data for {acronym}")
    path = self.structures.loc[
        self.structures.acronym == acronym
    ].obj_path.values[0]
    url = f"{self._url_paths['data']}/{path}"

    # download and write the .obj file
    mesh_data = request(url).content.decode("utf-8").split("\n")
    with open(obj_path, "w") as f:
        for md in mesh_data:
            f.write(f"{md}\n")
    # no explicit f.close() needed: the context manager closes the file

    # return the vtk actor
    return load(obj_path)
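# Usage sketch (not from the original source): assumes `atlas` is an
# instance of the class above with its `structures` table populated, and
# that `load`/`show` come from vedo. "AL" is a hypothetical acronym.
from vedo import show

actor = atlas.download_and_write_mesh("AL", "AL.obj")
show(actor)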
"""
Given a gene_symbol it returns the list of ISH
experiments for this gene
:param gene_symbol: str, self.genes.gene_symbol
"""
if not isinstance(gene_symbol, str):
if isinstance(gene_symbol, int): # it's an ID, get symbol
gene_symbol = self.get_gene_symbol_by_id(gene_symbol)
if gene_symbol is None:
raise ValueError("Invalid gene_symbol argument")
else:
raise ValueError("Invalid gene_symbol argument")
url = self.gene_experiments_url.replace("-GENE_SYMBOL-", gene_symbol)
data = request(url).json()["msg"]
if not len(data):
print(f"No experiment found for gene {gene_symbol}")
return None
else:
return [d["id"] for d in data]
def get_structures_hierarchy(self):
    # NOTE: the opening of this method and of the nested helper are missing
    # from the source snippet; the signatures below are inferred from the
    # recursive call and from how `self` is used further down
    def add_descendants_to_tree(tree, structure, parent_id=None):
        if parent_id is not None:
            tree.create_node(
                tag=structure["name"],
                identifier=structure["id"],
                parent=parent_id,
            )
        else:
            tree.create_node(
                tag=structure["name"], identifier=structure["id"],
            )
        if "children" not in structure.keys():
            return
        if structure["children"]:
            for child in structure["children"]:
                add_descendants_to_tree(tree, child, structure["id"])

    structures_hierarchy = request(
        self._url_paths["structures_tree"]
    ).json()

    tree = Tree()
    tree.create_node(
        tag="root", identifier=0,
    )
    for supercategory in structures_hierarchy:
        add_descendants_to_tree(tree, supercategory, 0)

    self.structures_hierarchy = tree
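# Usage sketch (hypothetical instance name): `Tree` here is treelib's Tree,
# so the populated hierarchy can be printed as an ASCII tree.
atlas.get_structures_hierarchy()
atlas.structures_hierarchy.show()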
def get_all_genes(self):
    """
    Download metadata about all the genes available
    in the Allen gene expression dataset.
    """
    res = request(self.all_genes_url)
    return pd.DataFrame(res.json()["msg"])
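# Usage sketch (hypothetical instance name): the result is a plain pandas
# DataFrame, so normal inspection and filtering apply.
genes = geapi.get_all_genes()
print(genes.shape)
print(genes.head())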
def download_streamlines(self, eids, streamlines_folder=None):
    # NOTE: the `def` line and the docstring opening are missing from the
    # source snippet; the signature is inferred from the parameter docs
    """
    Download streamlines JSON files for a set of experiments.

    :param eids: list of integers with experiment IDs
    :param streamlines_folder: str, path to the folder where the JSON files
        should be saved; if None the default is used (Default value = None)
    """
    if streamlines_folder is None:
        streamlines_folder = self.streamlines_cache

    if not isinstance(eids, (list, np.ndarray, tuple)):
        eids = [eids]

    filepaths, data = [], []
    for eid in tqdm(eids):
        url = self.make_url_given_id(eid)
        jsonpath = os.path.join(streamlines_folder, str(eid) + ".json")
        filepaths.append(jsonpath)
        if not os.path.isfile(jsonpath):
            response = request(url)

            # Write the response content as a temporary compressed file
            temp_path = os.path.join(streamlines_folder, "temp.gz")
            with open(temp_path, "wb") as temp:
                temp.write(response.content)

            # Open in pandas and delete temp
            url_data = pd.read_json(temp_path, lines=True, compression="gzip")
            os.remove(temp_path)

            # save json
            url_data.to_json(jsonpath)

            # append to lists and return
            data.append(url_data)
        else:
            # the else branch is truncated in the source snippet; loading
            # the previously cached file is the inferred behaviour
            data.append(pd.read_json(jsonpath))

    return filepaths, data  # inferred from the "append to lists and return" comment above
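# Usage sketch (hypothetical instance and experiment ID): downloads and
# caches the streamline JSON files, returning both the file paths and the
# parsed DataFrames.
filepaths, data = api.download_streamlines([12345])
print(filepaths[0])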
def get_structures_metadata(self):
    # NOTE: the opening of this method is missing from the source snippet;
    # the name, the `metadata` dict and the two outer loops below are
    # inferred from how `metadata`, `regions`, `region` and `subreg` are
    # used in the surviving lines. `regions` is assumed to hold the parsed
    # brain-regions JSON fetched by the missing lines.
    metadata = dict(name=[], parent=[], children=[], color=[], file=[])
    for region in regions["brain_regions"]:
        for subreg in region["sub_regions"]:
            metadata["name"].append(subreg["name"])
            metadata["parent"].append(region["name"])
            metadata["children"].append(
                [c["name"] for c in subreg["sub_regions"]]
            )
            metadata["color"].append(subreg["color"])
            metadata["file"].append(subreg["files"]["file_3D"])
            for subsubreg in subreg["sub_regions"]:
                metadata["name"].append(subsubreg["name"])
                metadata["parent"].append([region["name"], subreg["name"]])
                metadata["children"].append(None)
                metadata["color"].append(subsubreg["color"])
                metadata["file"].append(subsubreg["files"]["file_3D"])

    # Get root
    complete_url = f"{self._base_url}/{self._url_paths['brain']}"
    brain = request(complete_url).json()["brain"]["outline"]
    metadata["name"].append("root")
    metadata["parent"].append(None)
    metadata["children"].append([c["name"] for c in regions["brain_regions"]])
    metadata["color"].append(brain["color"])
    metadata["file"].append(brain["file"])

    # get root size
    # dims = np.ceil([brain['width'], brain['height'], brain['depth']]).astype(np.int32)
    self.volume = np.zeros(self.atlas_dims)
    self._root_midpoint = [
        brain["center"]["x"],
        brain["center"]["y"],
        brain["center"]["z"],
    ]
    return pd.DataFrame(metadata)
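# Standalone sketch of the lookup pattern this DataFrame supports (the
# values below are made up; download_and_save_mesh above relies on the same
# `.loc[...].file.values[0]` idiom):
import pandas as pd

metadata = pd.DataFrame(dict(
    name=["root", "AL"],
    parent=[None, "root"],
    children=[["AL"], None],
    color=["#ffffff", "#ff0000"],
    file=["brain/root.obj", "brain/AL.obj"],
))
path = metadata.loc[metadata.name == "AL"].file.values[0]
print(path)  # brain/AL.obj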