def download_test_resources(args: Args):
    # Try running the download pipeline
    try:
        # Get test resources dir
        resources_dir = (
            Path(__file__).parent.parent / "aicsimageio" / "tests" / "resources"
        ).resolve()
        resources_dir.mkdir(exist_ok=True)

        # Get quilt package
        package = Package.browse(
            "aicsimageio/test_resources",
            "s3://aics-modeling-packages-test-resources",
            top_hash=args.top_hash,
        )

        # Download
        package["resources"].fetch(resources_dir)

        log.info("Completed package download.")

    # Catch any exception
    except Exception as e:
        log.error("=============================================")
        if args.debug:
            log.error("\n\n" + traceback.format_exc())
            log.error("=============================================")
    Returns
    -------
    type
        pandas.DataFrame or geopandas.GeoDataFrame
        2010 blocks as a geodataframe or as a dataframe with geometry
        stored as well-known binary in the 'wkb' column.

    """
    try:  # if any of these aren't found, stream them instead
        from quilt3.data.census import blocks_2010
    except ImportError:
        warn(
            "Unable to locate local census 2010 block data. Streaming instead.\n"
            "If you plan to use census data repeatedly you can store it locally "
            "with the data.store_blocks_2010 function for better performance"
        )
        try:
            blocks_2010 = quilt3.Package.browse(
                "census/blocks_2010", "s3://quilt-cgs"
            )
        except Timeout:
            warn(
                "Unable to locate local census data and unable to reach the s3 bucket. "
                "You will be unable to use built-in data during this session. "
                "If you need these data, please try downloading a local copy "
                "with the data.store_blocks_2010 function, then restart your "
                "python kernel and try again."
            )

    if isinstance(states, (str, int)):
        states = [states]
    blks = {}
    for state in states:

gpu_id = args.gpu_id
n_images_to_download = args.n_imgs  # more images the better
train_fraction = 0.75

image_save_dir = "{}/".format(os.getcwd())
model_save_dir = "{}/model/".format(os.getcwd())

prefs_save_path = "{}/prefs.json".format(model_save_dir)
data_save_path_train = "{}/image_list_train.csv".format(image_save_dir)
data_save_path_test = "{}/image_list_test.csv".format(image_save_dir)

if not os.path.exists(image_save_dir):
    os.makedirs(image_save_dir)

aics_pipeline = quilt3.Package.browse(
    "aics/pipeline_integrated_cell", registry="s3://allencell"
)

data_manifest = aics_pipeline["metadata.csv"]()

# The rows of the manifest correspond to cells; trim down to unique FOVs
unique_fov_indices = np.unique(data_manifest["FOVId"], return_index=True)[1]
data_manifest = data_manifest.iloc[unique_fov_indices]

# Select the first n_images_to_download
data_manifest = data_manifest.iloc[0:n_images_to_download]

image_source_paths = data_manifest["SourceReadPath"]

image_target_paths = [
    "{}/{}".format(image_save_dir, image_source_path)
def chart_benchmarks(args: Args):
    # Check save dir exists or create
    args.save_dir.mkdir(parents=True, exist_ok=True)

    # Get file
    if args.benchmark_file is None:
        benchmark_filepath = Path("benchmark_results.json")
        p = Package.browse(
            "aicsimageio/benchmarks", "s3://aics-modeling-packages-test-resources"
        )
        p["results.json"].fetch(benchmark_filepath)
    else:
        benchmark_filepath = args.benchmark_file

    # Read results file
    with open(benchmark_filepath, "r") as read_in:
        all_results = json.load(read_in)

    # Generate charts for each config
    per_cluster_results = []
    selected_cluster_results = []
    for config_name, results in all_results.items():
        results = pd.DataFrame(results)
        results["config"] = config_name
def __init__(self):
    """Instantiate a new DataStore object."""
    try:  # if any of these aren't found, stream them instead
        from quilt3.data.census import tracts_cartographic, administrative
    except ImportError:
        warn(
            "Unable to locate local census data. Streaming instead.\n"
            "If you plan to use census data repeatedly you can store it locally "
            "with the data.store_census function for better performance"
        )
        try:
            tracts_cartographic = quilt3.Package.browse(
                "census/tracts_cartographic", "s3://quilt-cgs"
            )
            administrative = quilt3.Package.browse(
                "census/administrative", "s3://quilt-cgs"
            )
        except Timeout:
            warn(
                "Unable to locate local census data and unable to reach the s3 bucket. "
                "You will be unable to use built-in data during this session. "
                "If you need these data, please try downloading a local copy "
                "with the data.store_census function, then restart your "
                "python kernel and try again."
            )
    self.tracts_cartographic = tracts_cartographic
    self.administrative = administrative
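The pattern used here, and in the blocks_2010 snippet above, is: prefer a locally installed quilt data package, fall back to streaming with Package.browse, and warn if the registry is unreachable. It can be factored into a small helper. A minimal sketch under two assumptions: load_or_stream is a hypothetical helper name, and the snippet's Timeout is taken to be requests.exceptions.Timeout (the original import is not shown):

import importlib
from warnings import warn

import quilt3
from requests.exceptions import Timeout  # assumed source of the Timeout above


def load_or_stream(module_path, attribute, package_name, registry="s3://quilt-cgs"):
    """Return a locally installed quilt data package, or stream it from the registry."""
    try:
        # Locally installed data packages are importable, e.g. quilt3.data.census.
        module = importlib.import_module(module_path)
        return getattr(module, attribute)
    except (ImportError, AttributeError):
        warn("Local copy of {} not found. Streaming instead.".format(package_name))
    try:
        return quilt3.Package.browse(package_name, registry)
    except Timeout:
        warn("Could not reach {}; built-in data will be unavailable.".format(registry))
        return None


# Example, mirroring the DataStore fallback above:
tracts_cartographic = load_or_stream(
    "quilt3.data.census", "tracts_cartographic", "census/tracts_cartographic"
)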