# Convert the MNIST training and testing splits into one stored tensor
# dataset. (`load_mnist`, `tensor`, and `dataset` are project-local modules
# whose imports the original snippet omits; the --dataset_path argument is
# reconstructed, since the snippet begins mid-way through its add_argument
# call.)
import argparse

import numpy as np

parser = argparse.ArgumentParser()
parser.add_argument(
    "-d", "--dataset_path", type=str, help="Path to the raw MNIST files",
)
parser.add_argument(
    "-o", "--output_name", type=str, help="Dataset output name", default="mnist",
)
args = parser.parse_args()

files = ["training", "testing"]
dicts = []
for f in files:
    images, labels = load_mnist(f, path=args.dataset_path)
    dicts += [{"images": images, "labels": labels}]

# Stack both splits into one array each; MNIST labels (0-9) fit in int8.
images = np.concatenate([d["images"] for d in dicts])
labels = np.concatenate([np.array(d["labels"], dtype="int8") for d in dicts])
print(images.shape, labels.shape)
# Optional spot-check of a single image:
# Image.fromarray(images[1000]).save("./data/image.png")

# Wrap the arrays and persist them under ./data/generated/<output_name>.
images_t = tensor.from_array(images)
labels_t = tensor.from_array(labels)
ds = dataset.from_tensors({"data": images_t, "labels": labels_t})
ds.store(f"./data/generated/{args.output_name}")
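
# For reference, a minimal sketch of what a load_mnist() helper like the one
# used above typically does, assuming the standard uncompressed IDX files
# (train-images-idx3-ubyte, t10k-images-idx3-ubyte, and the matching label
# files) sit in `path`. The project's real helper may differ; this function
# and its name are illustrative only.
import os
import numpy as np

def load_mnist_sketch(split, path="."):
    prefix = "train" if split == "training" else "t10k"
    with open(os.path.join(path, f"{prefix}-images-idx3-ubyte"), "rb") as fh:
        # IDX image header is 16 bytes (magic, count, rows, cols); uint8 pixels follow.
        imgs = np.frombuffer(fh.read(), dtype=np.uint8, offset=16).reshape(-1, 28, 28)
    with open(os.path.join(path, f"{prefix}-labels-idx1-ubyte"), "rb") as fh:
        # IDX label header is 8 bytes (magic, count); uint8 labels follow.
        lbls = np.frombuffer(fh.read(), dtype=np.uint8, offset=8)
    return imgs, lbls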
help="Dataset output name",
default="cifar10",
)
args = parser.parse_args()
files = sorted([f for f in os.listdir(args.dataset_path) if "_batch" in f])
dicts = []
for f in files:
with open(os.path.join(args.dataset_path, f), "rb") as fh:
dicts += [pickle.load(fh, encoding="bytes")]
print(dicts[-1].keys())
images = np.concatenate([d[b"data"] for d in dicts])
images = images.reshape((len(images), 3, 32, 32))
labels = np.concatenate([np.array(d[b"labels"], dtype="int16") for d in dicts])
print(images.shape, labels.shape)
Image.fromarray(images[1000].transpose(1, 2, 0)).save("./data/image.png")
images_t = tensor.from_array(images)
labels_t = tensor.from_array(labels)
ds = dataset.from_tensors({"data": images_t, "labels": labels_t})
ds.store(f"./data/generated/{args.output_name}")
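
# Why the reshape and transpose in the CIFAR-10 script work: each row of
# b"data" is 3072 values, laid out as 1024 red, then 1024 green, then 1024
# blue values in row-major pixel order. A self-contained illustration with a
# synthetic row (not part of the conversion script):
import numpy as np

row = np.arange(3072)                        # synthetic flat CIFAR-style row
chw = row.reshape(3, 32, 32)                 # channel-first, as in the script
assert chw[1, 0, 0] == 1024                  # green plane starts 1024 values in
assert chw[2, 0, 0] == 2048                  # blue plane starts 2048 values in
hwc = chw.transpose(1, 2, 0)                 # HWC layout, as PIL expects
assert tuple(hwc[0, 0]) == (0, 1024, 2048)   # pixel (0, 0) as (R, G, B)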
default="cifar10",
)
args = parser.parse_args()
files = sorted([f for f in os.listdir(args.dataset_path) if "_batch" in f])
dicts = []
for f in files:
with open(os.path.join(args.dataset_path, f), "rb") as fh:
dicts += [pickle.load(fh, encoding="bytes")]
print(dicts[-1].keys())
images = np.concatenate([d[b"data"] for d in dicts])
images = images.reshape((len(images), 3, 32, 32))
labels = np.concatenate([np.array(d[b"labels"], dtype="int16") for d in dicts])
print(images.shape, labels.shape)
Image.fromarray(images[1000].transpose(1, 2, 0)).save("./data/image.png")
images_t = tensor.from_array(images)
labels_t = tensor.from_array(labels)
ds = dataset.from_tensors({"data": images_t, "labels": labels_t})
ds.store(f"./data/generated/{args.output_name}")
# Convert the CIFAR-100 pickle files into one stored tensor dataset with
# both fine (100-class) and coarse (20-superclass) labels. Everything above
# the for-loop is reconstructed, since the original snippet begins at the
# loop; the parser defaults and the file selection here are assumptions.
import argparse
import os
import pickle

import numpy as np
from PIL import Image

parser = argparse.ArgumentParser()
parser.add_argument(
    "-d", "--dataset_path", type=str, help="Path to the extracted CIFAR-100 files",
)
parser.add_argument(
    "-o", "--output_name", type=str, help="Dataset output name", default="cifar100",
)
args = parser.parse_args()

files = sorted([f for f in os.listdir(args.dataset_path) if f in ("train", "test")])
dicts = []
for f in files:
    with open(os.path.join(args.dataset_path, f), "rb") as fh:
        dicts += [pickle.load(fh, encoding="bytes")]
    print(dicts[-1].keys())

# Flat 3072-byte rows -> channel-first (N, 3, 32, 32), as for CIFAR-10.
images = np.concatenate([d[b"data"] for d in dicts])
images = images.reshape((len(images), 3, 32, 32))
fine_labels = np.concatenate(
    [np.array(d[b"fine_labels"], dtype="int16") for d in dicts]
)
coarse_labels = np.concatenate(
    [np.array(d[b"coarse_labels"], dtype="int16") for d in dicts]
)
print(images.shape, fine_labels.shape, coarse_labels.shape)
Image.fromarray(images[1000].transpose(1, 2, 0)).save("./data/image.png")

images_t = tensor.from_array(images)
fine_labels_t = tensor.from_array(fine_labels)
coarse_labels_t = tensor.from_array(coarse_labels)
ds = dataset.from_tensors(
    {
        "data": images_t,
        "fine_labels": fine_labels_t,
        "coarse_labels": coarse_labels_t,
    }
)
ds.store(f"./data/generated/{args.output_name}")
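
# Suggested sanity checks for the CIFAR-100 conversion (not in the original
# script): per the CIFAR-100 spec there are 100 fine classes, 20 coarse
# superclasses, and 60000 images across train and test.
assert fine_labels.min() >= 0 and fine_labels.max() <= 99
assert coarse_labels.min() >= 0 and coarse_labels.max() <= 19
assert len(images) == len(fine_labels) == len(coarse_labels) == 60000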