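# Usage examples for the Kaggle API ("kaggle" Python package) and its CLI,
# excerpted from several projects; each excerpt below is partial.
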
def kernel_submit(self):
    # Relies on os/shutil/json from the standard library and on `api`, an
    # authenticated kaggle API client, all imported elsewhere in the module.
    self.info('kernel_submit updating dataset')
    folder = 'submit'
    os.makedirs(folder, exist_ok=True)
    shutil.copy(self.file, os.path.join(folder, self.file_name))

    config = api.read_config_file()
    username = config['username']
    title = f'{self.competition}-{self.kernel_suffix}-dataset'
    dataset_meta = {
        'title': title,
        'id': f'{username}/{title}',
        'licenses': [{
            'name': 'CC0-1.0'
        }]
    }
    with open(f'{folder}/dataset-metadata.json', 'w') as f:
        json.dump(dataset_meta, f)

    # Create the dataset if it does not exist yet.
    res = api.dataset_status(dataset_meta['id'])
    if res != 'ready':
        res = api.dataset_create_new(folder=folder)
        if res.status == 'error':
            ...  # error handling omitted in this excerpt
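
# The next excerpt, from a separate download script, fetches and unpacks the
# Carvana competition files. It assumes `import os, zipfile, kaggle` and an
# argparse namespace `args` with a `dir` attribute; the CLI equivalent would be
# `kaggle competitions download -c carvana-image-masking-challenge -f train.zip`.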
base_datadir = args.dir
if not os.path.isdir(base_datadir):
    os.mkdir(base_datadir)
carvana_dir = os.path.join(base_datadir, 'carvana')
tmp_dir = os.path.join(base_datadir, 'tmp')

competition = 'carvana-image-masking-challenge'
files = [
    'train.zip',
    'train_masks.zip'
]
for filename in files:
    # `competition_download_file` fetches a single competition file into `path`.
    kaggle.api.competition_download_file(competition, filename, path=tmp_dir)
    name, extension = os.path.splitext(filename)
    filepath = os.path.join(tmp_dir, filename)
    if extension == '.zip':
        # Unpack archives into the carvana directory, then drop the zip.
        with zipfile.ZipFile(filepath, 'r') as zip_ref:
            zip_ref.extractall(carvana_dir)
        os.remove(filepath)
    else:
        # Non-archive files are moved into place as-is.
        os.rename(filepath, os.path.join(carvana_dir, filename))
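
# The excerpts that follow are from the Kaggle CLI's argparse wiring.
# `subparsers_*`, `Help`, and `api` are defined earlier in that module; each
# block maps a subcommand such as `kaggle config set`, `kaggle kernels push`
# or `kaggle datasets init` to its api.*_cli handler.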
parser_config_set_required.add_argument('-v',
                                        '--value',
                                        dest='value',
                                        required=True,
                                        help=Help.param_config_value)
parser_config_set.set_defaults(func=api.set_config_value)
parser_config_unset = subparsers_config.add_parser(
    'unset',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_config_unset)
parser_config_unset._action_groups.pop()
parser_config_unset_required = parser_config_unset.add_argument_group(
    'required arguments')
parser_config_unset_required.add_argument('-n',
                                          '--name',
                                          dest='name',
                                          required=True,
                                          help=Help.param_config_name)
parser_config_unset.set_defaults(func=api.unset_config_value)
parser_kernels_init._action_groups.append(parser_kernels_init_optional)
parser_kernels_init.set_defaults(func=api.kernels_initialize_cli)
# Kernels push
parser_kernels_push = subparsers_kernels.add_parser(
    'push',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_kernels_push)
parser_kernels_push_optional = parser_kernels_push._action_groups.pop()
parser_kernels_push_optional.add_argument('-p',
                                          '--path',
                                          dest='folder',
                                          required=False,
                                          help=Help.param_kernel_upfile)
parser_kernels_push._action_groups.append(parser_kernels_push_optional)
parser_kernels_push.set_defaults(func=api.kernels_push_cli)

# Kernels pull
parser_kernels_pull = subparsers_kernels.add_parser(
    'pull',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_kernels_pull)
parser_kernels_pull_optional = parser_kernels_pull._action_groups.pop()
parser_kernels_pull_optional.add_argument('kernel',
                                          nargs='?',
                                          default=None,
                                          help=Help.param_kernel)
parser_kernels_pull_optional.add_argument('-k',
                                          '--kernel',
                                          dest='kernel',
                                          required=False,
                                          help=argparse.SUPPRESS)
# Competitions list
parser_competitions_list_optional.add_argument('-p',
                                               '--page',
                                               dest='page',
                                               default=1,
                                               required=False,
                                               help=Help.param_page)
parser_competitions_list_optional.add_argument('-s',
                                               '--search',
                                               dest='search',
                                               required=False,
                                               help=Help.param_search)
parser_competitions_list_optional.add_argument('-v',
                                               '--csv',
                                               dest='csv_display',
                                               action='store_true',
                                               help=Help.param_csv)
parser_competitions_list._action_groups.append(
    parser_competitions_list_optional)
parser_competitions_list.set_defaults(func=api.competitions_list_cli)
# Competitions list files
parser_competitions_files = subparsers_competitions.add_parser(
    'files',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_competitions_files)
parser_competitions_files_optional = parser_competitions_files._action_groups.pop()
parser_competitions_files_optional.add_argument('competition',
                                                nargs='?',
                                                default=None,
                                                help=Help.param_competition)
parser_competitions_files_optional.add_argument('-c',
                                                '--competition',
                                                dest='competition_opt',
                                                required=False,
                                                help=argparse.SUPPRESS)
parser_competitions_files_optional.add_argument('-v',
                                                '--csv',
                                                dest='csv_display',
                                                action='store_true',
                                                help=Help.param_csv)
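
# Back in kernel_submit (a continuation of the first excerpt): the kernel's
# source is generated from a template, pushed together with the dataset
# prepared above, and the run is polled until it finishes. The kernel metadata,
# including the `slug` used below, is set up in the omitted part of the method.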
""".replace('{self.competition}', self.competition).replace(
'{self.kernel_suffix}', self.kernel_suffix
).replace('{self.file_name}', self.file_name
).replace('{self.predict_column}', self.predict_column)
    with open(f'{folder}/code.py', 'w') as f:
        f.write(code)
    self.info('kernel data created')

    api.kernels_push(folder)
    self.info('kernel is pushed. waiting for the end of the commit')
    self.info(f'kernel address: https://www.kaggle.com/{username}/{slug}')

    # Poll the kernel status once per second until it completes, fails,
    # or the wait budget runs out.
    seconds = self.wait_seconds
    for i in range(seconds):
        response = api.kernel_status(username, slug)
        if response['status'] == 'complete':
            self.info(f'kernel has completed successfully. '
                      f'Please go to '
                      f'https://www.kaggle.com/{username}/{slug} '
                      f'and push the button "Submit to the competition"')
            return
        if response['status'] == 'error':
            raise Exception(
                f'Kernel failed. Msg = {response["failureMessage"]}')
        time.sleep(1)
        self.wait_seconds -= 1
    self.info(f'kernel did not finish after {seconds} seconds')
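
# Further CLI excerpts (kernels output / kernels status). This one begins
# mid-way through an add_argument(...) call whose opening lines are not
# included in the excerpt.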
required=False,
help=Help.param_wp)
parser_kernels_output_optional.add_argument('-o',
                                            '--force',
                                            dest='force',
                                            action='store_true',
                                            required=False,
                                            help=Help.param_force)
parser_kernels_output_optional.add_argument('-q',
                                            '--quiet',
                                            dest='quiet',
                                            action='store_true',
                                            required=False,
                                            help=Help.param_quiet)
parser_kernels_output._action_groups.append(parser_kernels_output_optional)
parser_kernels_output.set_defaults(func=api.kernels_output_cli)

# Kernels status
parser_kernels_status = subparsers_kernels.add_parser(
    'status',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_kernels_status)
parser_kernels_status_optional = parser_kernels_status._action_groups.pop()
parser_kernels_status_optional.add_argument('kernel',
                                            nargs='?',
                                            default=None,
                                            help=Help.param_kernel)
parser_kernels_status_optional.add_argument('-k',
                                            '--kernel',
                                            dest='kernel',
                                            required=False,
                                            help=argparse.SUPPRESS)
# Datasets version
parser_datasets_version._action_groups.append(
    parser_datasets_version_optional)
parser_datasets_version.set_defaults(func=api.dataset_create_version_cli)
# Datasets init
parser_datasets_init = subparsers_datasets.add_parser(
    'init',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_datasets_init)
parser_datasets_init_optional = parser_datasets_init._action_groups.pop()
parser_datasets_init_optional.add_argument('-p',
                                           '--path',
                                           dest='folder',
                                           required=False,
                                           help=Help.param_dataset_upfile)
parser_datasets_init._action_groups.append(parser_datasets_init_optional)
parser_datasets_init.set_defaults(func=api.dataset_initialize_cli)

# Datasets metadata
parser_datasets_metadata = subparsers_datasets.add_parser(
    'metadata',
    formatter_class=argparse.RawTextHelpFormatter,
    help=Help.command_datasets_metadata)
parser_datasets_metadata_optional = parser_datasets_metadata._action_groups.pop()
parser_datasets_metadata_optional.add_argument('dataset',
                                               nargs='?',
                                               default=None,
                                               help=Help.param_dataset)
parser_datasets_metadata_optional.add_argument('-d',
                                               '--dataset',
                                               dest='dataset',
                                               required=False,
                                               help=argparse.SUPPRESS)
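
# The final excerpt is a download helper from a dataset class; `self.root` is
# the local directory the dataset should be unpacked into.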
def _download(self):
    # Download only once: if the data directory already exists, do nothing.
    if os.path.exists(self.root + '/'):
        return
    os.makedirs(self.root + '/')

    # Authenticate with the local Kaggle credentials (e.g. ~/.kaggle/kaggle.json)
    # and unzip the dataset archive into self.root.
    import kaggle
    kaggle.api.authenticate()
    kaggle.api.dataset_download_files(
        'drgilermo/face-images-with-marked-landmark-points',
        path=self.root,
        unzip=True)