Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- pydomo example: DataSet lifecycle (define, create, get, list, update) ---
# NOTE(review): `domo`, `datasets`, `DataSetRequest`, `Schema`, `Column`,
# `ColumnType`, and `Sorting` are assumed to be imported from the pydomo SDK
# elsewhere in this file — confirm against the surrounding imports.

# Define a DataSet Schema: a single STRING column named 'Friend'
dsr = DataSetRequest()
dsr.name = 'Leonhard Euler Party'
dsr.description = 'Mathematician Guest List'
dsr.schema = Schema([Column(ColumnType.STRING, 'Friend')])
# Create a DataSet with the given Schema; the response is dict-like
# (it is indexed with ['id'] below)
dataset = datasets.create(dsr)
domo.logger.info("Created DataSet " + dataset['id'])
# Get a DataSet's metadata by id
retrieved_dataset = datasets.get(dataset['id'])
domo.logger.info("Retrieved DataSet " + retrieved_dataset['id'])
# List DataSets sorted by name, capped at 10; `datasets.list` is lazy
# (a generator), so wrap it in list() to materialize the results
dataset_list = list(datasets.list(sort=Sorting.NAME, limit=10))
domo.logger.info("Retrieved a list containing {} DataSet(s)".format(
len(dataset_list)))
# Update the DataSet's metadata: new name/description, and a schema that
# adds a second STRING column 'Attending' alongside 'Friend'
update = DataSetRequest()
update.name = 'Leonhard Euler Party - Update'
update.description = 'Mathematician Guest List - Update'
update.schema = Schema([Column(ColumnType.STRING, 'Friend'),
Column(ColumnType.STRING, 'Attending')])
updated_dataset = datasets.update(dataset['id'], update)
domo.logger.info("Updated DataSet {}: {}".format(updated_dataset['id'],
updated_dataset['name']))
# Import Data from a string: three CSV rows matching the two-column
# schema above (the upload call presumably follows later in the file)
csv_upload = '"Pythagoras","FALSE"\n"Alan Turing","TRUE"\n' \
'"George Boole","TRUE"'
def list(self, sort=Sorting.DEFAULT, per_page=50, offset=0, limit=0):
# API uses pagination with a max of 50 per page
if per_page not in range(1, 51):
raise ValueError('per_page must be between 1 and 50 (inclusive)')
# Don't pull 50 values if user requests 10
if limit:
per_page = min(per_page, limit)
params = {
'sort': sort,
'limit': per_page,
'offset': offset,
}
dataset_count = 0
datasets = self._list(URL_BASE, params, DATA_SET_DESC)