def test_dataset_download_validations(self):
    self.assertNotExistsTable(self.test_write_table)

    df = load_geojson(self.test_geojson)
    dataset = Dataset(df)
    error_msg = 'You should provide a context and a table_name or query to download data.'
    with self.assertRaises(ValueError, msg=error_msg):
        dataset.download()

    query = 'SELECT 1 as fakec'
    dataset = Dataset(query, credentials=self.credentials)
    dataset.upload(table_name=self.test_write_table)

    # The query takes precedence over the (unused) table name when downloading
    dataset._table_name = 'non_used_table'
    df = dataset.download()
    self.assertEqual('fakec' in df.columns, True)

    dataset = Dataset(self.test_write_table, credentials=self.credentials)
    df = dataset.download()
    self.assertEqual('fakec' in df.columns, True)

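
# A minimal sketch of the download flow exercised by the test above, outside the
# test harness. The import paths and the Credentials constructor are assumptions
# based on the cartoframes 1.0 beta API; adjust them to your installed version.
from cartoframes.auth import Credentials
from cartoframes.data import Dataset

credentials = Credentials(username='your_user', api_key='your_api_key')  # hypothetical account

# Download the result of an arbitrary SQL query...
df = Dataset('SELECT 1 AS fakec', credentials=credentials).download()

# ...or download an existing table by name.
df = Dataset('your_table', credentials=credentials).download()
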
def test_dataset_write_polygons_dataset(self):
    self.assertNotExistsTable(self.test_write_table)

    from cartoframes.examples import read_brooklyn_poverty
    df = read_brooklyn_poverty()

    dataset = Dataset(df).upload(table_name=self.test_write_table, credentials=self.credentials)
    self.test_write_table = dataset.table_name

    query = 'SELECT cartodb_id FROM {} WHERE the_geom IS NOT NULL'.format(self.test_write_table)
    result = self.sql_client.query(query, verbose=True)
    self.assertEqual(result['total_rows'], 2049)

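
# A hedged sketch of the upload pattern used above: push a local DataFrame with
# geometries to CARTO as a new table. Import paths assume the cartoframes 1.0
# beta API, and the table name is hypothetical.
from cartoframes.auth import Credentials
from cartoframes.data import Dataset
from cartoframes.examples import read_brooklyn_poverty

credentials = Credentials(username='your_user', api_key='your_api_key')  # hypothetical account

df = read_brooklyn_poverty()
dataset = Dataset(df).upload(table_name='brooklyn_poverty_copy', credentials=credentials)
print(dataset.table_name)  # the table name actually created (may be normalized)
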
def test_isochrones_from_table_dataset_as_new_table(self):
    self.skip(if_no_credits=True, if_no_credentials=True)
    iso = Isolines(credentials=self.credentials)

    df = pd.DataFrame(self.points, columns=['name', 'the_geom'])
    table_name = self.get_test_table_name('isotb')
    Dataset(df).upload(table_name=table_name, credentials=self.credentials)
    ds = Dataset(table_name, credentials=self.credentials)

    result_table_name = self.get_test_table_name('isotbr')

    quota = self.used_quota(iso)

    # Preview
    result = iso.isochrones(ds, [100, 1000], mode='car', table_name=result_table_name, dry_run=True).metadata
    self.assertEqual(result.get('required_quota'), 6)
    self.assertEqual(self.used_quota(iso), quota)

    # Isochrones
    result = iso.isochrones(ds, [100, 1000], mode='car', table_name=result_table_name).data
    self.assertTrue(isinstance(result, Dataset))
    self.assertTrue(result.is_remote())
    quota += 6
    self.assertEqual(self.used_quota(iso), quota)

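
# A sketch of the dry-run preview pattern shown above: ask the Isolines service
# how much quota an isochrones call would consume before actually spending credits.
# Import paths, credentials and table names are assumptions based on the
# cartoframes 1.0 beta API.
from cartoframes.auth import Credentials
from cartoframes.data import Dataset
from cartoframes.data.services import Isolines

credentials = Credentials(username='your_user', api_key='your_api_key')  # hypothetical account
iso = Isolines(credentials=credentials)

points = Dataset('your_points_table', credentials=credentials)  # hypothetical source table

# dry_run=True only computes metadata; no quota is spent and no table is written.
preview = iso.isochrones(points, [100, 1000], mode='car', dry_run=True).metadata
if preview.get('required_quota', 0) <= 6:
    # Passing table_name writes the result server-side, so it stays remote (is_remote()).
    result = iso.isochrones(points, [100, 1000], mode='car', table_name='points_iso').data
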
def test_dataset_download_and_upload(self):
    self.assertNotExistsTable(self.test_write_table)

    query = 'SELECT 1 as fakec'
    dataset = Dataset(query, credentials=self.credentials)
    dataset.upload(table_name=self.test_write_table)

    dataset = Dataset(self.test_write_table, credentials=self.credentials)
    df = dataset.download()
    dataset = Dataset(df)
    dataset.upload(table_name=self.test_write_table,
                   credentials=self.credentials,
                   if_exists=Dataset.IF_EXISTS_REPLACE)

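
# A sketch of the round trip above: download a table into a DataFrame and write it
# back, overwriting the existing table with Dataset.IF_EXISTS_REPLACE. Import
# paths, credentials and the table name are assumptions.
from cartoframes.auth import Credentials
from cartoframes.data import Dataset

credentials = Credentials(username='your_user', api_key='your_api_key')  # hypothetical account

df = Dataset('your_table', credentials=credentials).download()
Dataset(df).upload(table_name='your_table',
                   credentials=credentials,
                   if_exists=Dataset.IF_EXISTS_REPLACE)  # replace instead of failing
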
def test_isochrones_from_query_dataset(self):
    self.skip(if_no_credits=True, if_no_credentials=True)
    iso = Isolines(credentials=self.credentials)

    ds = Dataset(self.points_query(), credentials=self.credentials)

    quota = self.used_quota(iso)

    # Preview
    result = iso.isochrones(ds, [100, 1000], mode='car', dry_run=True).metadata
    self.assertEqual(result.get('required_quota'), 6)
    self.assertEqual(self.used_quota(iso), quota)

    # Isochrones
    result = iso.isochrones(ds, [100, 1000], mode='car').data
    self.assertTrue(isinstance(result, Dataset))
    self.assertTrue(result.is_local())
    quota += 6
    self.assertEqual(self.used_quota(iso), quota)

    result_columns = result.get_column_names()
    self.assertTrue('the_geom' in result_columns)

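
# A sketch of running isochrones against a query-backed Dataset, as in the test
# above. Without a table_name the result comes back as a local Dataset whose
# columns (including the_geom) can be inspected. Import paths, credentials and
# the query are assumptions.
from cartoframes.auth import Credentials
from cartoframes.data import Dataset
from cartoframes.data.services import Isolines

credentials = Credentials(username='your_user', api_key='your_api_key')  # hypothetical account
iso = Isolines(credentials=credentials)

points = Dataset('SELECT cartodb_id, the_geom FROM your_points_table', credentials=credentials)
result = iso.isochrones(points, [100, 1000], mode='car').data

print(result.is_local())          # True: no table_name, so the result stays local
print(result.get_column_names())  # includes 'the_geom' with the isochrone polygons
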
def test_dataset_from_dataframe(self):
    df = load_geojson(self.test_geojson)
    dataset = Dataset(df)
    self.assertIsInstance(dataset, Dataset)
    self.assertIsNotNone(dataset.dataframe)
    self.assertIsNone(dataset.table_name)
    self.assertIsNone(dataset.credentials)

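
# A sketch of the local-only case checked above: a Dataset built from a DataFrame
# holds its data in memory and has no table_name or credentials until it is
# uploaded. The sample rows are hypothetical; only the 'the_geom' column name is
# taken from the tests above.
import pandas as pd
from cartoframes.data import Dataset

df = pd.DataFrame([['point_a', 'POINT (-73.99 40.73)']], columns=['name', 'the_geom'])
dataset = Dataset(df)
print(dataset.dataframe is not None)            # True: data lives locally
print(dataset.table_name, dataset.credentials)  # None None until upload()
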
def tearDown(self):
    """restore to original state"""
    tables = (self.test_write_table, )
    sql_drop = 'DROP TABLE IF EXISTS {};'

    for table in tables:
        try:
            Dataset(table, credentials=self.credentials).delete()
            self.sql_client.query(sql_drop.format(table))
        except CartoException:
            warnings.warn('Error deleting tables')

    StrategiesRegistry.instance = None

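
# A brief sketch of the cleanup pattern in tearDown above: removing a remote table
# through the Dataset API. The table name and credentials are assumptions.
from cartoframes.auth import Credentials
from cartoframes.data import Dataset

credentials = Credentials(username='your_user', api_key='your_api_key')  # hypothetical account
Dataset('your_temp_table', credentials=credentials).delete()  # drops the remote table
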
def test_isochrones_from_table_dataset(self):
    self.skip(if_no_credits=True, if_no_credentials=True)
    iso = Isolines(credentials=self.credentials)

    df = pd.DataFrame(self.points, columns=['name', 'the_geom'])
    table_name = self.get_test_table_name('isotb')
    Dataset(df).upload(table_name=table_name, credentials=self.credentials)
    ds = Dataset(table_name, credentials=self.credentials)

    quota = self.used_quota(iso)

    # Preview
    result = iso.isochrones(ds, [100, 1000], mode='car', dry_run=True).metadata
    self.assertEqual(result.get('required_quota'), 6)
    self.assertEqual(self.used_quota(iso), quota)

    # Isochrones
    result = iso.isochrones(ds, [100, 1000], mode='car').data
    self.assertTrue(isinstance(result, Dataset))
    self.assertTrue(result.is_local())
    quota += 6
    self.assertEqual(self.used_quota(iso), quota)

    result_columns = result.get_column_names()
    self.assertTrue('the_geom' in result_columns)

def tearDown(self):
    """restore to original state"""
    sql_drop = 'DROP TABLE IF EXISTS {};'

    for table in self.test_tables:
        try:
            Dataset(table, credentials=self.credentials).delete()
            self.sql_client.query(sql_drop.format(table))
        except CartoException:
            warnings.warn('Error deleting tables')

def read_nat(self, limit=None, **kwargs):
    """Historical homicide rates for the United States at the county level.

    See the function :py:func:`read_nat` for more information.

    Example:

        .. code::

            from cartoframes.examples import examples
            df = examples.read_nat()
    """
    return Dataset('nat', self._credentials).download(limit, **kwargs)
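
# A brief usage sketch of read_nat above. The limit argument is passed through to
# Dataset.download, so it is assumed here to cap the number of rows fetched.
from cartoframes.examples import examples

nat_df = examples.read_nat(limit=10)  # fetch only the first 10 rows
print(nat_df.shape)
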