def test_data_returned_are_data_objects(self):
    results = Data.all(
        params={'database_code': 'NSE', 'dataset_code': 'OIL'})
    self.assertEqual(len(results), 4)
    for result in results:
        self.assertIsInstance(result, Data)
def test_get_merged_dataset_data_returns_correct_types(self):
    data = MergedDataset(
        [('NSE/OIL', {'column_index': [1, 2]}),
         ('WIKI/AAPL', {'column_index': [1]}),
         'WIKI/MSFT']).data()
    self.assertIsInstance(data, MergedDataList)
    self.assertIsInstance(data[0], Data)
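# A minimal usage sketch of the pattern exercised above, outside the test:
# MergedDataset accepts a mix of (code, options) tuples and bare dataset codes,
# and .data() returns a MergedDataList of Data rows. The to_pandas() call is an
# assumption based on the DataList interface, not something this test verifies.
merged = MergedDataset([
    ('NSE/OIL', {'column_index': [1, 2]}),  # restrict to selected columns
    'WIKI/MSFT',                            # bare code: all columns
]).data()

first_row = merged[0]        # each element is a Data row
frame = merged.to_pandas()   # assumed DataList-style pandas export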
@classmethod
def setUpClass(cls):
    cls.expected_column_names = [six.u('Date'), six.u('column.1'),
                                 six.u('column.2'), six.u('column.3')]
    cls.data_object = Data(['2015-07-15', 440.0, 2, 3],
                           meta={'column_names': cls.expected_column_names})
@classmethod
def setUpClass(cls):
    cls.expected_column_names = [six.u('per_end_date'),
                                 six.u('ticker'),
                                 six.u('tot_oper_exp')]
    cls.expected_column_types = [six.u('Date'),
                                 six.u('String'),
                                 six.u('String')]
    cls.data_object = Data(['2015-07-11', 'AAPL', 440.0],
                           meta={'columns': cls.expected_column_names,
                                 'column_types': cls.expected_column_types})
# Body of a test-setup helper: `unit_test` is the test case being primed with
# HTTP mocks and fixture objects (a register_uri sketch follows this block).
httpretty.reset()
httpretty.enable()
unit_test.dataset_data = {'dataset_data': DatasetDataFactory.build()}

# mock out calls with the column_index query param
# NOTE: this will always return 'column.1' as the column name
single_col_data = DatasetDataFactory.build(
    column_names=[six.u('Date'), six.u('column.1')],
    data=[['2015-07-11', 444.3], ['2015-07-13', 433.3],
          ['2015-07-14', 437.5], ['2015-07-15', 440.0]])
unit_test.single_dataset_data = {'dataset_data': single_col_data}

dataset_data = DatasetDataFactory.build()
d_values = dataset_data.pop('data')
d_metadata = dataset_data
unit_test.data_list_obj = DataList(Data, d_values, d_metadata)

unit_test.nse_oil = {'dataset': DatasetFactory.build(
    database_code='NSE', dataset_code='OIL')}
unit_test.wiki_aapl = {'dataset': DatasetFactory.build(
    database_code='WIKI', dataset_code='AAPL')}
unit_test.wiki_msft = {'dataset': DatasetFactory.build(
    database_code='WIKI', dataset_code='MSFT',
    newest_available_date='2015-07-30', oldest_available_date='2013-01-01')}
unit_test.single_col = {'dataset': DatasetFactory.build(
    database_code='SINGLE', dataset_code='COLUMN',
    newest_available_date='2015-07-30', oldest_available_date='2013-01-01')}

unit_test.oil_obj = Dataset('NSE/OIL', unit_test.nse_oil['dataset'])
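# How the fixtures above might be wired to HTTP calls, continuing from the
# helper body: a minimal sketch using httpretty.register_uri. The endpoint
# pattern is an assumption for illustration (not taken from this helper), and
# it assumes the factory builds a plain, JSON-serialisable dict.
import json
import re

import httpretty

httpretty.register_uri(
    httpretty.GET,
    re.compile(r'https://www\.quandl\.com/api/v3/datasets/.*/data'),  # assumed URL pattern
    body=json.dumps(unit_test.dataset_data))  # serve the DatasetDataFactory fixture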
def data(self, **options):
    # handle_not_found_error: if set to True, return an empty DataList for a
    # non-existent dataset instead of raising an error; handle_column_not_found
    # does the same for an unknown column (usage sketch follows this method)
    handle_not_found_error = options.pop('handle_not_found_error', False)
    handle_column_not_found = options.pop('handle_column_not_found', False)
    # default order to ascending, and respect whatever the user passes in
    params = {
        'database_code': self.database_code,
        'dataset_code': self.dataset_code,
        'order': 'asc'
    }
    updated_options = Util.merge_options('params', params, **options)
    try:
        return Data.all(**updated_options)
    except NotFoundError:
        if handle_not_found_error:
            return DataList(Data, [], {'column_names': [six.u('None'), six.u('Not Found')]})
        raise
    except ColumnNotFound:
        if handle_column_not_found:
            return DataList(Data, [], {'column_names': [six.u('None'), six.u('Not Found')]})
        raise
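# Usage sketch for the error-handling flags above; Dataset('NSE/OIL') simply
# stands in for any dataset instance and is not asserted to exist.
dataset = Dataset('NSE/OIL')

# Without the flag, a NotFoundError from Data.all propagates to the caller.
rows = dataset.data()

# With the flag, a missing dataset yields an empty DataList placeholder
# (column_names of 'None' / 'Not Found') instead of an exception.
rows_or_empty = dataset.data(handle_not_found_error=True)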
for index, data_frame in enumerate(data_frames):
    metadata = self.__dataset_objects__()[index]
    # use the locally held code to avoid a metadata API call
    data_frame.rename(
        columns=lambda x: self._rename_columns(metadata.code, x), inplace=True)
    merged_data_frame = pd.merge(
        merged_data_frame, data_frame, right_index=True, left_index=True, how='outer')

merged_data_metadata = self._build_data_meta(dataset_data_list, merged_data_frame)

# If descending order was explicitly requested it has to be re-applied here,
# because the pandas outer merge returns the merged DataFrame in ascending
# index order by default (see the standalone sketch after this block).
return MergedDataList(
    Data, merged_data_frame, merged_data_metadata,
    ascending=self._order_is_ascending(**options))
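# Standalone sketch (not library code) illustrating the sort behaviour noted in
# the comment above: an index-on-index outer merge comes back in ascending index
# order, so an explicit descending request has to be re-applied afterwards.
import pandas as pd

left = pd.DataFrame({'a': [1.0, 2.0]}, index=pd.to_datetime(['2015-07-15', '2015-07-13']))
right = pd.DataFrame({'b': [3.0, 4.0]}, index=pd.to_datetime(['2015-07-14', '2015-07-11']))

merged = pd.merge(left, right, left_index=True, right_index=True, how='outer')
print(merged.index.is_monotonic_increasing)   # True: rows come back ascending
print(merged.sort_index(ascending=False))     # re-apply descending when requested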