Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_closed_is_false_after_init(client_id, client_secret, mock_box_session):
    """A freshly-constructed OAuth2 object must not report itself as closed."""
    oauth = OAuth2(
        client_id=client_id,
        client_secret=client_secret,
        session=mock_box_session,
    )
    assert oauth.closed is False
'Content-Type': 'application/json',
'Digest': 'SHA={}'.format(base64.b64encode(sha1.digest()).decode('utf-8')),
'If-Match': '7',
}
mock_box_session.post.return_value.json.return_value = {
'entries': [
{
'type': file_type,
'id': file_id,
'description': 'This is a test description.',
},
],
}
created_file = test_upload_session.commit(content_sha1=sha1.digest(), parts=parts, file_attributes=file_attributes, etag=file_etag)
mock_box_session.post.assert_called_once_with(expected_url, data=json.dumps(expected_data), headers=expected_headers)
assert isinstance(created_file, File)
assert created_file.id == file_id
assert created_file.type == file_type
assert created_file.description == 'This is a test description.'
'offset': 0,
'total_count': 1,
'limit': 1000,
}
mock_box_session.post.return_value.json.return_value = {
'entries': [
{
'type': file_type,
'id': file_id,
},
],
}
created_file = test_upload_session.commit(content_sha1=sha1.digest())
mock_box_session.get.assert_called_once_with(expected_get_url, params={'offset': None})
mock_box_session.post.assert_called_once_with(expected_url, data=json.dumps(expected_data), headers=expected_headers)
assert isinstance(created_file, File)
assert created_file.id == file_id
assert created_file.type == file_type
def get_response(limit, marker):
    """Build a mocked paged-API response for the entries slice selected by ``marker``.

    ``limit`` is the page size; ``marker`` is either ``None`` (start from the
    beginning) or a string of the form ``"marker_<offset>"``.  ``entries``,
    ``next_marker_value_for_last_page``, and ``self`` come from the enclosing
    scope (this is a closure inside a test helper).

    NOTE(review): ``mock_json`` is built but never attached to
    ``mock_box_response`` or returned within this visible span — confirm the
    remainder of this helper is not missing from this chunk.
    """
    mock_box_response = Mock(BoxResponse)
    mock_network_response = Mock(DefaultNetworkResponse)
    mock_box_response.network_response = mock_network_response
    mock_json = {}
    # The marker string should be of format: "marker_i", where i is the offset. Parse that out.
    # If the marker is None, then begin paging from the start of the entries.
    offset = 0
    if marker is not None:
        offset = int(marker.split('_')[1])
    mock_json['entries'] = entries[offset:limit + offset]
    # A next_marker is only returned if there are more pages left.
    if (offset + limit) < len(entries):
        mock_json['next_marker'] = 'marker_{0}'.format(offset + limit)
    elif next_marker_value_for_last_page is not self.NO_NEXT_MARKER:
        # Sentinel comparison: only emit a final-page marker when the test
        # explicitly configured one.
        mock_json['next_marker'] = next_marker_value_for_last_page
@api_call
def api_call_method(self, *call_args, **call_kwargs):
    """Echo back the receiver, the call arguments, and the fixture result.

    Wrapped with ``@api_call`` so tests can verify that the decorator passes
    arguments through untouched.
    """
    return self, call_args, call_kwargs, api_call_result
def test_update_contents_does_preflight_check_if_specified(
        test_file,
        mock_file_path,
        preflight_check,
        file_size,
        preflight_fails,
        mock_box_session,
):
    """``update_contents`` should run a preflight check only when asked to.

    When ``preflight_check`` is truthy the check must be invoked with the
    expected size; a failing check (``preflight_fails``) must propagate
    ``BoxAPIException`` and suppress the upload POST.  Without
    ``preflight_check`` the check must not run at all.
    """
    with patch.object(File, 'preflight_check', return_value=None):
        kwargs = {'file_path': mock_file_path}
        if preflight_check:
            kwargs['preflight_check'] = preflight_check
            kwargs['preflight_expected_size'] = file_size
        if preflight_fails:
            test_file.preflight_check.side_effect = BoxAPIException(400)
            with pytest.raises(BoxAPIException):
                test_file.update_contents(**kwargs)
        else:
            test_file.update_contents(**kwargs)
        if preflight_check:
            # Bug fix: the original used ``assert mock.called_once_with(...)``,
            # which always passes because ``called_once_with`` is an
            # auto-created (truthy) Mock attribute.  Use the real assertion.
            test_file.preflight_check.assert_called_once_with(size=file_size)
            if preflight_fails:
                # A failed preflight must short-circuit before the upload POST.
                assert not mock_box_session.post.called
            else:
                assert mock_box_session.post.called
        else:
            assert not test_file.preflight_check.called
mock_new_upload_accelerator_url,
upload_using_accelerator_fails,
is_stream,
etag,
sha1,
if_match_sha1_header,
):
# pylint:disable=too-many-locals
file_description = 'Test File Description'
content_created_at = '1970-01-01T00:00:00+00:00'
content_modified_at = '1970-01-01T11:11:11+11:11'
additional_attributes = {'attr': 123}
expected_url = '{0}/files/content'.format(API.UPLOAD_URL)
if upload_using_accelerator:
if upload_using_accelerator_fails:
mock_box_session.options.side_effect = BoxAPIException(400)
else:
mock_box_session.options.return_value = mock_accelerator_response_for_new_uploads
expected_url = mock_new_upload_accelerator_url
mock_box_session.post.return_value = mock_upload_response
if is_stream:
mock_file_stream = BytesIO(mock_content_response.content)
new_file = test_folder.upload_stream(
mock_file_stream,
basename(mock_file_path),
file_description,
upload_using_accelerator=upload_using_accelerator,
content_created_at=content_created_at,
content_modified_at=content_modified_at,
additional_attributes=additional_attributes,
'sha1': third_sha1,
}
part_four = {
'part_id': '4DBC872D',
'offset': 6,
'size': 1,
'sha1': fourth_sha1,
}
third_response_mock.json.return_value = {
'part': part_three
}
fourth_response_mock.json.return_value = {
'part': part_four
}
chunked_uploader = ChunkedUploader(first_upload_session_mock_object, stream, file_size)
first_upload_session_mock_object.upload_part_bytes.side_effect = [uploaded_part_mock_object, BoxAPIException(502)]
second_chunked_uploader.upload_session = second_upload_session_mock_object
second_upload_session_mock_object.upload_part_bytes.side_effect = [third_response_mock, fourth_response_mock]
second_upload_session_mock_object.get_parts.return_value = {
'entries': [
{
'part_id': 'CFEB4BA9',
'offset': 0,
'size': 2,
'sha1': None,
},
{
'part_id': '4DBB872D',
'offset': 2,
'size': 2,
'sha1': None,
},
def test_get_item_info_for_missing_file(box_client, constructor):
    """Deleting a nonexistent item must surface a 404 ``BoxAPIException``."""
    missing_item = constructor(box_client, '1')
    with pytest.raises(BoxAPIException) as caught:
        missing_item.delete()
    assert caught.value.status == 404
def test_delete(test_metadata_template, mock_box_session):
    """``delete()`` must DELETE the template's schema URL and report success."""
    schema_url = '{0}/metadata_templates/{1}/{2}/schema'.format(
        API.BASE_API_URL,
        test_metadata_template.scope,
        test_metadata_template.template_key,
    )
    mock_box_session.delete.return_value.ok = True
    outcome = test_metadata_template.delete()
    assert outcome is True
    mock_box_session.delete.assert_called_once_with(
        schema_url,
        expect_json_response=False,
        headers=None,
        params={},
    )