Returns all user-defined metadata for the specified directory.
:param str share_name:
Name of existing share.
:param str directory_name:
The path to the directory.
:param int timeout:
The timeout parameter is expressed in seconds.
:param str snapshot:
A string that represents the snapshot version, if applicable.
:return:
A dictionary of the directory metadata name-value pairs.
:rtype: dict(str, str)
'''
_validate_not_none('share_name', share_name)
_validate_not_none('directory_name', directory_name)
request = HTTPRequest()
request.method = 'GET'
request.host_locations = self._get_host_locations()
request.path = _get_path(share_name, directory_name)
request.query = {
    'restype': 'directory',
    'comp': 'metadata',
    'timeout': _int_to_str(timeout),
    'sharesnapshot': _to_str(snapshot)
}
return self._perform_request(request, _parse_metadata)
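# Usage sketch (assumption): the body above appears to come from
# FileService.get_directory_metadata in the legacy azure-storage-file SDK.
# A minimal caller might look like this; the account credentials, share name
# and directory name are illustrative placeholders.
from azure.storage.file import FileService

file_service = FileService(account_name='myaccount', account_key='mykey')
metadata = file_service.get_directory_metadata('myshare', 'mydir')
for name, value in metadata.items():
    print(name, value)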
def _delete_entity(partition_key, row_key, if_match):
    '''
    Constructs a delete entity request.
    '''
    _validate_not_none('if_match', if_match)
    _validate_not_none('partition_key', partition_key)
    _validate_not_none('row_key', row_key)
    request = HTTPRequest()
    request.method = 'DELETE'
    request.headers = {
        _DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1],
        'If-Match': _to_str(if_match)
    }
    return request
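# Illustrative call site (assumption): a table client would typically wrap the
# request builder above. The public method shown here, TableService.delete_entity
# from the legacy azure-storage-table SDK, is a plausible wrapper, not taken
# from this snippet.
from azure.storage.table import TableService

table_service = TableService(account_name='myaccount', account_key='mykey')
# if_match='*' matches any ETag, i.e. an unconditional delete.
table_service.delete_entity('mytable', 'mypartition', 'myrow', if_match='*')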
encoding, the string must be less than or equal to 64 bytes in size.
For a given blob, the value specified for the blockid
parameter must be the same length for each block. Note that the Base64
string must be URL-encoded.
:param str source_content_md5:
If given, the service will calculate the MD5 hash of the block content and compare against this value.
:param str lease_id:
Required if the blob has an active lease.
:param int timeout:
The timeout parameter is expressed in seconds.
"""
_validate_encryption_unsupported(self.require_encryption, self.key_encryption_key)
_validate_not_none('container_name', container_name)
_validate_not_none('blob_name', blob_name)
_validate_not_none('copy_source_url', copy_source_url)
_validate_not_none('block_id', block_id)
request = HTTPRequest()
request.method = 'PUT'
request.host_locations = self._get_host_locations()
request.path = _get_path(container_name, blob_name)
request.query = {
    'comp': 'block',
    'blockid': _encode_base64(_to_str(block_id)),
    'timeout': _int_to_str(timeout),
}
request.headers = {
    'x-ms-lease-id': _to_str(lease_id),
    'x-ms-copy-source': copy_source_url,
    'x-ms-source-content-md5': source_content_md5,
}
_validate_and_format_range_headers(
file.
:param progress_callback:
Callback for progress with signature function(current, total) where
current is the number of bytes transferred so far and total is the
size of the file, or None if the total size is unknown.
:type progress_callback: func(current, total)
:param int max_connections:
Maximum number of parallel connections to use.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
'''
_validate_not_none('share_name', share_name)
_validate_not_none('file_name', file_name)
_validate_not_none('local_file_path', local_file_path)
count = path.getsize(local_file_path)
with open(local_file_path, 'rb') as stream:
    self.create_file_from_stream(
        share_name, directory_name, file_name, stream,
        count, content_settings, metadata, validate_content, progress_callback,
        max_connections, timeout)
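# Usage sketch (assumption): the lines above look like the tail of
# FileService.create_file_from_path. A hedged example of uploading a local file
# with the progress callback described in the docstring; all names and paths
# are illustrative placeholders.
from azure.storage.file import FileService

def report_progress(current, total):
    # total may be None when the overall size is unknown
    print('uploaded {0} of {1} bytes'.format(current, total))

file_service = FileService(account_name='myaccount', account_key='mykey')
file_service.create_file_from_path(
    'myshare', 'mydir', 'myfile.bin', '/tmp/myfile.bin',
    progress_callback=report_progress, max_connections=4)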
if max_connections is greater than 1.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
:param str snapshot:
A string that represents the snapshot version, if applicable.
:return: A File with properties and metadata.
:rtype: :class:`~azure.storage.file.models.File`
'''
_validate_not_none('share_name', share_name)
_validate_not_none('file_name', file_name)
_validate_not_none('stream', stream)
if end_range is not None:
    _validate_not_none("start_range", start_range)

# the stream must be seekable if parallel download is required
if max_connections > 1:
    if sys.version_info >= (3,) and not stream.seekable():
        raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE)
    else:
        try:
            stream.seek(stream.tell())
        except (NotImplementedError, AttributeError):
            raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE)
# The service only provides transactional MD5s for chunks under 4MB.
# If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first
# chunk so a transactional MD5 can be retrieved.
first_get_size = self.MAX_SINGLE_GET_SIZE if not validate_content else self.MAX_CHUNK_GET_SIZE
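# Usage sketch (assumption): the checks above appear to belong to
# FileService.get_file_to_stream, which requires a seekable stream whenever
# max_connections > 1. A BytesIO buffer satisfies that requirement; names and
# credentials are illustrative placeholders.
from io import BytesIO
from azure.storage.file import FileService

file_service = FileService(account_name='myaccount', account_key='mykey')
buffer = BytesIO()
file_service.get_file_to_stream('myshare', 'mydir', 'myfile.bin', buffer,
                                max_connections=4, validate_content=True)
print('{0} bytes downloaded'.format(len(buffer.getvalue())))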
An ETag value, or the wildcard character (*). Specify this header
to perform the operation only if the resource's ETag does not match
the value specified. Specify the wildcard character (*) to perform
the operation only if the resource does not exist, and fail the
operation if it does exist.
:param int timeout:
The timeout parameter is expressed in seconds. This method may make
multiple calls to the Azure service and the timeout will apply to
each call individually.
:return: ETag and last modified properties for the Block Blob
:rtype: :class:`~azure.storage.blob.models.ResourceProperties`
'''
_validate_not_none('container_name', container_name)
_validate_not_none('blob_name', blob_name)
_validate_not_none('blob', blob)
_validate_not_none('index', index)
_validate_type_bytes('blob', blob)
if index < 0:
    raise IndexError(_ERROR_VALUE_NEGATIVE.format('index'))
if count is None or count < 0:
    count = len(blob) - index
stream = BytesIO(blob)
stream.seek(index)
return self.create_blob_from_stream(
    container_name=container_name,
    blob_name=blob_name,
    stream=stream,
    count=count,
When you establish a stored access policy on a table, it may take up to
30 seconds to take effect. During this interval, a shared access signature
that is associated with the stored access policy will throw an
:class:`AzureHttpError` until the access policy becomes active.
:param str table_name:
The name of an existing table.
:param signed_identifiers:
A dictionary of access policies to associate with the table. The
dictionary may contain up to 5 elements. An empty dictionary
will clear the access policies set on the table.
:type signed_identifiers: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
:param int timeout:
The server timeout, expressed in seconds.
'''
_validate_not_none('table_name', table_name)
_validate_access_policies(signed_identifiers)
request = HTTPRequest()
request.method = 'PUT'
request.host_locations = self._get_host_locations()
request.path = '/' + _to_str(table_name)
request.query = {
    'comp': 'acl',
    'timeout': _int_to_str(timeout),
}
request.body = _get_request_body(
    _convert_signed_identifiers_to_xml(signed_identifiers))
self._perform_request(request)
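# Usage sketch (assumption): the body above matches TableService.set_table_acl
# from the legacy azure-storage-table SDK. Setting one read-only stored access
# policy; the policy id 'readonly' and the credentials are illustrative
# placeholders.
from datetime import datetime, timedelta
from azure.storage.common.models import AccessPolicy
from azure.storage.table import TableService, TablePermissions

table_service = TableService(account_name='myaccount', account_key='mykey')
policy = AccessPolicy(permission=TablePermissions.QUERY,
                      expiry=datetime.utcnow() + timedelta(hours=1))
table_service.set_table_acl('mytable', signed_identifiers={'readonly': policy})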
:param str share_name:
Name of existing share.
:param str directory_name:
Name of directory to delete, including the path to the parent
directory.
:param bool fail_not_exist:
Specify whether to throw an exception when the directory doesn't
exist.
:param int timeout:
The timeout parameter is expressed in seconds.
:return: True if directory is deleted, False otherwise.
:rtype: bool
'''
_validate_not_none('share_name', share_name)
_validate_not_none('directory_name', directory_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host_locations = self._get_host_locations()
request.path = _get_path(share_name, directory_name)
request.query = {
    'restype': 'directory',
    'timeout': _int_to_str(timeout),
}
if not fail_not_exist:
    try:
        self._perform_request(request, expected_errors=[_RESOURCE_NOT_FOUND_ERROR_CODE])
        return True
    except AzureHttpError as ex:
        _dont_fail_not_exist(ex)
        return False
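# Usage sketch (assumption): the body above appears to be
# FileService.delete_directory. With fail_not_exist left False, a missing
# directory is reported through the boolean return value rather than an
# exception; names and credentials are illustrative placeholders.
from azure.storage.file import FileService

file_service = FileService(account_name='myaccount', account_key='mykey')
deleted = file_service.delete_directory('myshare', 'mydir', fail_not_exist=False)
print('deleted' if deleted else 'directory did not exist')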
def _merge_entity(entity, if_match, require_encryption=False, key_encryption_key=None):
    '''
    Constructs a merge entity request.
    '''
    _validate_not_none('if_match', if_match)
    _validate_entity(entity)
    _validate_encryption_unsupported(require_encryption, key_encryption_key)
    request = HTTPRequest()
    request.method = 'MERGE'
    request.headers = {
        _DEFAULT_CONTENT_TYPE_HEADER[0]: _DEFAULT_CONTENT_TYPE_HEADER[1],
        _DEFAULT_ACCEPT_HEADER[0]: _DEFAULT_ACCEPT_HEADER[1],
        'If-Match': _to_str(if_match)
    }
    request.body = _get_request_body(_convert_entity_to_json(entity))
    return request
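# Illustrative call site (assumption): a plausible public wrapper for the
# request builder above is TableService.merge_entity from the legacy
# azure-storage-table SDK. Merge updates only the properties present in the
# supplied entity and leaves the rest of the stored entity untouched.
from azure.storage.table import TableService

table_service = TableService(account_name='myaccount', account_key='mykey')
entity = {'PartitionKey': 'mypartition', 'RowKey': 'myrow', 'status': 'done'}
table_service.merge_entity('mytable', entity, if_match='*')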
next_marker field of a previous generator object if max_results was
specified and that generator has finished enumerating results. If
specified, this generator will begin returning results from the point
where the previous generator stopped.
:param int max_results:
Specifies the maximum number of handles to return,
including all directory elements. If the request does not specify
max_results or specifies a value greater than 5,000, the server will
return up to 5,000 items. Setting max_results to a value less than
or equal to zero results in error response code 400 (Bad Request).
:param int timeout:
The timeout parameter is expressed in seconds.
:param str snapshot:
A string that represents the snapshot version, if applicable.
"""
_validate_not_none('share_name', share_name)
request = HTTPRequest()
request.method = 'GET'
request.host_locations = self._get_host_locations()
request.path = _get_path(share_name, directory_name, file_name)
request.query = {
    'comp': 'listhandles',
    'marker': _to_str(marker),
    'maxresults': _int_to_str(max_results),
    'timeout': _int_to_str(timeout),
    'sharesnapshot': _to_str(snapshot)
}
request.headers = {
    'x-ms-recursive': _to_str(recursive)
}
return self._perform_request(request, _convert_xml_to_handles,