How to use the hdfs.__main__._Progress class in hdfs

To help you get started, we’ve selected a few hdfs examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github mtth / hdfs / test / test_main.py View on Github external
def test_from_local_path(self):
    # Build a small local tree (one top-level file plus one nested file)
    # and verify that _Progress.from_local_path tallies the total byte
    # count and the number of pending files correctly.
    with temppath() as root:
      os.mkdir(root)
      top_file = osp.join(root, 'foo')
      with open(top_file, 'w') as handle:
        handle.write('hey')
      nested_dir = osp.join(root, 'bar')
      os.mkdir(nested_dir)
      nested_file = osp.join(nested_dir, 'baz')
      with open(nested_file, 'w') as handle:
        handle.write('hello')
      # 'hey' (3 bytes) + 'hello' (5 bytes) across two files.
      with temppath() as log_path:
        with open(log_path, 'w') as log_writer:
          tracker = _Progress.from_local_path(root, writer=log_writer)
          eq_(tracker._total_bytes, 8)
          eq_(tracker._pending_files, 2)
github mtth / hdfs / test / test_main.py View on Github external
def test_single_file(self):
    # Track a single 100-byte file: report partial progress, then the
    # remainder, then the -1 sentinel that marks the transfer complete.
    with temppath() as log_path:
      with open(log_path, 'w') as log_writer:
        tracker = _Progress(100, 1, writer=log_writer)
        tracker('foo', 60)
        # After the first report the file is in flight, not pending.
        eq_(tracker._data['foo'], 60)
        eq_(tracker._pending_files, 0)
        eq_(tracker._downloading_files, 1)
        tracker('foo', 40)
        tracker('foo', -1)
        # The sentinel moves the file from downloading to complete.
        eq_(tracker._downloading_files, 0)
        eq_(tracker._complete_files, 1)
github mtth / hdfs / hdfs / __main__.py View on Github external
"""
  args = docopt(__doc__, argv=argv, version=__version__)
  if not client:
    client = configure_client('hdfscli', args)
  elif args['--log']:
    raise HdfsError('Logging is only available when no client is specified.')
  hdfs_path = args['HDFS_PATH']
  local_path = args['LOCAL_PATH']
  n_threads = parse_arg(args, '--threads', int)
  force = args['--force']
  silent = args['--silent']
  if args['download']:
    chunk_size = 2 ** 16
    if local_path == '-':
      if not sys.stdout.isatty() and sys.stderr.isatty() and not silent:
        progress = _Progress.from_hdfs_path(client, hdfs_path)
      else:
        progress = None
      with client.read(
        hdfs_path,
        chunk_size=chunk_size,
        progress=progress,
      ) as reader:
        # https://stackoverflow.com/a/23932488/1062617
        stdout = getattr(sys.stdout, 'buffer', sys.stdout)
        for chunk in reader:
          stdout.write(chunk)
    else:
      if sys.stderr.isatty() and not silent:
        progress = _Progress.from_hdfs_path(client, hdfs_path)
      else:
        progress = None
github mtth / hdfs / hdfs / __main__.py View on Github external
hdfs_path,
        (line for line in sys.stdin), # Doesn't work with stdin.
        append=append,
        overwrite=force,
      )
    else:
      if append:
        # TODO: Add progress tracking here.
        if osp.isfile(local_path):
          with open(local_path) as reader:
            client.write(hdfs_path, reader, append=True)
        else:
          raise HdfsError('Can only append when uploading a single file.')
      else:
        if sys.stderr.isatty() and not silent:
          progress = _Progress.from_local_path(local_path)
        else:
          progress = None
        client.upload(
          hdfs_path,
          local_path,
          overwrite=force,
          n_threads=n_threads,
          progress=progress,
        )
  else:
    banner = (
      '\n'
      'Welcome to the interactive HDFS python shell.\n'
      'The HDFS client is available as `CLIENT`.\n'
    )
    namespace = {'CLIENT': client}