# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_string_credentials_user(self):
    """User credentials supplied as a raw JSON string should authenticate."""
    with open(self.user_file, 'r') as handle:
        project.initialize(_user=handle.read())
    oauth = get_service('oauth2', 'v2', 'user')
    profile = oauth.userinfo().get().execute()
    self.assertIn('email', profile)
    self.helper_refresh()
def test_string_credentials_service(self):
    """Service credentials supplied as a raw JSON string should authenticate."""
    with open(self.service_file, 'r') as handle:
        project.initialize(_service=handle.read())
    resource_manager = get_service('cloudresourcemanager', 'v1', 'service')
    listing = resource_manager.projects().list().execute()
    self.assertIn('projects', listing)
def account_create():
    """Return the first existing Account, or bootstrap one from the UI credentials."""
    accounts = Account.objects.all()
    if accounts:
        # reuse whichever account already exists
        return accounts[0]
    # no account yet: initialize auth from the UI constants and create one
    project.initialize(_client=UI_CLIENT, _service=UI_SERVICE, _user=UI_USER)
    credentials = get_credentials('user')
    return Account.objects.get_or_create_user(credentials, 'password')
def test_dictionary_credentials_user(self):
    """User credentials supplied as a parsed dictionary should authenticate."""
    with open(self.user_file, 'r') as handle:
        project.initialize(_user=json.load(handle))
    oauth = get_service('oauth2', 'v2', 'user')
    profile = oauth.userinfo().get().execute()
    self.assertIn('email', profile)
    self.helper_refresh()
def test_remote_credentials_user(self):
    """Credentials persisted via an Account should round-trip through its storage path."""
    project.initialize(_user=self.user_file)
    account = Account.objects.get_or_create_user(get_credentials('user'), 'password')
    clear_credentials_cache()
    # re-initialize from the account's stored credentials rather than the local file
    remote_path = account.get_credentials_path()
    project.initialize(_user=remote_path)
    self.assertEqual(project.recipe['setup']['auth']['user'], remote_path)
    oauth = get_service('oauth2', 'v2', 'user')
    profile = oauth.userinfo().get().execute()
    self.assertIn('email', profile)
    self.helper_refresh()
def object_delete(auth, path):
    """Delete a GCS object; path is 'bucket:objectname'. Returns the API response."""
    bucket, name = path.split(':', 1)
    storage = get_service('storage', 'v1', auth)
    return storage.objects().delete(bucket=bucket, object=name).execute()
def object_put(auth, path, data, mimetype='application/octet-stream'):
    """Upload a file-like object to GCS with a resumable, chunked upload.

    Args:
        auth: auth scheme string passed through to get_service.
        path: 'bucket:objectname' destination.
        data: file-like object holding the bytes to upload.
        mimetype: content type recorded on the object.

    Raises:
        HttpError: on non-retryable (status < 500) API errors, or after
            repeated transient failures.
    """
    bucket, filename = path.split(':', 1)
    service = get_service('storage', 'v1', auth)
    media = MediaIoBaseUpload(data, mimetype=mimetype, chunksize=CHUNKSIZE, resumable=True)
    request = service.objects().insert(bucket=bucket, name=filename, media_body=media)
    response = None
    errors = 0
    while response is None:
        error = None
        try:
            status, response = request.next_chunk()
            # Python 3 print call (the original used Python 2 print-statement syntax,
            # which is a syntax error in this otherwise Python 3 file).
            if project.verbose and status:
                print("Uploaded %d%%." % int(status.progress() * 100))
        except HttpError as e:
            # only 5xx responses are considered transient and retried
            if e.resp.status < 500:
                raise
            error = e
        except (httplib2.HttpLib2Error, IOError) as e:
            error = e
        if error:
            # the original initialized `errors` but never incremented it, so a
            # persistent 5xx looped forever; bound consecutive transient failures
            errors += 1
            if errors >= 5:
                raise error
        else:
            errors = 0
def object_get_chunks(auth, path, chunksize=CHUNKSIZE, encoding=None):
    """Stream a GCS object, yielding one chunk at a time as it downloads.

    Args:
        auth: auth scheme string passed through to get_service.
        path: 'bucket:objectname' source.
        chunksize: bytes requested per download chunk.
        encoding: if given, each chunk is decoded and yielded as str; otherwise
            the shared BytesIO buffer is yielded (consume it before advancing).

    Yields:
        str chunks when encoding is set, else the rewound BytesIO buffer.

    Raises:
        HttpError: on non-retryable (status < 500) API errors, or after
            repeated transient failures.
    """
    bucket, filename = path.split(':', 1)
    service = get_service('storage', 'v1', auth)
    data = BytesIO()
    request = service.objects().get_media(bucket=bucket, object=filename)
    media = MediaIoBaseDownload(data, request, chunksize=chunksize)
    retries = 0
    done = False
    while not done:
        error = None
        try:
            progress, done = media.next_chunk()
            if progress:
                print('Download %d%%' % int(progress.progress() * 100))
            data.seek(0)
            yield data.read().decode(encoding) if encoding else data
            # rewind AND truncate so a shorter final chunk cannot re-read stale
            # tail bytes left over from a previous, larger chunk
            data.seek(0)
            data.truncate()
        # NOTE(review): the original `try` had no except clause at all (a syntax
        # error) — `error = None` and `retries = 0` were vestiges of dropped
        # handlers; restored retry handling mirroring object_put.
        except HttpError as e:
            if e.resp.status < 500:
                raise
            error = e
        except (httplib2.HttpLib2Error, IOError) as e:
            error = e
        if error:
            retries += 1
            if retries >= 5:
                raise error
        else:
            retries = 0
# NOTE(review): this span is the interior of a task function whose `def` line is
# not visible in this chunk; `jobs`, `dt_timestamp`, `dt_move`, `table_exists`
# and `object_list` are defined elsewhere — confirm against the full file.
# legacy deprecated ( do not use )
if 'path' in project.task: project.task['paths'] = [project.task['path']]
# loop all dt files to match pattern or match any pattern
print('PATHS', project.task['paths'])
for path in (project.task['paths'] or ['']):
print(path)
# list every DT object in the bucket under this path prefix
for dt_object in object_list(project.task['auth'], '%s:%s' % (project.task['bucket'], path), raw=True):
dt_size = dt_object['size']
dt_file = dt_object['name']
dt_time = dt_timestamp(dt_file)
# partition name is the file name up to the first '.'
dt_partition = dt_file.split('.', 1)[0]
# move the file only when it falls inside the configured days/hours window
# (no window configured means move everything)
if ((project.task.get('days') is None and project.task.get('hours') is None) or (dt_time > project.now - timedelta(days=project.task.get('days', 60), hours=project.task.get('hours', 0)))):
# skip partitions that already exist in the destination dataset
if not table_exists(project.task['to']['auth'], project.id, project.task['to']['dataset'], dt_partition):
dt_move(dt_object, dt_partition, jobs)
else:
if project.verbose: print ('DT Partition Exists:', dt_partition)
# block until every queued BigQuery load job completes
for count, job in enumerate(jobs):
print('Waiting For Job: %d of %d' % (count + 1, len(jobs)))
job_wait(project.task['to']['auth'], job)
def job_wait(auth, job):
    """Poll a BigQuery job every 5 seconds until it completes, raising on errors."""
    if not job:
        return
    if project.verbose:
        print('BIGQUERY JOB WAIT:', job['jobReference']['jobId'])
    request = API_BigQuery(auth).jobs().get(
        projectId=job['jobReference']['projectId'],
        jobId=job['jobReference']['jobId']
    )
    while True:
        sleep(5)
        if project.verbose:
            print('.', end='')
        sys.stdout.flush()
        result = API_Retry(request)
        status = result['status']
        if 'errors' in status:
            raise Exception('BigQuery Job Error: %s' % ' '.join([e['message'] for e in status['errors']]))
        if 'errorResult' in status:
            raise Exception('BigQuery Job Error: %s' % status['errorResult']['message'])
        if status['state'] == 'DONE':
            if project.verbose:
                print('JOB COMPLETE:', result['id'])
            return