# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): this fragment is the body of an "entity" task whose `def` line is
# outside this chunk; leading indentation was stripped in the source and has been
# reconstructed from the control flow — confirm against the original file.
# The Python-2 `print` statements were converted to the `print()` calls used
# elsewhere in this file (e.g. `print('LINEITEM')`), since Py2 prints are syntax
# errors under Python 3.
project.task['accounts'] = [project.task['partner_id']]

# create entities
for entity in project.task['entities']:
  if project.verbose: print('ENTITY:', entity)

  # write public files only once
  if entity in PUBLIC_FILES:
    path = 'gdbm-public:entity/%s.0.%s.json' % (project.date.strftime('%Y%m%d'), entity)
    schema = Entity_Schema_Lookup[entity]
    move_entity(project, path, entity, schema, 'WRITE_TRUNCATE')

  # supports multiple partners, first one resets table, others append
  else:
    disposition = 'WRITE_TRUNCATE'
    for account in get_rows('user', project.task['partners']):

      # if advertiser given do not run it ( SAFETY )
      if ':' in str(account):
        print('WARNING: Skipping advertiser: ', account)
        continue

      if project.verbose: print('PARTNER:', account)

      path = 'gdbm-%s:entity/%s.0.%s.json' % (account, project.date.strftime('%Y%m%d'), entity)
      schema = Entity_Schema_Lookup[entity]
      move_entity(project, path, entity, schema, disposition)
      # after the first partner resets the table, subsequent partners append
      disposition = 'WRITE_APPEND'
# NOTE(review): fragment of a sheets task body; the enclosing `def` is outside
# this chunk. Indentation was stripped in the source and has been reconstructed
# from the obvious structure (two parallel guarded calls).

# write data if specified: read rows from the configured source and overwrite
# the target range of the sheet.
if 'write' in project.task:
  rows = get_rows(project.task['auth'], project.task['write'])
  sheets_write(
    project.task['auth'],
    project.task['sheet'],
    project.task['tab'],
    project.task['range'],
    rows,
    append = False
  )

# append data if specified: same call shape as above, but rows are appended
# after the existing data instead of replacing it.
if 'append' in project.task:
  rows = get_rows(project.task['auth'], project.task['append'])
  sheets_write(
    project.task['auth'],
    project.task['sheet'],
    project.task['tab'],
    project.task['range'],
    rows,
    append = True
  )
# move data if specified
# NOTE(review): this fragment is truncated mid-call — the `sheets_read(...)`
# argument list, and whatever consumes `rows`, continue outside this chunk.
# Indentation also appears stripped; confirm nesting against the original file.
if 'out' in project.task:
rows = sheets_read(
project.task['auth'],
project.task['sheet'],
project.task['tab'],
# NOTE(review): leading indentation was stripped from this fragment, and it is
# truncated — `request` is built on the last line but never polled/executed
# here; the operation-wait logic presumably continues outside this chunk.
# Left byte-identical rather than guessing at the missing part.
def sdf_download(auth, version, partner_id, file_types, filter_type, filter_ids_obj):
# Build and kick off an asynchronous DV360 SDF download task for the given
# partner, filtered to the requested file types and filter ids.
#Read Filter Ids
filter_ids = list(get_rows(auth, filter_ids_obj))
# Request body for sdfdownloadtasks.create; idFilter is unused because the
# parentEntityFilter form is used instead.
body = {
"version": version,
"partnerId": partner_id,
"parentEntityFilter": {
"fileType": file_types,
"filterType": filter_type,
"filterIds": filter_ids
},
"idFilter": None
}
# Create the long-running download task; the response is an Operation.
operation = API_DV360_Beta(auth).sdfdownloadtasks().create(body=body).execute()
if operation and 'name' in operation:
# Prepare the operations.get call used to poll the task by name
# (polling/execution of `request` happens past the end of this fragment).
request = API_DV360_Beta(auth).sdfdownloadtasks().operations().get(name=operation['name'])
# NOTE(review): indentation was stripped from this fragment and it is truncated —
# the code that writes `rows` to the configured 'out' target continues outside
# this chunk. Left byte-identical rather than guessing at the missing part.
def lineitem():
# Handler for the 'lineitem' task: reads line items filtered by exactly one of
# advertisers / insertion_orders / line_items (elif chain — first key present
# in project.task['read'] wins) and prepares the result for a bigquery output.
if project.verbose: print('LINEITEM')

if 'read' in project.task:
advertisers = []
insertion_orders = []
line_items = []

if 'advertisers' in project.task['read']:
advertisers = get_rows(project.task['auth'], project.task['read']['advertisers'])

elif 'insertion_orders' in project.task['read']:
insertion_orders = get_rows(project.task['auth'], project.task['read']['insertion_orders'])

elif 'line_items' in project.task['read']:
line_items = get_rows(project.task['auth'], project.task['read']['line_items'])

rows = lineitem_read(
project.task['auth'],
advertisers,
insertion_orders,
line_items
)

if rows:
if 'bigquery' in project.task['read']['out']:
# Force the known schema and no header skipping on the bigquery output.
project.task['read']['out']['bigquery']['schema'] = LineItem_Read_Schema
project.task['read']['out']['bigquery']['skip_rows'] = 0
# NOTE(review): this is a duplicate of the preceding lineitem() fragment,
# truncated even earlier (cut off right after the bigquery check). Indentation
# was stripped in the source. Left byte-identical; likely a scrape artifact —
# confirm against the original file and remove one copy.
def lineitem():
if project.verbose: print('LINEITEM')

if 'read' in project.task:
advertisers = []
insertion_orders = []
line_items = []

if 'advertisers' in project.task['read']:
advertisers = get_rows(project.task['auth'], project.task['read']['advertisers'])

elif 'insertion_orders' in project.task['read']:
insertion_orders = get_rows(project.task['auth'], project.task['read']['insertion_orders'])

elif 'line_items' in project.task['read']:
line_items = get_rows(project.task['auth'], project.task['read']['line_items'])

rows = lineitem_read(
project.task['auth'],
advertisers,
insertion_orders,
line_items
)

if rows:
if 'bigquery' in project.task['read']['out']:
def twitter_trends_places():
  """Yield one row per trending topic for each configured place (generator).

  Reads place ids from project.task['trends']['places'], queries the Twitter
  'trends/place' endpoint for each, and yields
  [place, name, url, promoted_content, query, tweet_volume] rows.
  Sleeps between place requests to stay under the API rate limit.

  NOTE(review): leading indentation was stripped in the source and has been
  reconstructed; the placement of the progress print and sleep inside the
  outer loop is inferred — confirm against the original file.
  """
  if project.verbose: print('TWITTER TRENDS PLACE')

  # NOTE(review): leftover debug print — it also materializes the places source
  # a second time; consider removing once verified.
  print("PL", list(get_rows(project.task['auth'], project.task['trends']['places'])))

  for place in get_rows(project.task['auth'], project.task['trends']['places']):
    if project.verbose: print('PLACE', place)

    results = get_twitter_api().request('trends/place', {'id':int(place)})

    for r in results:
      if project.verbose: print('RESULT', r['name'])
      yield [place, r['name'], r['url'], r['promoted_content'], r['query'], r['tweet_volume']]

    # progress marker plus throttle: 75 requests per 15-minute window
    print('.', end='')
    sleep(15 * 60 / 75) # rate limit ( improve to retry )
def email_send():
  """Send the email described by project.task['send'].

  Pulls from/to/subject/text/html straight from the task config ('cc' is
  optional, defaulting to ''), and attaches rows produced by get_rows under
  the configured attachment filename. Indentation was stripped in the source
  and has been reconstructed (the structure here is a single guarded call).
  """
  if project.verbose: print('EMAIL SEND')

  send_email(
    'user',
    project.task['send']['from'],
    project.task['send']['to'],
    project.task['send'].get('cc', ''),
    project.task['send']['subject'],
    project.task['send']['text'],
    project.task['send']['html'],
    project.task['send']['attachment']['filename'],
    get_rows('user', project.task['send']['attachment']),
  )
def twitter_trends_closest():
  """Yield trend-location rows for each (lat, lon) read from the task config (generator).

  Indentation was stripped in the source and has been reconstructed from the
  control flow — confirm against the original file.
  """
  if project.verbose: print('TWITTER TRENDS CLOSEST')

  for row in get_rows(project.task['auth'], project.task['trends']['closest']):
    lat, lon = row[0], row[1]

    # NOTE(review): `api` is not defined anywhere in this fragment — the sibling
    # twitter_trends_places() uses get_twitter_api(); confirm and align.
    results = api.request('trends/closest', {'lat':lat, 'long':lon})

    for r in results:
      yield [lat, lon, r['country'], r['countryCode'], r['name'], r['parentid'], r['placeType']['code'], r['placeType']['name'], r['url'], r['woeid']]
}
# NOTE(review): fragment of a report-filter helper — the `def` line and the
# opening of its docstring are outside this chunk (the first two lines below
# are the tail of that docstring), and the fragment is truncated mid-append at
# the bottom. Indentation was stripped, and the line
# `for f, d in filters.items(): for v in get_rows(auth, d):` fuses two nested
# for-headers onto one line — clearly a scrape artifact; confirm the real
# nesting against the original file. Left byte-identical rather than guessing.
Args: * auth: (string) Either user or service. * body: (json) the report body ( with or without filters ) * filters: (json) a dictionary of filters to apply ( see above examples )
Returns: * body: ( json ) modified report body """
# Work on a copy so the caller's body is not mutated.
new_body = body.copy()
for f, d in filters.items(): for v in get_rows(auth, d):
# accounts are specified in a unique part of the report json
if f == 'accountId':
new_body['accountId'] = v
# activities are specified in a unique part of the report json
elif f == 'dfa:activity':
new_body['reachCriteria']['activities'].setdefault('filters', []).append({
"kind":"dfareporting#dimensionValue",
"dimensionName": f,
"id": v
})
# all other filters go in the same place
else:
new_body.setdefault('criteria', {}).setdefault('dimensionFilters', []).append({