}
time.sleep(RETRY_INTERVAL)

# Poll for job status until it's finished.
print('Retrieving job status...')
for i in range(RETRIES_COUNT):
  job_status_response = mutate_job_service.get(selector)
  status = job_status_response[0]['status']
  if status in ('COMPLETED', 'FAILED'):
    break
  print('[%d] Current status is \'%s\', waiting %d seconds to retry...' %
        (i, status, RETRY_INTERVAL))
  time.sleep(RETRY_INTERVAL)

if status == 'FAILED':
  raise errors.GoogleAdsError('Job failed with reason: \'%s\'' %
                              job_status_response[0]['failure_reason'])
if status in ('PROCESSING', 'PENDING'):
  raise errors.GoogleAdsError('Job did not complete within %d seconds' %
                              (RETRY_INTERVAL * (RETRIES_COUNT - 1)))

# Status must be COMPLETED. Get the job result, re-using the same selector.
result_response = mutate_job_service.getResult(selector)

# Output results.
for index, result in enumerate(
    result_response['SimpleMutateResult']['results']):
  if 'PlaceHolder' in result:
    print('Operation [%d] - FAILED' % index)
  else:
    print('Operation [%d] - SUCCEEDED' % index)
if 'value' in response:
  feed = response['value'][0]
  link_text_feed_attribute_id = feed['attributes'][0]['id']
  final_url_feed_attribute_id = feed['attributes'][1]['id']
  line_2_feed_attribute_id = feed['attributes'][2]['id']
  line_3_feed_attribute_id = feed['attributes'][3]['id']
  print('Feed with name "%s" and ID "%s" was added with' %
        (feed['name'], feed['id']))
  print('\tText attribute ID "%s" and Final URL attribute ID "%s".' %
        (link_text_feed_attribute_id, final_url_feed_attribute_id))
  print('\tLine 2 attribute ID "%s" and Line 3 attribute ID "%s".' %
        (line_2_feed_attribute_id, line_3_feed_attribute_id))
  sitelinks_data['feedId'] = feed['id']
  sitelinks_data['linkTextFeedId'] = link_text_feed_attribute_id
  sitelinks_data['finalUrlFeedId'] = final_url_feed_attribute_id
  sitelinks_data['line2FeedId'] = line_2_feed_attribute_id
  sitelinks_data['line3FeedId'] = line_3_feed_attribute_id
else:
  raise errors.GoogleAdsError('No feeds were added.')
# Create site links feed items.
items_data = [
    {'text': 'Home', 'finalUrls': 'http://www.example.com',
     'line2': 'Home line 2', 'line3': 'Home line 3'},
    {'text': 'Stores', 'finalUrls': 'http://www.example.com/stores',
     'line2': 'Stores line 2', 'line3': 'Stores line 3'},
    {'text': 'On Sale', 'finalUrls': 'http://www.example.com/sale',
     'line2': 'On Sale line 2', 'line3': 'On Sale line 3'},
    {'text': 'Support', 'finalUrls': 'http://www.example.com/support',
     'line2': 'Support line 2', 'line3': 'Support line 3'},
    {'text': 'Products', 'finalUrls': 'http://www.example.com/products',
     'line2': 'Products line 2', 'line3': 'Products line 3'},
    {'text': 'About Us', 'finalUrls': 'http://www.example.com/about',
     'line2': 'About line 2', 'line3': 'About line 3', 'locationId': '21137'}
]
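
# A minimal sketch of turning items_data into FeedItem ADD operations using
# the attribute IDs captured in sitelinks_data above. The operation shape
# follows the AdWords FeedItemService; variable names here are illustrative,
# not taken verbatim from this snippet.
feed_item_operations = []
for item in items_data:
  feed_item_operations.append({
      'operator': 'ADD',
      'operand': {
          'feedId': sitelinks_data['feedId'],
          'attributeValues': [
              {'feedAttributeId': sitelinks_data['linkTextFeedId'],
               'stringValue': item['text']},
              # Final URLs are a URL_LIST attribute, hence stringValues.
              {'feedAttributeId': sitelinks_data['finalUrlFeedId'],
               'stringValues': [item['finalUrls']]},
              {'feedAttributeId': sitelinks_data['line2FeedId'],
               'stringValue': item['line2']},
              {'feedAttributeId': sitelinks_data['line3FeedId'],
               'stringValue': item['line3']}
          ]
      }
  })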
report_job = {
    'reportQuery': {
        'dimensions': ['DATE', 'AD_UNIT_NAME'],
        'adUnitView': 'HIERARCHICAL',
        'columns': ['AD_SERVER_IMPRESSIONS', 'AD_SERVER_CLICKS',
                    'DYNAMIC_ALLOCATION_INVENTORY_LEVEL_IMPRESSIONS',
                    'DYNAMIC_ALLOCATION_INVENTORY_LEVEL_CLICKS',
                    'TOTAL_INVENTORY_LEVEL_IMPRESSIONS',
                    'TOTAL_INVENTORY_LEVEL_CPM_AND_CPC_REVENUE'],
        'dateRangeType': 'LAST_WEEK',
        'statement': statement.ToStatement()
    }
}
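
# For reference, the `statement` used above would typically be built with the
# ad_manager StatementBuilder beforehand. A sketch, assuming the googleads
# ad_manager module and a hypothetical root_ad_unit_id value:
statement = (ad_manager.StatementBuilder(version='v201902')
             .Where('parentAdUnitId = :id')
             .WithBindVariable('id', int(root_ad_unit_id))
             # Report statements should not be paged, so clear LIMIT/OFFSET.
             .Limit(None)
             .Offset(None))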
try:
  # Run the report and wait for it to finish.
  report_job_id = report_downloader.WaitForReport(report_job)
except errors.AdManagerReportError as e:
  print('Failed to generate report. Error was: %s' % e)
# Change to your preferred export format.
export_format = 'CSV_DUMP'
report_file = tempfile.NamedTemporaryFile(suffix='.csv.gz', delete=False)

# Download report data.
report_downloader.DownloadReportToFile(
    report_job_id, export_format, report_file)
report_file.close()

# Display results.
print('Report job with id "%s" downloaded to:\n%s' % (
    report_job_id, report_file.name))
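
# Reading the download back is not part of the original snippet, but since
# CSV_DUMP payloads are gzip-compressed, a sketch using the standard
# library's gzip module would look like this:
import gzip
with gzip.open(report_file.name, 'rt') as gz_file:
  for line in gz_file:
    print(line.rstrip())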
        'columns': ['AD_SERVER_IMPRESSIONS', 'AD_SERVER_CLICKS',
                    'AD_SERVER_CTR', 'AD_SERVER_CPM_AND_CPC_REVENUE',
                    'AD_SERVER_WITHOUT_CPD_AVERAGE_ECPM'],
        'dateRangeType': 'CUSTOM_DATE',
        'startDate': start_date,
        'endDate': end_date
    }
}
# Initialize a DataDownloader.
report_downloader = client.GetDataDownloader(version='v201902')

try:
  # Run the report and wait for it to finish.
  report_job_id = report_downloader.WaitForReport(report_job)
except errors.AdManagerReportError as e:
  print('Failed to generate report. Error was: %s' % e)

with tempfile.NamedTemporaryFile(
    suffix='.csv.gz', mode='wb', delete=False) as report_file:
  # Download report data.
  report_downloader.DownloadReportToFile(
      report_job_id, 'CSV_DUMP', report_file)
# Create a PQL query to fetch the line item data.
line_items_pql_query = 'SELECT Id, LineItemType, Status FROM LineItem'
# Download the response from the PQL select statement.
line_items = report_downloader.DownloadPqlResultToList(line_items_pql_query)
# Use pandas to join the report data and the PQL result into a match table.
report = pandas.read_csv(report_file.name)
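
# A sketch of the join itself. DownloadPqlResultToList returns a header row
# followed by data rows, which maps directly onto a DataFrame; the report
# column name 'Dimension.LINE_ITEM_ID' is an assumption about the report
# query's dimensions, not something shown in this excerpt.
line_items_df = pandas.DataFrame(data=line_items[1:], columns=line_items[0])
match_table = report.merge(line_items_df,
                           left_on='Dimension.LINE_ITEM_ID', right_on='id')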
operations = [{
    'operator': 'ADD',
    'operand': {
        'xsi_type': 'AdGroupAd',
        'adGroupId': ad_group_id,
        'ad': {
            'xsi_type': 'ExpandedTextAd',
            'headlinePart1': 'Luxury Cruise to Mars',
            'headlinePart2': 'Visit the Red Planet in style.',
            'description': 'Low-gravity fun for all astronauts in orbit',
            'finalUrls': ['http://www.example.com'],
        }
    }
}]

try:
  ad_group_ad_service.mutate(operations)
except errors.GoogleAdsServerFault as e:
  print('Validation correctly failed with "%s".' % str(e))
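
# Note: for the mutate above to fail validation without actually creating the
# ad, the client would have been put into validate-only mode beforehand. A
# minimal sketch, assuming the AdWordsClient instance is named `client` as in
# the other snippets here:
client.validate_only = True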
# Create an operation to add the feed.
operations = [{
    'operator': 'ADD',
    'operand': customer_extension_setting
}]

# Add the price extension.
response = customer_extension_setting_service.mutate(operations)

# Print the results.
if 'value' in response:
  print('Extension setting with type "%s" was added to your account.'
        % response['value'][0]['extensionType'])
else:
  raise errors.GoogleAdsError('No extension settings were added.')
            'fieldId': PLACEHOLDER_FIELD_LINE_3_TEXT
        }
    ]
}

response = feed_mapping_service.mutate([
    {'operator': 'ADD', 'operand': feed_mapping}
])

if 'value' in response:
  feed_mapping = response['value'][0]
  print('Feed mapping with ID %s and placeholder type %s was saved for feed'
        ' with ID %s.' %
        (feed_mapping['feedMappingId'], feed_mapping['placeholderType'],
         feed_mapping['feedId']))
else:
  raise errors.GoogleAdsError('No feed mappings were added.')
# Construct a matching function that associates the sitelink feed items to the
# campaign, and set the device preference to Mobile. For more details, see the
# matching function guide:
# https://developers.google.com/adwords/api/docs/guides/feed-matching-functions
matching_function_string = (
    'AND(IN(FEED_ITEM_ID, {%s}), EQUALS(CONTEXT.DEVICE, \'Mobile\'))' %
    re.sub(r'\[|\]|L', '', str(sitelinks_data['feedItemIds'])))

campaign_feed = {
    'feedId': sitelinks_data['feedId'],
    'campaignId': campaign_id,
    'matchingFunction': {'functionString': matching_function_string},
    # Specifying placeholder types on the CampaignFeed allows the same feed
    # to be used for different placeholders in different Campaigns.
    'placeholderTypes': [PLACEHOLDER_SITELINKS]
}
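
# A sketch of the step that would follow: attaching the CampaignFeed via an
# ADD mutate, assuming `client` is the initialized AdWordsClient and the
# v201809 endpoint used elsewhere in these snippets.
campaign_feed_service = client.GetService('CampaignFeedService',
                                          version='v201809')
response = campaign_feed_service.mutate([
    {'operator': 'ADD', 'operand': campaign_feed}])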
def UploadOperations(self, operations, is_last=False):
  """Uploads operations to the given uploadUrl in incremental steps.

  Note: Each list of operations is expected to contain operations of the
  same type, similar to how one would normally send operations in an
  AdWords API Service request.

  Args:
    operations: one or more lists of operations as would be sent to the
      AdWords API for the associated service.
    is_last: a boolean indicating whether this is the final increment to be
      added to the batch job.
  """
  if self._is_last:
    raise googleads.errors.AdWordsBatchJobServiceInvalidOperationError(
        'Can\'t add new operations to a completed incremental upload.')
  # Build the request.
  req = self._request_builder.BuildUploadRequest(
      self._upload_url, operations,
      current_content_length=self._current_content_length, is_last=is_last)
  # Make the request, ignoring the urllib.error.HTTPError raised due to HTTP
  # status code 308 (for resumable uploads).
  try:
    _batch_job_logger.debug('Outgoing request: %s %s %s',
                            req.get_full_url(), req.headers, req.data)
    self._url_opener.open(req)
    if _batch_job_logger.isEnabledFor(logging.INFO):
      _batch_job_logger.info('Request summary: %s',
                             self._ExtractRequestSummaryFields(req))
  except urllib.error.HTTPError as e:
    # Completion of the truncated try block, guided by the comment above:
    # a 308 response is expected for resumable uploads; re-raise anything else.
    if e.code != 308:
      raise
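
# Hedged usage sketch for the method above, based on the googleads
# BatchJobHelper API (GetBatchJobHelper and GetIncrementalUploadHelper come
# from that library; `upload_url`, `first_operations`, and `last_operations`
# are placeholders):
batch_job_helper = client.GetBatchJobHelper(version='v201809')
incremental_uploader = batch_job_helper.GetIncrementalUploadHelper(upload_url)
incremental_uploader.UploadOperations([first_operations])
incremental_uploader.UploadOperations([last_operations], is_last=True)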
customer_feed_operation = {
    'xsi_type': 'CustomerFeedOperation',
    'operator': 'ADD',
    'operand': customer_feed
}

customer_feed_service = client.GetService(
    'CustomerFeedService', version='v201809')
added_customer_feed = None

i = 0
while i < MAX_CUSTOMER_FEED_ADD_ATTEMPTS and added_customer_feed is None:
  try:
    added_customer_feed = customer_feed_service.mutate([
        customer_feed_operation])['value'][0]
  except errors.GoogleAdsServerFault:
    # Wait using exponential backoff policy.
    sleep_seconds = 2 ** i
    print('Attempt %d to add the CustomerFeed was not successful. '
          'Waiting %d seconds before trying again.\n' % (i, sleep_seconds))
    time.sleep(sleep_seconds)
  i += 1

if added_customer_feed is None:
  raise errors.GoogleAdsError(
      'Could not create the CustomerFeed after %s attempts. Please retry the '
      'CustomerFeed ADD operation later.' % MAX_CUSTOMER_FEED_ADD_ATTEMPTS)

print('Added CustomerFeed for feed ID %d and placeholder type %d\n'
      % (added_customer_feed['id'], added_customer_feed['placeholderTypes'][0]))