Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of an image-upload loop; the enclosing function, the
# loop header, and the bindings of `image`, `url`, and `ids` start outside
# this excerpt. Indentation has been stripped in this dump.
continue
# Fetch the image bytes so they can be re-uploaded to Twitter.
image_resp = util.urlopen(url)
# Reject anything that is not a supported image MIME type; on failure the
# helper returns an error value that is propagated to the caller.
bad_type = self._check_mime_type(url, image_resp, IMAGE_MIME_TYPES,
'JPG, PNG, GIF, and WEBP images')
if bad_type:
return bad_type
# Sign the upload request with OAuth1 (key + secret) for a POST.
headers = twitter_auth.auth_header(
API_UPLOAD_MEDIA, self.access_token_key, self.access_token_secret, 'POST')
resp = util.requests_post(API_UPLOAD_MEDIA,
files={'media': image_resp},
headers=headers)
resp.raise_for_status()
logging.info('Got: %s', resp.text)
# Twitter returns the id as a string in media_id_string; collect it for the
# caller to attach to the tweet.
media_id = source.load_json(resp.text, API_UPLOAD_MEDIA)['media_id_string']
ids.append(media_id)
# Optional alt text: truncate to Twitter's limit and set it via the
# media metadata endpoint.
alt = image.get('displayName')
if alt:
alt = util.ellipsize(alt, chars=MAX_ALT_LENGTH)
headers = twitter_auth.auth_header(
API_MEDIA_METADATA, self.access_token_key, self.access_token_secret, 'POST')
resp = util.requests_post(API_MEDIA_METADATA,
json={'media_id': media_id,'alt_text': {'text': alt}},
headers=headers)
# NOTE(review): unlike the duplicate of this fragment later in the file,
# this copy does not call resp.raise_for_status() here, so a failed
# metadata POST is only logged — confirm which behavior is intended.
logging.info('Got: %s', resp)
return ids
# NOTE(review): fragment of an activity-fetching function; `tweets`,
# `tweet_activities`, `activities`, `cached`, `cache_updates`, `min_id`,
# `total_count`, and `etag` are bound outside this excerpt.
if fetch_mentions:
# fetch mentions *after* replies so that we don't get replies to mentions
# https://github.com/snarfed/bridgy/issues/631
mentions = self.fetch_mentions(_user().get('screen_name'), tweets,
min_id=min_id)
tweet_activities += [self.tweet_to_activity(m) for m in mentions]
if fetch_likes:
for tweet, activity in zip(tweets, tweet_activities):
# NOTE: `id` shadows the builtin; kept as-is in this excerpt.
id = tweet['id_str']
count = tweet.get('favorite_count')
# Only re-scrape favorites for public tweets whose favorite count has
# changed since the cached value under key 'ATF <id>'.
if self.is_public(activity) and count and count != cached.get('ATF ' + id):
url = HTML_FAVORITES % id
try:
resp = util.urlopen(url).read()
html = source.load_json(resp, url).get('htmlUsers', '')
# NOTE(review): `urllib_error` here (vs `urllib.error` in the duplicate
# fragment below) — presumably an import alias defined elsewhere; verify.
except urllib_error.URLError as e:
util.interpret_http_exception(e) # just log it
continue
likes = self.favorites_html_to_likes(tweet, html)
activity['object'].setdefault('tags', []).extend(likes)
cache_updates['ATF ' + id] = count
activities += tweet_activities
# Assemble the final AS response and record the new favorite counts in the
# cache only after everything above succeeded.
response = self.make_activities_base_response(activities)
response.update({'total_count': total_count, 'etag': etag})
if cache_updates and cache is not None:
cache.set_multi(cache_updates)
return response
# NOTE(review): fragment of a timeline-fetching function; the `if` branch
# matching the `else:` below, plus `count`, `group_id`, `user_id`, `etag`,
# `start_index`, and `cache`, are bound outside this excerpt.
url = API_TIMELINE % (count)
else:
# List timeline: default the owner to the authenticated user.
if not user_id:
user_id = _user().get('screen_name')
url = API_LIST_TIMELINE % {
'count': count,
'slug': group_id,
'owner_screen_name': user_id,
}
# Send the previous ETag so Twitter can answer 304 Not Modified.
headers = {'If-None-Match': etag} if etag else {}
total_count = None
try:
resp = self.urlopen(url, headers=headers, parse_response=False)
etag = resp.info().get('ETag')
tweet_obj = source.load_json(resp.read(), url)
# Search responses nest the tweets under 'statuses'.
if group_id == source.SEARCH:
tweet_obj = tweet_obj.get('statuses', [])
tweets = tweet_obj[start_index:]
except urllib.error.HTTPError as e:
if e.code == 304: # Not Modified, from a matching ETag
tweets = []
else:
raise
# batch get memcached counts of favorites and retweets for all tweets
# (keys look like 'ATR <id>' and 'ATF <id>')
cached = {}
if cache is not None:
keys = itertools.product(('ATR', 'ATF'), [t['id_str'] for t in tweets])
cached = cache.get_multi('%s %s' % (prefix, id) for prefix, id in keys)
# only update the cache at the end, in case we hit an error before then
cache_updates = {}
def urlopen(self, url, **kwargs):
  """Fetches url with :func:`urllib.request.urlopen`, adding the access token.

  If an access token is stored on this instance, it is appended to the URL as
  an ``access_token`` query parameter. For requests that carry a ``data``
  payload (i.e. POSTs), the raw response object is returned; otherwise the
  response body is parsed as JSON and its ``data`` field is returned.
  """
  token = self.access_token
  if token:
    # TODO add access_token to the data parameter for POST requests
    url = util.add_query_params(url, [('access_token', token)])
  resp = util.urlopen(urllib.request.Request(url, **kwargs))
  if kwargs.get('data'):
    return resp
  return source.load_json(resp.read(), url).get('data')
# NOTE(review): second copy of the image-upload fragment; this version checks
# both MIME type and size (MAX_IMAGE_SIZE) via _check_media, and raises on a
# failed metadata POST. `image`, `url`, and `ids` are bound outside this
# excerpt.
continue
# Fetch the image bytes so they can be re-uploaded to Twitter.
image_resp = util.urlopen(url)
# Validate MIME type and size; the helper returns an error value to
# propagate to the caller on failure.
error = self._check_media(url, image_resp, IMAGE_MIME_TYPES,
'JPG, PNG, GIF, and WEBP images', MAX_IMAGE_SIZE)
if error:
return error
# Sign the upload request with OAuth1 (key + secret) for a POST.
headers = twitter_auth.auth_header(
API_UPLOAD_MEDIA, self.access_token_key, self.access_token_secret, 'POST')
resp = util.requests_post(API_UPLOAD_MEDIA,
files={'media': image_resp},
headers=headers)
resp.raise_for_status()
logging.info('Got: %s', resp.text)
# Twitter returns the id as a string in media_id_string; collect it for the
# caller to attach to the tweet.
media_id = source.load_json(resp.text, API_UPLOAD_MEDIA)['media_id_string']
ids.append(media_id)
# Optional alt text: truncate to Twitter's limit and set it via the
# media metadata endpoint.
alt = image.get('displayName')
if alt:
alt = util.ellipsize(alt, chars=MAX_ALT_LENGTH)
headers = twitter_auth.auth_header(
API_MEDIA_METADATA, self.access_token_key, self.access_token_secret, 'POST')
resp = util.requests_post(API_MEDIA_METADATA,
json={'media_id': media_id,'alt_text': {'text': alt}},
headers=headers)
resp.raise_for_status()
logging.info('Got: %s', resp.text)
return ids
# NOTE(review): second copy of the mentions/likes fragment; `tweets`,
# `tweet_activities`, `activities`, `cached`, `cache_updates`, `min_id`,
# `total_count`, and `etag` are bound outside this excerpt.
if fetch_mentions:
# fetch mentions *after* replies so that we don't get replies to mentions
# https://github.com/snarfed/bridgy/issues/631
mentions = self.fetch_mentions(_user().get('screen_name'), tweets,
min_id=min_id)
tweet_activities += [self.tweet_to_activity(m) for m in mentions]
if fetch_likes:
for tweet, activity in zip(tweets, tweet_activities):
# NOTE: `id` shadows the builtin; kept as-is in this excerpt.
id = tweet['id_str']
count = tweet.get('favorite_count')
# Only re-scrape favorites for public tweets whose favorite count has
# changed since the cached value under key 'ATF <id>'.
if self.is_public(activity) and count and count != cached.get('ATF ' + id):
url = HTML_FAVORITES % id
try:
resp = util.urlopen(url).read()
html = source.load_json(resp, url).get('htmlUsers', '')
except urllib.error.URLError as e:
util.interpret_http_exception(e) # just log it
continue
likes = self.favorites_html_to_likes(tweet, html)
activity['object'].setdefault('tags', []).extend(likes)
cache_updates['ATF ' + id] = count
activities += tweet_activities
# Assemble the final AS response and record the new favorite counts in the
# cache only after everything above succeeded.
response = self.make_activities_base_response(activities)
response.update({'total_count': total_count, 'etag': etag})
if cache_updates and cache is not None:
cache.set_multi(cache_updates)
return response
def request():
  """Issues the OAuth-signed request for the enclosing scope's url/kwargs.

  Closure over `url`, `self`, `kwargs`, and `parse_response` from the
  enclosing function. Returns the parsed JSON body when `parse_response`
  is true, otherwise the raw response object.
  """
  raw = twitter_auth.signed_urlopen(
      url, self.access_token_key, self.access_token_secret, **kwargs)
  if parse_response:
    return source.load_json(raw.read(), url)
  return raw