def test_save_cache_path(self):
    tmpdir = tempfile.mkdtemp()
    try:
        environ['XDG_CACHE_HOME'] = tmpdir
        reload(BaseDirectory)
        datapath = BaseDirectory.save_cache_path("foo")
        self.assertEqual(datapath, os.path.join(tmpdir, "foo"))
    finally:
        shutil.rmtree(tmpdir)
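For reference, BaseDirectory.save_cache_path() creates the named subdirectory under $XDG_CACHE_HOME (falling back to ~/.cache when the variable is unset) if it does not already exist and returns its absolute path, which is exactly what the test above asserts. A minimal sketch, with "myapp" as a placeholder resource name:

import os
from xdg import BaseDirectory

# "myapp" is a placeholder; with XDG_CACHE_HOME unset this creates and
# returns ~/.cache/myapp.
cache_dir = BaseDirectory.save_cache_path("myapp")
assert os.path.isdir(cache_dir)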
import logging
import os

import httplib2
from oauth2client.client import flow_from_clientsecrets

from xdg import BaseDirectory
from logger import StreamToLogger

# Check https://developers.google.com/gmail/api/auth/scopes
# for all available scopes
OAUTH_SCOPE = 'https://mail.google.com/'

# Path to client_secret.json; the file comes from the Google Developer Console
CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  'client_secret.json')

# Directory where the credentials storage files are placed
STORAGE_DIR = BaseDirectory.save_cache_path(os.path.join('goopg', 'storage'))


class Gmail():

    def __init__(self, username):
        # the main username
        self.username = username
        self.http = httplib2.Http()
        self.logger = logging.getLogger('Gmail')
        self.logger.setLevel(logging.DEBUG)

        # Start the OAuth flow to retrieve credentials
        flow = flow_from_clientsecrets(CLIENT_SECRET_FILE,
                                       scope=OAUTH_SCOPE,
                                       redirect_uri='urn:ietf:wg:oauth:2.0:oob:auto')
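The excerpt stops before the credentials are stored; with oauth2client the usual pattern is to keep one credential file per account inside a directory such as STORAGE_DIR. A hedged sketch of that step (the per-account filename is an assumption, not necessarily this project's actual layout):

import os
from oauth2client.file import Storage

# Hypothetical continuation: persist OAuth credentials in a file named
# after the account inside STORAGE_DIR.
storage = Storage(os.path.join(STORAGE_DIR, 'user@example.com'))
credentials = storage.get()  # returns None if nothing is cached yet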
def _create_cache_directory(self) -> str:
    cache_dir = BaseDirectory.save_cache_path("snapcraft", "provider", "launchpad")
    os.makedirs(cache_dir, mode=0o700, exist_ok=True)
    return cache_dir
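save_cache_path() accepts multiple path components and joins them into one nested directory, so the call above resolves to $XDG_CACHE_HOME/snapcraft/provider/launchpad. A small sketch with placeholder component names:

from xdg import BaseDirectory

# "example" and "nested" are placeholders; with XDG_CACHE_HOME unset this
# creates and returns ~/.cache/example/nested.
nested_dir = BaseDirectory.save_cache_path("example", "nested")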
def cache_directory(container_name):
    """
    A directory to cache the container data in.
    """
    return os.path.join(save_cache_path('forklift'), container_name)
for distro in self.config['distributions']:
    if self.release in self.config['distributions'][distro]['releases']:
        self.distro = distro

if not self.distro:
    base = "Release \"{}\" not valid. ".format(self.release)
    if not self.release:
        base = "No release declared. "
    all_releases = []
    for distro in sorted(self.config['distributions'].keys()):
        releases = self.config['distributions'][distro]['releases']
        all_releases.append(" [%s] %s" % (distro, ' - '.join(releases)))
    raise ValueError(base +
                     "Please specify one of:\n%s" %
                     '\n'.join(all_releases))
self.config_path = _BaseDirectory.save_config_path(self.name)
self.cache_path = _BaseDirectory.save_cache_path(self.name)
self.data_path = _BaseDirectory.save_data_path(self.name)
self.config_path = _os.path.join(self.config_path, self.release)
self.cache_path = _os.path.join(self.cache_path, self.release)
self.data_path = _os.path.join(self.data_path, self.release)
self.bashrc = _os.path.join(self.config_path, "bash.rc")
self.sourceslist = _os.path.join(self.config_path, "sources.list")
self.aptconf = _os.path.join(self.config_path, "apt.conf")
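For context, save_config_path() and save_data_path() are the XDG siblings of save_cache_path(), rooted at $XDG_CONFIG_HOME (~/.config) and $XDG_DATA_HOME (~/.local/share) respectively; each creates the directory on demand. A minimal sketch with a placeholder application name:

from xdg import BaseDirectory as _BaseDirectory

# "exampleapp" is a placeholder; each call creates the directory if needed
# and returns its absolute path under the matching XDG base directory.
config_path = _BaseDirectory.save_config_path("exampleapp")  # ~/.config/exampleapp
cache_path = _BaseDirectory.save_cache_path("exampleapp")    # ~/.cache/exampleapp
data_path = _BaseDirectory.save_data_path("exampleapp")      # ~/.local/share/exampleapp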
def load(self, filename):
    '''Load registered version database into this database'''
    assert(filename is not None)
    path = join(save_cache_path(XDG_DIRECTORY), filename)
    # Opening in append mode creates the file if it does not exist yet
    try:
        open(path, "a").close()
    except (IOError, OSError) as exp:
        raise BaseError("Create database filename failed; %s" % exp)

    logging.debug("Loading database %s" % path)
    try:
        with open(path, "r") as fileobj:
            dico = json.load(fileobj)
        self.update(dico)
    except Exception as exp:
        logging.error("Unable to load database %s: %s" % (path, exp))

    # because we use self._path in __del__, this should be done when
    # we are sure that db is loaded
    self._path = path
import hashlib
import json
import os
import time

import xdg.BaseDirectory


class JsonCache(object):
    cache_dir = xdg.BaseDirectory.save_cache_path("nullroute.eu.org/url")

    def __init__(self, expiry=0):
        self.expiry = expiry or 86400

    def get_path(self, name):
        name = hashlib.sha1(name.encode("utf-8")).hexdigest()
        return os.path.join(self.cache_dir, "%s.json" % name)

    def load(self, name):
        path = self.get_path(name)
        try:
            with open(path, "r") as fh:
                package = json.load(fh)
                if package.get("expire", 0) >= time.time():
                    return package["data"]
                else: