Here are examples of the Python API os.path.sep.join taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
91 Examples
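Before the project examples, a minimal sketch of what the call does (assuming a POSIX system where os.path.sep is '/'): os.path.sep.join is simply str.join applied to the platform's path separator, so unlike os.path.join it performs no normalization and no special handling of absolute components.

import os

parts = ['cache', 'graphs', 'process.gexf']
print(os.path.sep.join(parts))  # 'cache/graphs/process.gexf' on POSIX, backslash-separated on Windows
print(os.path.join(*parts))     # same result here; os.path.join also handles absolute parts and drive letters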
Example 1 (4 votes)
def get_cert(self, cn, san):
"""Get a signed certificate for a cn, san.
cn: The common name to use in the certificate
san: The subject alt name to add to the certificate, or None
Returns a path to a pem file containing the key and cert"""
self._ensure_loaded()
path = os.path.sep.join([self.cert_dir, self._get_file_name(cn, san)])
# TODO: Check that the cert at path is valid
if not os.path.exists(path):
self._generate_cert(cn, san, path)
return path
Example 2 (4 votes)
def _search_zip(modpath, pic):
for filepath, importer in list(pic.items()):
if importer is not None:
found = importer.find_module(modpath[0])
if found:
if not importer.find_module(os.path.sep.join(modpath)):
raise ImportError('No module named %s in %s/%s' % (
'.'.join(modpath[1:]), filepath, modpath))
#import code; code.interact(local=locals())
return (ModuleType.PY_ZIPMODULE,
os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
filepath)
raise ImportError('No module named %s' % '.'.join(modpath))
Example 3 (3 votes)
def build_script(script):
"""make path to scripts used by tests
"""
from vimpdb.config import get_package_path
tests_path = get_package_path(build_script)
script_path = sys.executable + " " + os.path.sep.join([tests_path,
'scripts', script])
return script_path
Example 4 (3 votes)
def load_process_graph(self):
import networkx
dumpname = self._memory_handler.get_name()
fname = os.path.sep.join([config.get_cache_folder_name(dumpname), config.CACHE_GRAPH])
my_graph = networkx.readwrite.gexf.read_gexf(fname)
return my_graph
Example 5 (3 votes)
def get_fs_package_ops(fspath):
if not fspath:
return []
ops = []
for snapfile in sorted(glob.glob(os.path.sep.join([fspath, '*.snap']))):
(name, shortname, fname_noext) = parse_filename(snapfile)
cfg = None
for cand in (fname_noext, name, shortname):
fpcand = os.path.sep.join([fspath, cand]) + ".config"
if os.path.isfile(fpcand):
cfg = fpcand
break
ops.append(makeop('install', name, config=None,
path=snapfile, cfgfile=cfg))
return ops
Example 6 (3 votes)
def get_file(self, f):
leaf = self.get_leaf(f)
if self.strip:
f = self.strip_name(f)
if self.compress:
fi = cStringIO.StringIO(self.get_zip(leaf).read(f))
else:
fi = open(os.path.sep.join([self.root, leaf, f]))
return fi
Example 7 (3 votes)
def get_cache_filename(typ, dumpname, address=None):
"""
Returns a filename for caching a type of data based on the dump filename.
:param typ: one of Config.CACHE_XX types.
:param dumpname: the dumpname to get the cache folder
:param address: an optional unique identifier
:return:
"""
fname = typ
if address is not None:
fname = '%x.%s' % (address, typ)
return os.path.sep.join([get_cache_folder_name(dumpname), fname])
Example 8 (3 votes)
def test_store_download_thumbnail(self):
# Setup
recordings = self.conn.getRecordings()
self.assertTrue(recordings, 'Recordings needed in to run test')
downloader = MythThumbnailResolver(self.conn, self.db)
dest = os.path.sep.join([tempfile.gettempdir(), 'thumbnail_' + str(random.randint(1, 999999)) + '.png'])
# Test
downloader.store(recordings[-1], dest)
# Verify
log.debug('Downloaded %s to %s' % (safe_str(recordings[-1].title()), dest))
self.assertTrue(os.path.exists(dest))
self.assertTrue(os.path.isfile(dest))
self.assertTrue(os.path.getsize(dest) > 0)
# Cleanup
os.remove(dest)
Example 9 (3 votes)
def _search_zip(modpath, pic):
for filepath, importer in pic.items():
if importer is not None:
if importer.find_module(modpath[0]):
if not importer.find_module(os.path.sep.join(modpath)):
raise ImportError('No module named %s in %s/%s' % (
'.'.join(modpath[1:]), filepath, modpath))
return PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath
raise ImportError('No module named %s' % '.'.join(modpath))
Example 10 (3 votes)
def test_ssh_client_mkdir_recursive(self):
"""Test SFTP mkdir of SSHClient"""
base_path = 'remote_test_dir1'
remote_dir = os.path.sep.join([base_path,
'remote_test_dir2',
'remote_test_dir3'])
try:
shutil.rmtree(base_path)
except OSError:
pass
client = SSHClient(self.host, port=self.listen_port,
pkey=self.user_key)
client.mkdir(client._make_sftp(), remote_dir)
self.assertTrue(os.path.isdir(remote_dir),
msg="SFTP recursive mkdir failed")
shutil.rmtree(base_path)
del client
Example 11 (3 votes)
def test_more_args(self):
with tempdir() as chroot:
create_files(['a/b/c/__init__.py', 'a/d/__init__.py', 'a/e/f.py'])
expected = [
join(chroot, suffix)
for suffix in [sep.join(('a', 'b')), 'a', sep.join(('a', 'e'))]
] + ["."] + self.fake
cases = (
['a/b/c/__init__.py', 'a/d/__init__.py', 'a/e/f.py'],
['a/b/c', 'a', 'a/e'],
['a/b/c', 'a', 'a/b/c', 'a/e', 'a'],
)
self.assertEqual(sys.path, self.fake)
for case in cases:
with lint.fix_import_path(case):
self.assertEqual(sys.path, expected)
self.assertEqual(sys.path, self.fake)
Example 12 (3 votes)
def get_files_details(self):
''' Parses torrent file and returns details of the files contained in the torrent.
Details include name, length and checksum for each file in the torrent.
'''
parsed_files_info = []
files_info = self.parsed_content.get('info')
if files_info: # 'info' should be present in all torrent files. Nevertheless..
multiple_files_info = files_info.get('files')
if multiple_files_info: # multiple-file torrent
for file_info in multiple_files_info:
parsed_files_info.append((os.path.sep.join(file_info.get('path')), file_info.get('length'), ))
elif files_info.get('name'): # single file torrent format 1
parsed_files_info.append((files_info.get('name'), files_info.get('length'), ))
elif self.parsed_content.get('name'): # single file format 2
parsed_files_info.append((self.parsed_content.get('name'), self.parsed_content.get('length'), ))
else:
return None
return parsed_files_info
Example 13 (3 votes)
def _get_path_url(self, folder_deepness, filename):
rurl = os.path.sep.join([
config.script_folder_url.rstrip('/'),
self.folders_rel[folder_deepness].strip('/'),
filename.lstrip('/')]
)
rpath = os.path.sep.join([
config.script_folder.rstrip('/'),
self.folders_rel[folder_deepness].strip('/'),
filename.lstrip('/')]
)
return rpath, rurl
Example 14 (3 votes)
def _protected_open_file(self, mmap_fname, mmap_pathname):
if mmap_pathname is not None and mmap_pathname in self._maps_to_load:
log.debug('SELECTED: %s', mmap_pathname)
return self._open_file(self.archive, self.filePrefix + mmap_fname)
else:
log.debug('IGNORED: %s', mmap_pathname)
# return lambda: (file(os.path.sep.join([self.archive,
# self.filePrefix+mmap_fname]),'r')
raise LazyLoadingException(os.path.sep.join([self.archive, self.filePrefix + mmap_fname]))
Example 15 (3 votes)
def __init__(self, naming_dir=None, naming_file=None, naming_type=None):
"""Set the default values for a new Naming object."""
self.current_symbol = None
self.services = {}
self.networks = {}
self.unseen_services = {}
self.unseen_networks = {}
if naming_file and naming_type:
filename = os.path.sep.join([naming_dir, naming_file])
file_handle = open(filename, 'r')
self._ParseFile(file_handle, naming_type)
elif naming_dir:
self._Parse(naming_dir, 'services')
self._CheckUnseen('services')
self._Parse(naming_dir, 'networks')
self._CheckUnseen('networks')
Example 16 (3 votes)
def os_path(self):
"""Get a path suitable to be used with os APIs."""
result = os.path.sep.join(self.components())
# On unix systems we anchor at root but on windows the drive
# name should be first.
if os.path.sep == "/":
result = "/" + result
return result
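A minimal sketch of the anchoring behaviour described in the comment above, assuming a POSIX system and a hypothetical components() result of ['usr', 'local', 'bin']:

import os

result = os.path.sep.join(['usr', 'local', 'bin'])  # 'usr/local/bin'
if os.path.sep == "/":
    result = "/" + result                           # '/usr/local/bin', anchored at root
print(result)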
Example 17 (3 votes)
def import_available_modules(namespace):
"""
Import all available module in the specified namespace.
:param namespace: The namespace to import modules from.
"""
modules = []
for path in sys.path:
for module in sorted(glob.glob(os.path.sep.join([path] + namespace.split('.') + ['*.py']))):
if not module.endswith('__init__.py'):
mod_name = module.split(os.path.sep)[-1].split('.')[0]
modpath = '.'.join([namespace, mod_name])
_log.debug("importing module %s" % modpath)
try:
mod = __import__(modpath, globals(), locals(), [''])
except ImportError as err:
raise EasyBuildError("import_available_modules: Failed to import %s: %s", modpath, err)
modules.append(mod)
return modules
Example 18 (3 votes)
def save_graph_headers(ctx, graph, fname):
    fout = open(os.path.sep.join([config.cacheDir, fname]), 'w')
    towrite = []
    structs = [ctx.structures[int(addr, 16)] for addr in graph.nodes()]
    for anon in structs:
        print(anon)
        towrite.append(anon.to_string())
        # flush the buffer to disk every 10000 records
        if len(towrite) >= 10000:
            try:
                fout.write('\n'.join(towrite))
            except UnicodeDecodeError as e:
                print('ERROR on', anon)
            towrite = []
            fout.flush()
    fout.write('\n'.join(towrite))
    fout.close()
    return
Example 19 (3 votes)
def commonprefix(paths):
if not paths or any(i.root != paths[0].root for i in paths):
return None
split = [i.split() for i in paths]
lo, hi = min(split), max(split)
for i, bit in enumerate(lo):
if bit != hi[i]:
return Path(os.path.sep.join(lo[:i]), paths[0].root)
return Path(os.path.sep.join(lo), paths[0].root)
Example 20 (3 votes)
def _run(self):
if not use_inotify:
log.warn("gevent_inotifyx not loaded; not using inotify")
return
fd = inotify.init()
wd = inotify.add_watch(fd, self.opts.spool_dir,
inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO)
while True:
events = inotify.get_events(fd)
for event in events:
path = os.path.sep.join([self.opts.spool_dir,
event.name])
# Filter out inotify events generated for files that
# have been already unlinked from the filesystem
# (IN_EXCL_UNLINK emulation)
if os.path.exists(path):
self.on_find(path)
Example 21 (3 votes)
def removedupl(picdir):
dict = {}
rep = {}
for f in os.listdir(fullpath(picdir)):
fp = path.sep.join([picdir, f])
hash = hashlib.md5()
hash.update(readdata(fp, 'rb'))
md5 = hash.hexdigest()
if md5 in dict:
rep[f] = dict[md5]
os.remove(fp)
else:
dict[md5] = f
return rep
Example 22 (3 votes)
def load(self):
outdir = config.get_cache_filename(
config.CACHE_SIGNATURE_GROUPS_DIR,
self._context.dumpname)
inname = os.path.sep.join([outdir, self._name])
self._similarities = utils.int_array_cache(inname)
return
Example 23 (3 votes)
def _loadCache(self):
outdir = config.get_cache_filename(
config.CACHE_SIGNATURE_SIZES_DIR,
self._context.dumpname)
fdone = os.path.sep.join(
[outdir, config.CACHE_SIGNATURE_SIZES_DIR_TAG])
if not os.access(fdone, os.R_OK):
return False
for myfile in os.listdir(outdir):
try:
# FIXME: not sure its -
# and what that section is about in general.
addr = int(myfile.split('-')[1], 16)
except IndexError as e:
continue # ignore file
Example 24 (3 votes)
def render_template(self, name, **kwargs):
"""
Template renderer.
"""
tpl = open(os.path.sep.join([self._template_path, name]), 'r').read()
for key, value in kwargs.items():
tpl = tpl.replace("{{- " + key + " -}}", value)
return tpl
Example 25 (3 votes)
def basic_test(self):
folder = temp_folder()
paths = SimplePaths(folder)
self.assertEqual(paths._store_folder, folder)
conan_ref = ConanFileReference.loads("opencv/2.4.10 @ lasote /testing")
package_ref = PackageReference(conan_ref, "456fa678eae68")
expected_base = os.path.join(folder, os.path.sep.join(["opencv", "2.4.10",
"lasote", "testing"]))
self.assertEqual(paths.conan(conan_ref),
os.path.join(paths.store, expected_base))
self.assertEqual(paths.export(conan_ref),
os.path.join(paths.store, expected_base, EXPORT_FOLDER))
self.assertEqual(paths.build(package_ref),
os.path.join(paths.store, expected_base, BUILD_FOLDER, "456fa678eae68"))
self.assertEqual(paths.package(package_ref),
os.path.join(paths.store, expected_base, PACKAGES_FOLDER,
"456fa678eae68"))
Example 26 (3 votes)
def joinpaths(*args, **kwargs):
path = os.path.sep.join(args)
if kwargs.get("follow_symlinks"):
return os.path.realpath(path)
else:
return path
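A hypothetical usage of the helper above: the parts are joined verbatim with the platform separator, and follow_symlinks=True routes the joined string through os.path.realpath ('some_link' below is an invented name):

joinpaths('var', 'log', 'messages')                   # 'var/log/messages' on POSIX
joinpaths('/tmp', 'some_link', follow_symlinks=True)  # '/tmp/some_link' resolved via os.path.realpath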
Example 27 (3 votes)
@property
def relpath(self):
'''this File's path relative to its root'''
up = self
components = deque()
while up != self.root:
components.appendleft(up.basename)
up = up.parent
return os.path.sep.join(components)
Example 28 (3 votes)
def path_walker(basedir, dirname, fname):
    # print("path_walker:", basedir, dirname, fname)
    for f in fname:
        try:
            p = path.sep.join([dirname, f])
            p = "/".join(p.split(path.sep))
            if matcher(p):
                le(p, open(p).read())
        except Exception as e:
            logging.error("Caught exception: " + str(e))
Example 30 (3 votes)
def setUp(self, extra_plugin_dir=None, extra_test_file=None, loglevel=logging.DEBUG, extra_config=None):
"""
:param extra_plugin_dir: Path to a directory from which additional
plugins should be loaded.
:param extra_test_file: [Deprecated but kept for backward-compatibility,
use extra_plugin_dir instead]
Path to an additional plugin which should be loaded.
:param loglevel: Logging verbosity. Expects one of the constants
defined by the logging module.
:param extra_config: Piece of extra bot config in a dict.
"""
if extra_plugin_dir is None and extra_test_file is not None:
extra_plugin_dir = sep.join(abspath(extra_test_file).split(sep)[:-2])
self.setup(extra_plugin_dir=extra_plugin_dir, loglevel=loglevel, extra_config=extra_config)
self.start()
Example 31 (3 votes)
def makeop_tmpd(tmpd, op, name, config=None, path=None, cfgfile=None):
if cfgfile:
cfgfile = os.path.sep.join([tmpd, cfgfile])
if path:
path = os.path.sep.join([tmpd, path])
return(makeop(op=op, name=name, config=config, path=path, cfgfile=cfgfile))
Example 32 (3 votes)
def _load_memory_mappings(self):
""" make the python objects"""
_mappings = []
default_ctypes = types.load_ctypes_default()
for mmap_fname, start, end, permissions, offset, major_device, minor_device, inode, pathname in self.metalines:
log.debug('Loading %s - %s' % (mmap_fname, pathname))
# open the file in the archive
fname = os.path.sep.join([self.dumpname, mmap_fname])
mmap = FilenameBackedMemoryMapping(fname, start, end, permissions, offset, major_device, minor_device, inode, pathname=pathname)
mmap.set_ctypes(default_ctypes)
_mappings.append(mmap)
_target_platform = target.TargetPlatform(_mappings, cpu_bits=self._cpu_bits, os_name=self._os_name)
self._memory_handler = MemoryHandler(_mappings, _target_platform, self.dumpname)
self._memory_handler.reset_mappings()
return
Example 33 (3 votes)
def reverse(self):
super(PointerGraphReverser, self).reverse()
import networkx
dumpname = self._memory_handler.get_name()
outname1 = os.path.sep.join([config.get_cache_folder_name(dumpname), config.CACHE_GRAPH])
outname2 = os.path.sep.join([config.get_cache_folder_name(dumpname), config.CACHE_GRAPH_HEAP])
log.info('[+] Process Graph == %d Nodes', self._master_graph.number_of_nodes())
log.info('[+] Process Graph == %d Edges', self._master_graph.number_of_edges())
networkx.readwrite.gexf.write_gexf(self._master_graph, outname1)
log.info('[+] Process Heaps Graph == %d Nodes', self._heaps_graph.number_of_nodes())
log.info('[+] Process Heaps Graph == %d Edges', self._heaps_graph.number_of_edges())
networkx.readwrite.gexf.write_gexf(self._heaps_graph, outname2)
return
Example 34 (3 votes)
def metric_to_filepath(data_dir, metric, instance_num):
if metric.startswith('rurouni.'):
instance = metric.split('.')[2]
else:
idx = fnv1a.get_hash_bugfree(metric) % instance_num
# TODO: change this
# instance = str(idx)
instance = 'abc'[idx]
return os.path.sep.join([data_dir, instance] + metric.split('.')) + '.hs'
Example 35 (3 votes)
def path_to_node(base_node, path, cached_nodes):
# Take the base node and the path and return a node
# Results are cached because searching the node tree is expensive
# The following code is executed by threads, it is not safe, so a lock is needed...
if getattr(path, '__hash__'):
node_lookup_key = (base_node, path)
else:
# Not hashable, assume it is a list and join into a string
node_lookup_key = (base_node, os.path.sep.join(path))
try:
lock.acquire()
node = cached_nodes[node_lookup_key]
except KeyError:
node = base_node.find_resource(path)
cached_nodes[node_lookup_key] = node
finally:
lock.release()
return node
Example 36 (3 votes)
def printGraph(G, gname):
h = networkx.DiGraph()
h.add_edges_from(G.edges())
networkx.draw_graphviz(h)
fname = os.path.sep.join([config.imgCacheDir, 'graph_%s.png' % gname])
plt.savefig(fname)
plt.clf()
fname = os.path.sep.join([config.cacheDir, 'graph_%s.gexf' % gname])
networkx.readwrite.gexf.write_gexf(h, fname)
return
Example 37 (3 votes)
def test_store_When_channel_has_iconpath_but_filename_misspelled_Then_do_nothing(self):
# Setup
channel = Channel({'name':'Bogus Channel', 'icon': 'bogusIcon.png', 'chanid': '9', 'callsign': 'WXYZ'})
downloader = MythChannelIconResolver(self.conn)
# Test - download icons for first 5 channels
dest = os.path.sep.join([tempfile.gettempdir(), str(channel.getChannelId()) + channel.getCallSign() + str(time.time()) + ".png"])
downloader.store(channel, dest)
# Verify
self.assertFalse(os.path.exists(dest))
Example 38 (3 votes)
def persist(self):
outdir = config.get_cache_filename(
config.CACHE_SIGNATURE_GROUPS_DIR,
self._context.dumpname)
config.create_cache_folder(outdir)
#
outname = os.path.sep.join([outdir, self._name])
ar = utils.int_array_save(outname, self._similarities)
return
Example 39 (3 votes)
def prefixed_extensions(
prefix : "prefix to prepend to paths" = default_prefix,
extensions_data : "`extensions_data`" = extensions_data,
) -> [Extension]:
"""
Generator producing the `distutils` `Extension` objects.
"""
pkg_prefix = '.'.join(prefix) + '.'
path_prefix = os.path.sep.join(prefix)
for mod, data in extensions_data.items():
yield Extension(
pkg_prefix + mod,
[os.path.join(path_prefix, src) for src in data['sources']],
libraries = data.get('libraries', ()),
optional = True,
)
Example 40 (3 votes)
@staticmethod
def gen_path(data_dir, metric_name):
"""
Generate file path of `metric_name`.
eg, metric_name is `sys.cpu.user`, the absolute file path will be
`self.data_dir/sys/cpu/user.hs`
"""
if metric_name[0] == '/':
return metric_name
parts = metric_name.split('.')
parts[-1] = parts[-1] + '.hs'
file_path = os.path.sep.join(parts)
return os.path.join(data_dir, file_path)
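A quick standalone sketch of the mapping described in the docstring above, assuming a POSIX separator and a hypothetical data_dir of '/data':

import os

parts = 'sys.cpu.user'.split('.')   # ['sys', 'cpu', 'user']
parts[-1] = parts[-1] + '.hs'       # ['sys', 'cpu', 'user.hs']
print(os.path.join('/data', os.path.sep.join(parts)))  # '/data/sys/cpu/user.hs'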
Example 41 (2 votes)
def save_to_cache(self, key, json_data):
"""
Holds data in local 'cache'.
"""
cache_name = os.path.sep.join([self._cache_dir, key + '.json'])
f = open(cache_name, 'w')
f.write(json.dumps(json_data))
f.close()
self.logger.info('Saved "%s" in cache.' % key)
Example 42 (2 votes)
def get_fixture_file_path(filename):
"""
Attempts to return the path to a fixture file.
:param filename: The name of the file to look for.
:type filename: str
:returns: Full path to the file
:rtype: str
:raises: Exception
"""
for x in ('.', '..'):
try:
a_path = os.path.sep.join((x, filename))
os.stat(a_path)
return os.path.realpath(a_path)
except:
pass
raise Exception(
'Can not find path for config: {0}'.format(filename))
Example 43 (0 votes)
def setup(self):
# Sometimes the 'task' command line tool is not installed.
if self.should_skip():
raise nose.SkipTest(
"%r unsupported on this system" % (self.class_to_test)
)
# Create some temporary config stuff
fd, fname = tempfile.mkstemp(prefix='taskw-testsrc')
dname = tempfile.mkdtemp(prefix='taskw-tests-data')
with open(fname, 'w') as f:
f.writelines([
'data.location=%s\n' % dname,
'uda.somestring.label=Testing String\n',
'uda.somestring.type=string\n',
'uda.somedate.label=Testing Date\n',
'uda.somedate.type=date\n',
'uda.somenumber.label=Testing Number\n',
'uda.somenumber.type=numeric\n',
])
# Create empty .data files
for piece in ['completed', 'pending', 'undo']:
with open(os.path.sep.join([dname, piece + '.data']), 'w'):
pass
# Save names for .tearDown()
self.fname, self.dname = fname, dname
# Create the taskwarrior db object that each test will use.
self.tw = self.class_to_test(config_filename=fname)
# Create a taskwarrior db object for tests where marshal=True,
# but only for TaskwarriorShellout
if self.class_to_test == TaskWarriorShellout:
self.tw_marshal = self.class_to_test(config_filename=fname,
marshal=True)
Example 44 (0 votes)
def compileJava(java_dir, java):
if not java:
return None
sources = []
with capture_output():
for descriptor, code in java.items():
parts = descriptor.split('/')
class_dir = os.path.sep.join(parts[:-1])
class_file = os.path.join(class_dir, "%s.java" % parts[-1])
full_dir = os.path.join(java_dir, class_dir)
full_path = os.path.join(java_dir, class_file)
try:
os.makedirs(full_dir)
except FileExistsError:
pass
with open(full_path, 'w', encoding="utf-8") as java_source:
java_source.write(adjust(code))
sources.append(class_file)
classpath = os.pathsep.join([
os.path.join('..', 'dist', 'python-java-support.jar'),
os.curdir,
])
proc = subprocess.Popen(
["javac", "-classpath", classpath] + sources,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=java_dir,
)
out = proc.communicate()
return out[0].decode('utf8')
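Note that the example above uses both os.path.sep (to build the class directory) and os.pathsep (to build the javac classpath); the two are easy to confuse:

import os

print(os.path.sep)  # '/' on POSIX, '\\' on Windows  - separates directories within a single path
print(os.pathsep)   # ':' on POSIX, ';' on Windows   - separates entries in PATH-like lists such as a classpath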
Example 45 (0 votes)
def setup(self):
if not TaskWarriorShellout.can_use():
# Sometimes the 'task' command line tool is not installed.
raise nose.SkipTest("taskwarrior not installed")
# Create some temporary config stuff
fd, fname = tempfile.mkstemp(prefix='taskw-testsrc')
dname = tempfile.mkdtemp(prefix='taskw-tests-data')
with open(fname, 'w') as f:
f.writelines([
'data.location=%s\n' % dname,
'uda.somestring.label=Testing String\n',
'uda.somestring.type=string\n',
'uda.somedate.label=Testing Date\n',
'uda.somedate.type=date\n',
'uda.somenumber.label=Testing Number\n',
'uda.somenumber.type=numeric\n',
])
# Create empty .data files
for piece in ['completed', 'pending', 'undo']:
with open(os.path.sep.join([dname, piece + '.data']), 'w'):
pass
# Save names for .tearDown()
self.fname, self.dname = fname, dname
# Create the taskwarrior db object that each test will use.
self.tw = TaskWarriorShellout(config_filename=fname, marshal=True)
Example 46 (0 votes)
@pytest.mark.usefixtures('clean_system')
def test_generate_context_with_json_decoding_error():
with pytest.raises(ContextDecodingException) as excinfo:
generate.generate_context(
'tests/test-generate-context/invalid-syntax.json'
)
# original message from json module should be included
pattern = (
'Expecting \'{0,1}:\'{0,1} delimiter: '
'line 1 column (19|20) \(char 19\)'
)
assert re.search(pattern, str(excinfo.value))
# File name should be included too...for testing purposes, just test the
# last part of the file. If we wanted to test the absolute path, we'd have
# to do some additional work in the test which doesn't seem that needed at
# this point.
path = os.path.sep.join(
['tests', 'test-generate-context', 'invalid-syntax.json']
)
assert path in str(excinfo.value)
Example 47 (0 votes)
def get_from_cache(self, key, source=None):
"""
Gets data from a local cache. If it's not there it will run the
source callable, save the result and return the results.
"""
# If we have cache ...
if self._cache:
cache_name = os.path.sep.join([self._cache_dir, key + '.json'])
if not os.path.exists(cache_name):
self.logger.info('Key "%s" was NOT in cache.' % key)
# And the cache file exists ...
else:
mtime = datetime.datetime.fromtimestamp(
os.stat(cache_name).st_mtime)
now = datetime.datetime.now()
# If we are still in the cache time then use the cache
if now - self._cache_time < mtime:
self.logger.info('Found "%s" in cache.' % key)
data = open(cache_name, 'r').read()
return json.loads(data)
else:
self.logger.info('Key "%s" is expired in cache.' % key)
try:
status_code, data = source()
if self._cache and status_code == 200:
self.save_to_cache(key, data)
else:
self.logger.warn(
'Not saving %s to cache. Non 200 response.' % key)
return data
    except Exception as ex:
        print(ex)
Example 48 (0 votes)
def url2path(url):
return op.sep.join(url.split('/'))
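For reference, what this one-liner produces, assuming op is the usual alias for os.path:

import os.path as op

print(op.sep.join('docs/img/logo.png'.split('/')))
# 'docs/img/logo.png' on POSIX, 'docs\img\logo.png' on Windows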
Example 49 (0 votes)
def __enter__(self):
# Mount the device selected by mount_ok, if possible
self.temp_dir = tempfile.mkdtemp()
if self.submount:
mount_dir = os.path.normpath(os.path.sep.join([self.temp_dir, self.submount]))
os.makedirs(mount_dir, mode=0o755, exist_ok=True)
else:
mount_dir = self.temp_dir
for dev, size in self.loop_devices:
try:
mount( "/dev/mapper/"+dev, mnt=mount_dir )
if self.mount_ok(mount_dir):
self.mount_dir = mount_dir
self.mount_dev = dev
self.mount_size = size
break
umount( mount_dir )
except CalledProcessError:
logger.debug(traceback.format_exc())
if self.mount_dir:
logger.info("Partition mounted on %s size=%s", self.mount_dir, self.mount_size)
else:
logger.debug("Unable to mount anything from %s", self.disk_img)
os.rmdir(self.temp_dir)
self.temp_dir = None
return self
Example 50 (0 votes)
def __getCachedTokenPath(self):
"""Return the directory holding the app data."""
return os.path.expanduser(os.path.sep.join(["~", ".flickr", \
self.apiKey]))