Here are examples of the Python API `os.path.sep.join`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
91 Examples
0
Example 51
Project: SimpleCV Source File: flickrapi2.py
def __getCachedTokenFilename(self):
    """Return the full pathname of the file holding the cached auth token."""
    token_dir = self.__getCachedTokenPath()
    return os.path.sep.join([token_dir, "auth.xml"])
0
Example 52
def _GetPrefix(self, hashed_key):
    """Return the nested-directory prefix built from the leading characters of the key."""
    # Each of the first DEPTH characters of the hash becomes one directory level.
    prefix_chars = hashed_key[:_FileCache.DEPTH]
    return os.path.sep.join(prefix_chars)
0
Example 53
Project: gup Source File: gupfile.py
def _up_path(n):
return os.path.sep.join(itertools.repeat('..',n))
0
Example 54
def _GetPrefix(self, hashed_key):
    """Build the cache subdirectory prefix for `hashed_key`."""
    depth = _FileCache.DEPTH
    # Join the first `depth` characters of the hash as path components.
    return os.path.sep.join(hashed_key[0:depth])
0
Example 55
def _check_file_exists(self, filename, parent_id=None):
    """
    Check if a file with specific parameters exists in Google Drive.

    Walks the path recursively: each leading component must exist as a
    folder before the final component is looked up as a file.

    :param filename: File or folder to search
    :type filename: string
    :param parent_id: Unique identifier for its parent (folder)
    :type parent_id: string
    :returns: dict containing file / folder data if exists or None if does not exists
    """
    split_filename = self._split_path(filename)
    if len(split_filename) > 1:
        # This is an absolute path with folder inside
        # First check if the first element exists as a folder
        # If so call the method recursively with next portion of path
        # Otherwise the path does not exists hence the file does not exists
        q = "mimeType = '{0}' and title = '{1}'".format(self._GOOGLE_DRIVE_FOLDER_MIMETYPE_,
                                                        split_filename[0])
        if parent_id is not None:
            # Restrict the query to children of the given parent folder.
            q = "{0} and '{1}' in parents".format(q, parent_id)
        max_results = 1000  # Max value admitted from google drive
        folders = self._drive_service.files().list(q=q, maxResults=max_results).execute()
        for folder in folders["items"]:
            if folder["title"] == split_filename[0]:
                # Assuming every folder has a single parent
                # Recurse with the remaining path components under this folder.
                return self._check_file_exists(os.path.sep.join(split_filename[1:]), folder["id"])
        return None
    else:
        # This is a file, checking if exists
        q = "title = '{0}'".format(split_filename[0])
        if parent_id is not None:
            q = "{0} and '{1}' in parents".format(q, parent_id)
        max_results = 1000  # Max value admitted from google drive
        file_list = self._drive_service.files().list(q=q, maxResults=max_results).execute()
        if len(file_list["items"]) == 0:
            # No exact-title match: fall back to scanning the parent's
            # children for a title that merely contains the component.
            q = "" if parent_id is None else "'{0}' in parents".format(parent_id)
            file_list = self._drive_service.files().list(q=q, maxResults=max_results).execute()
            for element in file_list["items"]:
                if split_filename[0] in element["title"]:
                    return element
            return None
        else:
            return file_list["items"][0]
0
Example 56
def _save(self, name, content):
    """Upload `content` to Google Drive under `name`, creating parent
    folders as needed, grant public read access, and return the file's
    original filename as reported by Drive.

    NOTE(review): relies on project helpers (_split_path,
    _get_or_create_folder, _drive_service, _UNKNOWN_MIMETYPE_) and on
    BytesIO / mimetypes / MediaIoBaseUpload from the enclosing module.
    """
    folder_path = os.path.sep.join(self._split_path(name)[:-1])
    folder_data = self._get_or_create_folder(folder_path)
    parent_id = None if folder_data is None else folder_data['id']
    # Now we had created (or obtained) folder on GDrive
    # Upload the file
    fd = BytesIO(content.file.read())
    # Bug fix: mimetypes.guess_type() returns a (type, encoding) tuple;
    # the original passed the whole tuple to MediaIoBaseUpload and into
    # the request body when the guess succeeded. Take the type string.
    mime_type = mimetypes.guess_type(name)[0]
    if mime_type is None:
        mime_type = self._UNKNOWN_MIMETYPE_
    media_body = MediaIoBaseUpload(fd, mime_type, resumable=True)
    body = {
        'title': name,
        'mimeType': mime_type
    }
    # Set the parent folder.
    if parent_id:
        body['parents'] = [{'id': parent_id}]
    file_data = self._drive_service.files().insert(
        body=body,
        media_body=media_body).execute()
    # Setting up public permission
    public_permission = {
        'type': 'anyone',
        'role': 'reader'
    }
    self._drive_service.permissions().insert(fileId=file_data["id"], body=public_permission).execute()
    return file_data[u'originalFilename']
0
Example 57
def clean(digraph):
    """Prune trivial components from `digraph`, report isomorphic small
    subgraphs as PNGs, and compute in-degree statistics for the rest.

    NOTE(review): Python 2 code (print statements). References `ctx`,
    `config`, `utils`, `networkx` and `plt` that are not defined in this
    block -- presumably module-level names; verify against the module.
    Indentation reconstructed from a whitespace-mangled source.
    """
    # clean solos
    isolates = networkx.algorithms.isolate.isolates(digraph)
    digraph.remove_nodes_from(isolates)
    # clean solos clusters
    graph = networkx.Graph(digraph)  # undirected
    subgraphs = networkx.algorithms.components.connected.connected_component_subgraphs(
        graph)
    # Drop components with 1, 2 or 3 nodes.
    isolates1 = set(utils.flatten(g.nodes() for g in subgraphs if len(g) == 1))  # self connected
    isolates2 = set(utils.flatten(g.nodes() for g in subgraphs if len(g) == 2))
    isolates3 = set(utils.flatten(g.nodes() for g in subgraphs if len(g) == 3))
    digraph.remove_nodes_from(isolates1)
    digraph.remove_nodes_from(isolates2)
    digraph.remove_nodes_from(isolates3)
    #
    #graph = digraph.to_undirected()
    #subgraphs = networkx.algorithms.components.connected.connected_component_subgraphs(graph)
    subgraphs = [g for g in subgraphs if len(g) > 3]
    # Keep at most 99 components, skipping the first (largest) one.
    isolatedGraphs = subgraphs[1:100]
    # group by nodes number
    isoDict = defaultdict(list)
    [isoDict[len(g)].append(g) for g in isolatedGraphs]
    # test isomorphism
    isoGraphs = dict()
    for numNodes, graphs in isoDict.items():
        numgraphs = len(graphs)
        if numgraphs == 1:
            continue
        isoGraph = networkx.Graph()
        # quick find isomorphisms
        todo = set(graphs)
        for i, g1 in enumerate(graphs):
            for g2 in graphs[i + 1:]:
                if networkx.is_isomorphic(g1, g2):
                    print 'numNodes:%d graphs %d, %d are isomorphic' % (numNodes, i, i + 1)
                    isoGraph.add_edge(g1, g2, {'isomorphic': True})
                    if g2 in todo:
                        todo.remove(g2)
                    if g1 in todo:
                        todo.remove(g1)
                    # we can stop here, chain comparaison will work between g2
                    # and g3
                    break
        if len(isoGraph) > 0:
            isoGraphs[numNodes] = isoGraph
    # draw the isomorphisms
    for i, item in enumerate(isoGraphs.items()):
        num, g = item
        # networkx.draw(g)
        for rg in g.nodes():
            networkx.draw(rg)
        fname = os.path.sep.join(
            [config.imgCacheDir, 'isomorph_subgraphs_%d.png' % (num)])
        plt.savefig(fname)
        plt.clf()
    # need to use gephi-like for rendering nicely on the same pic
    bigGraph = networkx.DiGraph()
    bigGraph.add_edges_from(digraph.edges(subgraphs[0].nodes()))
    stack_addrs = utils.int_array_cache(
        config.get_cache_filename(config.CACHE_STACK_VALUES, ctx.dumpname, ctx._heap_addr))
    stack_addrs_txt = set(['%x' % (addr)
                           for addr in stack_addrs])  # new, no long
    stacknodes = list(set(bigGraph.nodes()) & stack_addrs_txt)
    print 'stacknodes left', len(stacknodes)
    orig = list(set(graph.nodes()) & stack_addrs_txt)
    print 'stacknodes orig', len(orig)
    # identify strongly referenced allocators
    degreesList = [(bigGraph.in_degree(node), node)
                   for node in bigGraph.nodes()]
    degreesList.sort(reverse=True)
0
Example 58
Project: python-haystack Source File: graph.py
def printImportant(ctx, digraph, degreesList, ind, bigGraph):
    """Report on the `ind`-th most-referenced structure: decode it, render
    a 2-level subgraph of its referrers to PNG, print its children's
    signatures, and save the subgraph headers. Returns the structure.

    NOTE(review): Python 2 code (print statements); `depthSubgraph`,
    `save_graph_headers`, `config`, `networkx` and `plt` come from the
    enclosing module.
    """
    nb, saddr = degreesList[ind]
    addr = int(saddr, 16)
    s1 = ctx.structures[addr]  # TODO FIXME RAISES
    # s1 = s1._load() #structure.cacheLoad(ctx, int(saddr,16))
    s1.decodeFields()
    print s1.to_string()
    # strip the node from its predecessors, they are too numerous
    impDiGraph = networkx.DiGraph()
    root = '%d nodes' % (nb)
    impDiGraph.add_edge(root, saddr)
    depthSubgraph(bigGraph, impDiGraph, [saddr], 2)
    print 'important struct with %d structs pointing to it, %d pointerFields' % (digraph.in_degree(saddr), digraph.out_degree(saddr))
    # print 'important struct with %d structs pointing to it, %d
    # pointerFields'%(impDiGraph.in_degree(saddr),
    # impDiGraph.out_degree(saddr))
    fname = os.path.sep.join(
        [config.imgCacheDir, 'important_%s.png' % (saddr)])
    networkx.draw(impDiGraph)
    plt.savefig(fname)
    plt.clf()
    # check for children with identical sig
    for node in impDiGraph.successors(saddr):
        st = ctx.structures[int(node, 16)]
        st.decodeFields()
        # FIXME rework, usage of obselete function
        st.resolvePointers()
        # st.pointerResolved=True
        # st._aggregateFields()
        print node, st.get_signature(text=True)
    # clean and print
    # s1._aggregateFields()
    impDiGraph.remove_node(root)
    save_graph_headers(ctx, impDiGraph, '%s.subdigraph.py' % (saddr))
    return s1
0
Example 59
def isPersisted(self):
    """Return True if this group's file already exists in the signature-groups cache dir."""
    outdir = config.get_cache_filename(
        config.CACHE_SIGNATURE_GROUPS_DIR,
        self._context.dumpname)
    target = os.path.sep.join([outdir, self._name])
    return os.access(target, os.F_OK)
0
Example 60
Project: python-haystack Source File: signature.py
def cacheSizes(self):
    """Find the number of different sizes, and creates that much numpyarray.

    Groups all malloc'd addresses by their chunk size and saves one array
    file per size under the signature-sizes cache dir, then writes a tag
    file to mark the cache complete and stores the arrays on self._sizes.

    NOTE(review): Python 2 code (`long`, `file()`); `config` and `utils`
    come from the enclosing module.
    """
    # if not os.access
    outdir = config.get_cache_filename(
        config.CACHE_SIGNATURE_SIZES_DIR,
        self._context.dumpname)
    config.create_cache_folder(outdir)
    #
    sizes = map(int, set(self._context._malloc_sizes))
    arrays = dict([(s, []) for s in sizes])
    # sort all addr in all sizes..
    [arrays[self._context._malloc_sizes[i]].append(
        long(addr)) for i, addr in enumerate(self._context._malloc_addresses)]
    # saving all sizes dictionary in files...
    for size, lst in arrays.items():
        fout = os.path.sep.join([outdir, 'size.%0.4x' % (size)])
        arrays[size] = utils.int_array_save(fout, lst)
    # saved all sizes dictionaries.
    # tag it as done
    file(
        os.path.sep.join([outdir, config.CACHE_SIGNATURE_SIZES_DIR_TAG]), 'w')
    self._sizes = arrays
    return
0
Example 61
Project: ultimate-smash-friends Source File: entity.py
def draw(self, coords, zoom, surface, debug_params=None):
    """
    Draw the entity on the surface(i.e: the screen), applying coordinates
    offsets and zoom scaling as necessary, implementation depends on the
    definition of the global "SIZE", as a 2 elements list of in integers,
    containing respective height and width of the screen.
    coords is a tuple containing the current position of the camera, zoom is
    the current zoom of the camera.

    Fix: the original declared a mutable default (`debug_params=dict()`),
    which is shared across every call; default to None instead.
    """
    if debug_params is None:
        debug_params = dict()
    # Draw a point on the map at the entity position.
    if not self.present:
        return
    if self.visible:
        # Anchor the sprite by its hardshape origin; mirrored sprites use
        # a different x offset so the hardshape stays aligned.
        if not self.reversed:
            place = (
                self.rect[0] - self.hardshape[0],
                self.rect[1] - self.hardshape[1])
        else:
            place = (
                self.rect[0],
                self.rect[1] - self.hardshape[1])
        real_coords = (
            int(place[0] * zoom) + coords[0],
            int(place[1] * zoom) + coords[1])
        self._draw_debug(real_coords, zoom, surface, debug_params)
        # Draw trail images (older positions) behind the entity.
        if self.entity_skin.animation.trails and self.old_pos:
            for i, (x, y) in enumerate(reversed(self.old_pos)):
                img = self.entity_skin.animation.trails[
                    len(self.old_pos) - (i + 1)]
                surface.blit(
                    loaders.image(
                        img,
                        reversed=self.reversed,
                        zoom=zoom)[0],
                    (
                        int(x * zoom) + coords[0] - (
                            not self.reversed and self.hardshape[0] or 0),
                        int(y * zoom) + coords[1] - self.hardshape[1]))
        skin_image = loaders.image(
            self.entity_skin.animation.image,
            reversed=self.reversed,
            lighten=self.lighten,
            zoom=zoom)
        surface.blit(
            skin_image[0],
            real_coords)
        # Shield bubble, scaled by the remaining shield power.
        if self.shield['on']:
            image = loaders.image(
                os.path.sep.join(
                    (CONFIG.system_path, 'misc', 'shield.png')),
                zoom=zoom * self.shield['power'] * 3)
            shield_coords = (
                coords[0] + int(
                    self.rect[0]
                    + self.entity_skin.shield_center[0]
                    - .5 * image[1][2]) * zoom,
                coords[1] + int(
                    self.rect[1]
                    + self.entity_skin.shield_center[1]
                    - .5 * image[1][3]) * zoom)
            surface.blit(image[0], shield_coords)
0
Example 62
Project: ultimate-smash-friends Source File: cv.py
def main(charnames):
    """Interactive viewer for character skins/animations.

    Opens a pygame window 200px wide per character, cycles animations,
    and supports pause/frame-stepping, shield display, hardshape overlay,
    speed changes, and mouse-drag output of <agressiv-point>/<vector> XML
    snippets for skin editing.

    NOTE(review): Python 2 code (print statements). Relies on
    module-level names not defined here: load_entities, inotifyx, fd,
    animations, image, Placeholder, CONFIG and the pygame key constants.
    Indentation reconstructed from a whitespace-mangled source.
    """
    pygame.init()
    window_size = (200*len(charnames), 400)
    screen = pygame.display.set_mode(window_size)
    skins, wds = load_entities(charnames)
    anim = 0
    display_hardshape = True
    speed = 1.0
    font = pygame.font.Font(pygame.font.get_default_font(), 12)
    pause = False
    shield = False
    frame = 0
    mouse_click = False
    mouse_xy = [0, 0]
    bottom_center_hardshape = [window_size[0]/2, window_size[1]*2/3]
    last_time = time.time()
    while True:
        # frame limiter
        while time.time() < last_time + 0.05:
            time.sleep(0.05)
        last_time = time.time()
        # get key events
        pygame.event.pump()
        if inotifyx:
            # Hot-reload the skins when watched files change; retry a few
            # times because the file may still be mid-write.
            for i, w in enumerate(wds):
                if inotifyx.get_events(fd, 0):
                    print "events"
                    for i in range(3):
                        try:
                            skins = load_entities(charnames)[0]
                            break
                        except:
                            time.sleep(0.5)
                    else:
                        print "doh!"
        # On mouse release, dump the clicked point and drag vector as XML.
        if mouse_click and not pygame.mouse.get_pressed()[0]:
            print "click released"
            print """
<agressiv-point
coords="%s,%s"
vector="%s,%s"
></agressiv-point>
""" % (
                (mouse_xy[0] - image_position[0]) % 200,
                mouse_xy[1] - image_position[1],
                2 * (pygame.mouse.get_pos()[0] - mouse_xy[0]),
                2 * (pygame.mouse.get_pos()[1] - mouse_xy[1])
            )
            print """
<vector
time="%s"
vector="%s,%s"
></agressiv-point>
""" % (
                img.time,
                2 * (pygame.mouse.get_pos()[0] - mouse_xy[0]),
                2 * (pygame.mouse.get_pos()[1] - mouse_xy[1])
            )
            mouse_click = False
        if not mouse_click and pygame.mouse.get_pressed()[0]:
            print "click pressed"
            mouse_xy = pygame.mouse.get_pos()
            mouse_click = True
        # Keyboard controls: quit, reload, shield, pause, frame/anim
        # stepping, hardshape toggle, speed up/down.
        for event in pygame.event.get(
                [KEYDOWN, KEYUP]
                ):
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    return
                elif event.key == K_F5:
                    entity_skins = load_entities(charnames)[0]
                    print "reloaded"
                elif event.key == K_s:
                    shield = not shield
                elif event.key == K_p:
                    if pause:
                        print "normal mode"
                    else:
                        print "pause: frame mode, chose frame with ← and →"
                    pause = not pause
                elif event.key == K_RIGHT:
                    frame +=1
                elif event.key == K_LEFT:
                    frame -=1
                elif event.key == K_UP:
                    anim +=1
                elif event.key == K_DOWN:
                    anim -=1
                elif event.key == K_SPACE:
                    display_hardshape = not display_hardshape
                elif event.key == K_PLUS or event.key == K_KP_PLUS:
                    speed *= 2
                elif event.key == K_MINUS or event.key == K_KP_MINUS:
                    speed /= 2
        pygame.event.clear( [MOUSEMOTION, MOUSEBUTTONUP, MOUSEBUTTONDOWN] )
        screen.fill(pygame.Color('green'))
        for i, skin in enumerate(skins):
            animation = animations[anim % len(animations)]
            # update screen
            if animation in skin.animations:
                if not pause:
                    # Pick the last frame whose timestamp has passed in the
                    # (speed-scaled) animation cycle.
                    try:
                        img = filter(
                            lambda f : f.time <= (
                                (time.time()*1000.0*speed) %
                                skin.animations[animation].duration
                            ),
                            skin.animations[animation].frames
                        )[-1]
                    except ZeroDivisionError:
                        print "error: duration of 0 in", charnames[i], "in animation", animation
                        continue
                    bottom_center_hardshape[0] = (window_size[0]/(len(charnames)
                        * 2)) + (int(time.time() * CONFIG.general.WALKSPEED) % 200 if
                        "walk" in animation else 0)
                    #if "walk" in animation:
                        #bottom_center_hardshape[0] = int(
                            #time.time() * CONFIG.general.WALKSPEED']
                        #) % window_size[0]
                    #else:
                        #bottom_center_hardshape[0] = window_size[0]/(len(charnames) * 2)
                else:
                    img = skin.animations[animation].frames[frame % len(skin.animations[animation].frames)]
            else:
                img = Placeholder()
            # update the image_position of the up-left corner of image, so that the
            # bottom-middle of the hardhape never moves (as in the game)
            image_position = (
                (bottom_center_hardshape[0] - img.hardshape[0] -
                 img.hardshape[2]/2 + 200 * i) % window_size[0],
                bottom_center_hardshape[1] - img.hardshape[1] - img.hardshape[3]
            )
            if display_hardshape:
                # Grey = full image bounds, blue = hardshape (collision box).
                screen.fill(
                    pygame.Color('grey'),
                    pygame.Rect((
                        image_position[0],
                        image_position[1],
                        image(img.image)[1][2],
                        image(img.image)[1][3]
                    ))
                )
                screen.fill(
                    pygame.Color('blue'),
                    pygame.Rect((
                        image_position[0] + img.hardshape[0],
                        image_position[1] + img.hardshape[1],
                        img.hardshape[2],
                        img.hardshape[3]
                    ))
                )
            screen.blit(image(img.image)[0], image_position)
            # Per-character HUD: name, animation index/name, frame time.
            screen.blit(
                font.render( charnames[i], True, pygame.Color('red')),
                (10 + 200 * i,10))
            screen.blit(
                font.render(str(anim)+': '+animation, True, pygame.Color('red')),
                (10 + 200 * i,20))
            screen.blit(
                font.render(str(img.time), True, pygame.Color('red')),
                (10 + 200 * i,30))
            if shield:
                pygame.draw.circle(
                    screen,
                    pygame.Color('red'),
                    (
                        image_position[0] + img.hardshape[0] +
                        skin.shield_center[0] - 100 + 200 * i,
                        image_position[1] + img.hardshape[1] + skin.shield_center[1]
                    ),
                    10
                )
                image_shield = image(
                    os.path.sep.join(('..','data','misc','shield.png')),
                    zoom=3
                )
                screen.blit(
                    image_shield[0],
                    (
                        image_position[0]
                        + skin.shield_center[0]
                        - .5 * image_shield[1][2]
                        ,
                        image_position[1]
                        + skin.shield_center[1]
                        - .5 * image_shield[1][3]
                    )
                )
            # Draw each aggressive point and its half-scale vector.
            for i in img.agressivpoints:
                pygame.draw.ellipse(
                    screen,
                    pygame.Color('red'),
                    pygame.Rect(
                        image_position[0]+i[0][0]-1, image_position[1]+i[0][1]-1, 2, 2
                    )
                )
                pygame.draw.line(
                    screen,
                    pygame.Color('red'),
                    (
                        image_position[0]+i[0][0],
                        image_position[1]+i[0][1],
                    ),
                    (
                        image_position[0]+i[0][0]+i[1][0]/2,
                        image_position[1]+i[0][1]+i[1][1]/2,
                    ),
                    1
                )
            if mouse_click:
                pygame.draw.line(
                    screen,
                    pygame.color.Color("red"),
                    mouse_xy,
                    pygame.mouse.get_pos(),
                    1
                )
        pygame.display.flip()
    # NOTE(review): unreachable -- the loop only exits via `return`, which
    # skips this cleanup; the inotify watches are never removed.
    for wd in wds:
        inotifyx.rm_watch(wd)
0
Example 63
Project: stash Source File: git-branch.py
def create_branch(new_branch, base_rev, force=False ,no_track=False ):
    """Try creating a new branch which tracks the given remote
    if such a branch does not exist then branch off a local branch

    :param new_branch: name of the branch to create
    :param base_rev: revision (local branch, remote/branch, or sha) to start from
    :param force: overwrite an existing branch of the same name
    :param no_track: do not set up remote tracking even when base_rev is remote
    :returns: the fully qualified ref name of the new branch
    :raises GitError: if the branch exists and force is False
    """
    repo=_get_repo()
    # Already exists
    if new_branch in repo.branches:
        if not force:
            raise GitError("branch %s already exists\n use --force to overwrite anyway" % new_branch)
    # fork with new sha
    new_ref = repo._format_ref_branch(new_branch)
    base_sha=find_revision_sha(repo,base_rev)
    repo.repo.refs[new_ref] = base_sha
    #handle tracking, only if this was a remote
    tracking,remote_branch =( ['origin']+base_rev.split('/'))[-2:] #branch-> origin/branch. remote/branch stays as is
    qualified_remote_branch=os.path.sep.join([tracking,remote_branch])
    # Only track when base_rev names a remote branch and not a local one.
    if qualified_remote_branch in repo.remote_branches and not base_rev in repo.branches:
        if not no_track:
            add_tracking(new_branch,tracking,remote_branch)
        else:
            remove_tracking(new_branch)
    #todo reflog
    return new_ref
0
Example 64
Project: markdoc Source File: builder.py
def listing_context(self, directory):
    """
    Generate the template context for a directory listing.
    This method accepts a relative path, with the base assumed to be the
    HTML root. This means listings must be generated after the wiki is
    built, allowing them to list static media too.
    Directories should always be '/'-delimited when specified, since it is
    assumed that they are URL paths, not filesystem paths.
    For information on what the produced context will look like, consult the
    `listing` doctest.
    """
    # Strip any leading/trailing slashes from the URL-style directory name.
    directory = directory.strip('/')
    # Resolve to filesystem paths.
    fs_rel_dir = p.sep.join(directory.split('/'))
    fs_abs_dir = p.join(self.config.html_dir, fs_rel_dir)
    skip_files = set([self.config['listing-filename'], 'index.html'])
    sub_directories, pages, files = [], [], []
    for basename in os.listdir(fs_abs_dir):
        fs_abs_path = p.join(fs_abs_dir, basename)
        file_dict = {
            'basename': basename,
            'href': directory + '/' + basename}
        if not file_dict['href'].startswith('/'):
            file_dict['href'] = '/' + file_dict['href']
        if p.isdir(fs_abs_path):
            file_dict['href'] += '/'
            sub_directories.append(file_dict)
        else:
            # Skip listing/index files and hidden/private files.
            if (basename in skip_files or basename.startswith('.') or
                    basename.startswith('_')):
                continue
            file_dict['slug'] = p.splitext(basename)[0]
            file_dict['size'] = p.getsize(fs_abs_path)
            file_dict['humansize'] = humansize(file_dict['size'])
            if p.splitext(basename)[1] == (p.extsep + 'html'):
                # Get the title from the file.
                contents = read_from(fs_abs_path)
                file_dict['title'] = get_title(file_dict['slug'], contents)
                # Remove .html from the end of the href.
                file_dict['href'] = p.splitext(file_dict['href'])[0]
                pages.append(file_dict)
            else:
                files.append(file_dict)
    sub_directories.sort(key=lambda directory: directory['basename'])
    pages.sort(key=lambda page: page['title'])
    files.sort(key=lambda file_: file_['basename'])
    return {
        'directory': directory,
        'sub_directories': sub_directories,
        'pages': pages,
        'files': files,
        'make_relative': lambda href: make_relative(directory, href),
    }
0
Example 65
Project: python-ant-downloader Source File: tcx.py
def export_tcx(device_sn, raw_file_name, output_dir):
    """
    Given a garmin raw packet dump, tcx to specified output directory.

    :param device_sn: device serial number assigned to the mock host
    :param raw_file_name: path to the raw packet dump to replay
    :param output_dir: directory receiving one .tcx file per run
    :returns: list of the written .tcx file paths
    """
    with open(raw_file_name) as file:
        result = []
        # Replay the captured packets through a mock host/device.
        host = garmin.MockHost(file.read())
        host.device_id = device_sn
        device = garmin.Device(host)
        run_pkts = device.get_runs()
        runs = garmin.extract_runs(device, run_pkts)
        for run in runs:
            # One file per run, named by the run's start time.
            tcx_name = time.strftime("%Y%m%d-%H%M%S.tcx", run.time.gmtime)
            tcx_full_path = os.path.sep.join([output_dir, tcx_name])
            _log.info("tcx: writing %s -> %s.", os.path.basename(raw_file_name), tcx_full_path)
            with open(tcx_full_path, "w") as file:
                doc = create_docuement(device, [run])
                file.write(etree.tostring(doc, pretty_print=True, xml_declaration=True, encoding="UTF-8"))
                result.append(tcx_full_path)
    return result
0
Example 66
Project: google-drive-recursive-ownership Source File: transfer.py
def grant_ownership(service, drive_item, prefix, permission_id, show_already_owned):
    """Make `permission_id` the owner of `drive_item`.

    Upgrades an existing permission to 'owner' when one exists; otherwise
    inserts a new owner permission. Only the current owner can transfer
    ownership, so items not owned by the authenticated user are skipped.
    """
    full_path = os.path.join(os.path.sep.join(prefix), drive_item['title']).encode('utf-8', 'replace')
    #pprint.pprint(drive_item)
    current_user_owns = False
    for owner in drive_item['owners']:
        if owner['permissionId'] == permission_id:
            # Target already owns the item -- nothing to do.
            if show_already_owned:
                print('Item {} already has the right owner.'.format(full_path))
            return
        elif owner['isAuthenticatedUser']:
            current_user_owns = True
    print('Item {} needs ownership granted.'.format(full_path))
    if not current_user_owns:
        # Ownership can only be transferred by the current owner.
        print(' But, current user does not own the item.'.format(full_path))
        return
    try:
        # Prefer upgrading the target's existing permission to 'owner'.
        permission = service.permissions().get(fileId=drive_item['id'], permissionId=permission_id).execute()
        permission['role'] = 'owner'
        print(' Upgrading existing permissions to ownership.')
        return service.permissions().update(fileId=drive_item['id'], permissionId=permission_id, body=permission, transferOwnership=True).execute()
    except apiclient.errors.HttpError as e:
        # 404 means no existing permission -- fall through and create one.
        if e.resp.status != 404:
            print('An error occurred updating ownership permissions: {}'.format(e))
            return
    print(' Creating new ownership permissions.')
    permission = {'role': 'owner',
                  'type': 'user',
                  'id': permission_id}
    try:
        service.permissions().insert(fileId=drive_item['id'], body=permission, emailMessage='Automated recursive transfer of ownership.').execute()
    except apiclient.errors.HttpError as e:
        print('An error occurred inserting ownership permissions: {}'.format(e))
0
Example 67
Project: cherrymusic Source File: httphandler.py
def trans(self, newformat, *path, **params):
    ''' Transcodes the track given as ``path`` into ``newformat``.

    Streams the response of the corresponding
    ``audiotranscode.AudioTranscode().transcodeStream()`` call.

    params:
        bitrate: int for kbps. None or < 1 for default
        starttime: int seconds to seek into the track before transcoding
    :raises cherrypy.HTTPRedirect: when the session is not authorized
    :raises cherrypy.HTTPError: 400 for a bad bitrate, 404 on transcode failure
    '''
    if not self.isAuthorized():
        raise cherrypy.HTTPRedirect(self.getBaseUrl(), 302)
    # Streaming can be slow; don't hold the session lock meanwhile.
    cherrypy.session.release_lock()
    if cherry.config['media.transcode'] and path:
        # bitrate
        bitrate = params.pop('bitrate', None) or None  # catch empty strings
        if bitrate:
            try:
                bitrate = max(0, int(bitrate)) or None  # None if < 1
            except (TypeError, ValueError):
                raise cherrypy.HTTPError(400, "Bad query: "
                    "bitrate ({0!r}) must be an integer".format(str(bitrate)))
        # path: the URL segments form the relative media path
        path = os.path.sep.join(path)
        if sys.version_info < (3, 0):  # workaround for #327 (cherrypy issue)
            path = path.decode('utf-8')  # make it work with non-ascii
        else:
            path = codecs.decode(codecs.encode(path, 'latin1'), 'utf-8')
        fullpath = os.path.join(cherry.config['media.basedir'], path)
        starttime = int(params.pop('starttime', 0))
        transcoder = audiotranscode.AudioTranscode()
        mimetype = audiotranscode.mime_type(newformat)
        cherrypy.response.headers["Content-Type"] = mimetype
        try:
            return transcoder.transcode_stream(fullpath, newformat,
                                               bitrate=bitrate, starttime=starttime)
        except (audiotranscode.TranscodeError, IOError) as e:
            raise cherrypy.HTTPError(404, e.value)
0
Example 68
Project: gitsome Source File: environ.py
@ensure_hg
def get_hg_branch(cwd=None, root=None):
    """Return the current Mercurial branch name, plus the active bookmark
    when one is set (formatted as "branch, bookmark").

    When `root` is given, reads .hg/branch and .hg/bookmarks.current
    directly; falls back to running `hg branch` when the branch file is
    missing. Returns None when no branch can be determined.
    """
    branch = None
    active_bookmark = None
    if root is not None:
        branch_path = os.path.sep.join([root, '.hg', 'branch'])
        bookmark_path = os.path.sep.join([root, '.hg', 'bookmarks.current'])
        if os.path.exists(branch_path):
            with open(branch_path, 'r') as branch_file:
                branch = branch_file.read()
        else:
            # No branch file -- ask hg itself.
            branch = call_hg_command(['branch'], cwd)
        if os.path.exists(bookmark_path):
            with open(bookmark_path, 'r') as bookmark_file:
                active_bookmark = bookmark_file.read()
    if active_bookmark is not None:
        return "{0}, {1}".format(
            *(b.strip(os.linesep) for b in (branch, active_bookmark)))
    return branch.strip(os.linesep) if branch else None
0
Example 69
def metric_to_filepath(metric, data_dir):
    """Map a dotted metric name to its Whisper (.wsp) file path under `data_dir`."""
    components = [data_dir]
    components.extend(metric.split('.'))
    return os.path.sep.join(components) + '.wsp'
0
Example 70
def __init__(self, cache_dir):
    """Set up the sitemap cache directory under `cache_dir` and create it."""
    # All cached parts live in a ".sitemap_cache" subdirectory.
    self.cache_dir = path.sep.join([cache_dir, ".sitemap_cache"])
    makedirs(self.cache_dir)
    # Recognizes sitemap part files such as "sitemap_part12.xml".
    self.smpat = re.compile(r"sitemap_part[0-9]+.xml")
0
Example 71
def fetch(self, host, resource, query="",
          port=80, method="GET",
          headers=None):
    """Fetch `resource` from `host`, serving cacheable sitemap parts from
    the on-disk cache and writing them back to the cache after a miss.

    Fix: the original used a mutable default argument (`headers={ }`),
    which is shared across calls; default to None and build a fresh dict.

    :returns: a WrappedResponse for either the cached file or the live response
    """
    if headers is None:
        headers = {}
    # All the caching logic goes here
    logging.debug("Trying to fetch: %s:%d%s" % (host, port, resource))
    m = self.smpat.search(resource)
    cached_file_path = None
    # If the resource matches something that we are capable of cache
    # then go ahead
    if m:
        cached_file_path = path.sep.join([self.cache_dir, m.group(0)])
    # The file containing the cached data MUST exist and be non-empty
    if cached_file_path and path.exists(cached_file_path) and path.getsize(cached_file_path) > 0:
        fh = open(cached_file_path, "r")
        wr = WrappedResponse(fh, 200)
        logging.debug("Returning cached file for: %s:%d%s" % (host, port, resource))
        return wr
    logging.debug("Fetching (%s:%d%s) from the web" % (host, port, resource))
    request = (host, port, method, resource, query, headers)
    response = make_HTTP_request(*request)
    wr = WrappedResponse(response)
    # We arrived here because we couldn't locate a valid cache file for
    # the requested resource. Try to cache it if we can
    if cached_file_path:
        sitemap_contents = wr.read()
        fh = open(cached_file_path, "w")
        fh.write(sitemap_contents)
        fh.close()
    return wr
0
Example 72
Project: zeroclickinfo-longtail Source File: incremental_update.py
def is_lyrics_present_for_URL(self, url):
    """Return True if lyrics for `url` exist in the master DB or as a
    non-empty file in the staging directory.

    Bug fix: the original guard was `if not self.prepare_master_for_querying:`
    which tests the bound method object (always truthy), so the master DB
    was never actually prepared. Test the `prepared_master` flag instead.
    """
    logging.debug("is_lyrics_present_for_URL(%s)" % url)
    if not self.prepared_master:
        self.prepare_master_for_querying()
        self.prepared_master = True
    c = self.master_conn.cursor()
    q = r"SELECT COUNT(*) FROM LYRICS WHERE url = ?"
    res = c.execute(q, (url, ))
    if res.fetchone()[0] > 0:
        logging.debug("TRUE1")
        return True
    else:
        # Not in the DB -- check the staging area on disk.
        m = self.url_pat.search(url)
        staging_file = path.sep.join([ self.staging_directory, m.group(1) ] + \
            get_nonempty_url_parts(m.group(2)))
        # print "Staging file: " + staging_file
        if path.exists(staging_file) and path.getsize(staging_file) > 0:
            logging.debug("TRUE2")
            return True
        else:
            logging.debug("FALSE")
            return False
0
Example 73
Project: zeroclickinfo-longtail Source File: incremental_update.py
def download_and_save_URL(self, url):
    """Download the lyrics page at `url` and save it into the staging
    directory, mirroring the URL's host/path structure on disk.

    NOTE(review): Python 2 code (`except Exception, ex`). `make_HTTP_request`,
    `get_nonempty_url_parts`, `path` and `makedirs` come from the module.
    """
    m = self.url_pat.search(url)
    if not m:
        # ERROR: did not match!!
        logging.warn("URL (%s) did not match pattern" % (url))
        return
    # group(1) = host, group(2) = resource path.
    request = (m.group(1), 80, "GET", m.group(2), "", self.headers)
    response = make_HTTP_request(*request)
    if response.status != 200:
        # ERROR
        logging.error("Error fetching lyrics for URL: %s" % url)
        return
    components = [self.staging_directory, m.group(1)] + \
        get_nonempty_url_parts(m.group(2))
    dir_path = path.sep.join(components[:-1])
    file_path = path.sep.join(components)
    makedirs(dir_path)
    fh = None
    try:
        docuement = response.read()
        fh = open(file_path, "w")
        fh.write(docuement)
    except Exception, ex:
        logging.debug("ERROR (%s) reading response for URL: %s" % (str(ex), m.group(2)))
    if fh:
        fh.close()
0
Example 74
def mk_path(srcfile):
    """Return the path of `srcfile` inside the module-level DATADIR."""
    parts = [DATADIR, srcfile]
    return os.path.sep.join(parts)
0
Example 75
Project: frappe Source File: test_runner.py
def _add_test(app, path, filename, verbose, test_suite=None):
    """Load the test module at `path`/`filename` for `app` and add its
    tests to `test_suite`, creating doctype test records first when the
    module lives in a doctype directory.

    Skips boilerplate templates and selenium tests when those are disabled.
    """
    import os
    if os.path.sep.join(["doctype", "doctype", "boilerplate"]) in path:
        # in /doctype/doctype/boilerplate/
        return
    # Translate the filesystem path into a dotted module name.
    app_path = frappe.get_pymodule_path(app)
    relative_path = os.path.relpath(path, app_path)
    if relative_path=='.':
        module_name = app
    else:
        module_name = '{app}.{relative_path}.{module_name}'.format(app=app,
            relative_path=relative_path.replace('/', '.'), module_name=filename[:-3])
    module = frappe.get_module(module_name)
    if getattr(module, "selenium_tests", False) and not frappe.conf.run_selenium_tests:
        return
    if not test_suite:
        test_suite = unittest.TestSuite()
    if os.path.basename(os.path.dirname(path))=="doctype":
        # Doctype tests need their fixture records created first; the
        # doctype name comes from the sibling JSON file ("test_" stripped).
        txt_file = os.path.join(path, filename[5:].replace(".py", ".json"))
        with open(txt_file, 'r') as f:
            doc = json.loads(f.read())
        doctype = doc["name"]
        make_test_records(doctype, verbose)
    test_suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
0
Example 76
Project: ganeti Source File: io.py
def FindFile(name, search_path, test=os.path.exists):
    """Look for a filesystem object in a given path.

    Searches each location in `search_path` for an entry called `name`
    that satisfies `test`.

    @type name: str
    @param name: the name to look for
    @type search_path: iterable of string
    @param search_path: locations to start at
    @type test: callable
    @param test: one-argument predicate deciding whether a candidate is
        valid; defaults to os.path.exists (existing files only)
    @rtype: str or None
    @return: full path to the object if found, None otherwise
    """
    # validate the filename mask
    if constants.EXT_PLUGIN_MASK.match(name) is None:
        logging.critical("Invalid value passed for external script name: '%s'",
                         name)
        return None
    for candidate_dir in search_path:
        # FIXME: investigate switch to PathJoin
        candidate = os.path.sep.join([candidate_dir, name])
        # The user test must pass and the candidate must really resolve
        # to the requested basename.
        if not test(candidate):
            continue
        if os.path.basename(candidate) != name:
            continue
        return candidate
    return None
0
Example 77
def get_leaf(self, f):
    """Return the leaf directory path for `f`, limited to `self.levels` components."""
    components = split_count(f, self.width)
    return os.path.sep.join(components[:self.levels])
0
Example 78
def get_full_filename(self, f):
    """Return the full storage path for `f` (or its zip path when compression is on)."""
    leaf = self.get_leaf(f)
    if self.compress:
        return self.get_zip(leaf)
    parts = [self.root, leaf, f]
    return os.path.sep.join(parts)
0
Example 79
def __getitem__(self, f):
    """Open and return the stored object for key `f` (zip member when compressed)."""
    leaf = self.get_leaf(f)
    if self.compress:
        return self.get_zip(leaf)
    full_path = os.path.sep.join([self.root, leaf, f])
    return open(full_path, 'r')
0
Example 80
Project: pelican-plugins Source File: better_figures_and_images.py
def content_object_init(instance):
    """Pelican signal handler: post-process generated HTML, resolving each
    <img>/<object> source to a real file and injecting a width/height style
    matching the image's natural size.
    """
    if instance._content is not None:
        content = instance._content
        soup = BeautifulSoup(content, 'html.parser')
        for img in soup(['img', 'object']):
            logger.debug('Better Fig. PATH: %s', instance.settings['PATH'])
            # <img> uses src, <object> uses data for its source URL.
            if img.name == 'img':
                logger.debug('Better Fig. img.src: %s', img['src'])
                img_path, img_filename = path.split(img['src'])
            else:
                logger.debug('Better Fig. img.data: %s', img['data'])
                img_path, img_filename = path.split(img['data'])
            logger.debug('Better Fig. img_path: %s', img_path)
            logger.debug('Better Fig. img_fname: %s', img_filename)
            # Strip off (unknown), |filename| or /static
            # NOTE(review): "(unknown)" looks like a scraping artifact of the
            # original prefix token -- confirm against the upstream plugin.
            if img_path.startswith(('(unknown)', '|filename|')):
                img_path = img_path[10:]
            elif img_path.startswith('/static'):
                img_path = img_path[7:]
            elif img_path.startswith('data:image'):
                # Image is encoded in-line (not a file).
                continue
            else:
                logger.warning('Better Fig. Error: img_path should start with either (unknown), |filename| or /static')
            # search src path list
            # 1. Build the source image filename from PATH
            # 2. Build the source image filename from STATIC_PATHS
            # if img_path start with '/', remove it.
            img_path = os.path.sep.join([el for el in img_path.split("/") if len(el) > 0])
            # style: (unknown)/static/foo/bar.png
            src = os.path.join(instance.settings['PATH'], img_path, img_filename)
            src_candidates = [src]
            # style: (unknown)../static/foo/bar.png
            src_candidates += [os.path.join(instance.settings['PATH'], static_path, img_path, img_filename) for static_path in instance.settings['STATIC_PATHS']]
            # Keep only candidates that exist and are readable.
            src_candidates = [f for f in src_candidates if path.isfile(f) and access(f, R_OK)]
            if not src_candidates:
                logger.error('Better Fig. Error: image not found: %s', src)
                logger.debug('Better Fig. Skip src: %s', img_path + '/' + img_filename)
                continue
            src = src_candidates[0]
            logger.debug('Better Fig. src: %s', src)
            # Open the source image and query dimensions; build style string
            try:
                if img.name == 'img':
                    im = Image.open(src)
                    extra_style = 'width: {}px; height: auto;'.format(im.size[0])
                else:
                    svg = pysvg.parser.parse(src)
                    extra_style = 'width: {}px; height: auto;'.format(svg.get_width())
            except IOError as e:
                logger.debug('Better Fig. Failed to open: %s', src)
                extra_style = 'width: 100%; height: auto;'
            if 'RESPONSIVE_IMAGES' in instance.settings and instance.settings['RESPONSIVE_IMAGES']:
                extra_style += ' max-width: 100%;'
            if img.get('style'):
                img['style'] += extra_style
            else:
                img['style'] = extra_style
            # Drop alt text that merely duplicates the src.
            if img.name == 'img':
                if img['alt'] == img['src']:
                    img['alt'] = ''
            # Propagate the style to an enclosing figure div, if any.
            fig = img.find_parent('div', 'figure')
            if fig:
                if fig.get('style'):
                    fig['style'] += extra_style
                else:
                    fig['style'] = extra_style
        instance._content = soup.decode()
0
Example 81
Project: mythbox Source File: test_resolver.py
def test_store_When_channel_has_icon_Then_download_icon(self):
    """Download icons for up to five channels that declare an icon path and
    verify each icon arrives on disk as a non-empty file.

    Fix: the original passed a ``filter()`` result to ``len()`` and sliced it,
    which works only on Python 2 (where ``filter`` returns a list).  A list
    comprehension is equivalent on both Python 2 and 3.
    """
    # Setup: keep only channels that actually have an icon to fetch.
    channels = [x for x in self.db.getChannels() if x.getIconPath()]
    self.assertTrue(len(channels) > 0, 'Channels with icon needed in db to run test')
    downloader = MythChannelIconResolver(self.conn)
    # Test - download icons for first 5 channels (slicing already clamps to
    # the list length, so the original min(5, len(channels)) was redundant).
    for channel in channels[:5]:
        if channel.getIconPath():
            # Unique destination name: channel id + callsign + timestamp.
            dest = os.path.sep.join([tempfile.gettempdir(), 'channel_' + str(channel.getChannelId()) + channel.getCallSign() + str(time.time()) + '.png'])
            downloader.store(channel, dest)
            # Verify
            log.debug('Downloaded %s to %s' % (channel.getIconPath(), dest))
            self.assertTrue(os.path.exists(dest))
            self.assertTrue(os.path.isfile(dest))
            self.assertTrue(os.path.getsize(dest) > 0)
            # Cleanup
            os.remove(dest)
Example 82 (score: 0)
def __normalize_path(self, path):
    """Convert a '/'-separated path string into the platform's native form.

    Fix: the original wrapped the result in a single-argument
    ``os.path.join(...)`` call, which returns its argument unchanged — the
    wrapper was a no-op and has been removed.
    """
    return os.path.sep.join(path.split('/'))
Example 83 (score: 0)
Project: MailingListStats — Source File: test_archives.py
def test_mailing_list_6(self):
    """A local directory under .mlstats, given without a trailing slash,
    must be recognised as a local (non-remote) mailing list with the
    expected alias and compressed-dir mapping."""
    base = os.path.sep.join([COMPRESSED_DIR,
                             'mlstats.org/pipermail/mlstats-list'])
    ml = MailingList(base)
    # The compressed dir mirrors the location, minus any leading separator.
    target = base.lstrip(os.path.sep)
    expected = dict(
        location=base.rstrip(os.path.sep),
        alias='mlstats-list',
        compressed_dir=os.path.join(COMPRESSED_DIR, target),
        is_local=True,
        is_remote=False,
    )
    result = dict(
        location=ml.location,
        alias=ml.alias,
        compressed_dir=ml.compressed_dir,
        is_local=ml.is_local(),
        is_remote=ml.is_remote(),
    )
    self.check_single_dict(expected, result)
Example 84 (score: 0)
Project: MailingListStats — Source File: test_archives.py
def test_mailing_list_7(self):
    """A local directory under .mlstats, given WITH a trailing slash,
    must still normalise to the same alias, location and compressed-dir
    mapping as the slash-less form."""
    base = os.path.sep.join([COMPRESSED_DIR,
                             'mlstats.org/pipermail/mlstats-list/'])
    ml = MailingList(base)
    # Strip separators from BOTH ends here (the trailing one matters).
    target = base.strip(os.path.sep)
    expected = dict(
        location=base.rstrip(os.path.sep),
        alias='mlstats-list',
        compressed_dir=os.path.join(COMPRESSED_DIR, target),
        is_local=True,
        is_remote=False,
    )
    result = dict(
        location=ml.location,
        alias=ml.alias,
        compressed_dir=ml.compressed_dir,
        is_local=ml.is_local(),
        is_remote=ml.is_remote(),
    )
    self.check_single_dict(expected, result)
Example 85 (score: 0)
def _GetPrefix(self, hashed_key):
    """Build a nested cache-directory prefix from the leading characters
    of *hashed_key* (presumably a hex digest string — joining a string
    slice produces one path component per character)."""
    depth = FileCache.DEPTH
    return os.path.sep.join(hashed_key[:depth])
Example 86 (score: 0)
def native(path):
    """Convert a '/'-separated path to the platform's native separator and
    return it normalized, anchored under the module-level ``cwd``."""
    native_relative = _os.path.sep.join(path.split('/'))
    return _os.path.normpath(_os.path.join(cwd, native_relative))
Example 87 (score: 0)
Project: IPod-Shuffle-4g — Source File: ipod-shuffle-4g.py
def ipod_to_path(self, ipodname):
    """Map an iPod-internal, '/'-separated name to an absolute local path
    rooted at ``self.base``."""
    relative = os.path.sep.join(ipodname.split("/"))
    return os.path.abspath(os.path.join(self.base, relative))
Example 88 (score: 0)
def start(dir, diff):
    """Entry point: prepare the working directory layout, then start or
    resume a word/usage fetch session depending on which state files exist.

    ``dir``  -- base working directory (name kept for interface
                compatibility even though it shadows the builtin).
    ``diff`` -- mode flag; 'e' selects the usage-download path.

    Fix: the original tested
    ``path.exists(fullpath('usages.txt.part') and fullpath('failedusg.txt'))``,
    which and-ed the two path STRINGS and so only ever checked the second
    path.  Each path is now tested separately.
    """
    global base_dir
    base_dir = dir
    if base_dir:
        # Normalise to a native path that ends with exactly one separator.
        base_dir = base_dir.strip('/\\ ').replace('/', '\\')
        base_dir = ''.join([path.sep.join(base_dir.split('\\')), path.sep])
    picdir = fullpath('p')
    if not path.exists(picdir):
        os.mkdir(picdir)
    fp1 = fullpath('digest')
    if diff != 'e':
        fp2 = fullpath('vocabulary.txt.part')
        fp3 = fullpath('failed.txt')
        if path.exists(fp1) and path.exists(fp2) and path.exists(fp3):
            # All resume-state files present: pick up the failed words.
            print("Continue last failed")
            mdict = json.loads(readdata('digest'), object_hook=to_worddata)
            fetchdata_and_make_mdx(mdict, 'failed.txt', '.part')
        elif not path.exists(fp1):
            # No digest at all: start from scratch.
            print("New session started")
            mdict = OrderedDict()
            fetchdata_and_make_mdx(mdict, 'wordlist.txt')
        else:
            # Digest exists but the .part/failed pair does not:
            # fall through to the usage-download phase if applicable.
            if not path.exists(fullpath('usages.txt')) and path.exists(fp1) and path.exists(fullpath('vocabulary.txt')):
                mdict = json.loads(readdata('digest'), object_hook=to_worddata)
                # BUG FIX (see docstring): test each path's existence separately.
                if path.exists(fullpath('usages.txt.part')) and path.exists(fullpath('failedusg.txt')):
                    print("Downloading usages, continue last failed")
                    fetch_usg(mdict, 'failedusg.txt', '.part')
                else:
                    print("Downloading usages, new session started")
                    fetch_usg(mdict, 'wordlist.txt')
Example 89 (score: 0)
Project: parallel-ssh — Source File: test_pssh_client.py
def test_pssh_copy_file(self):
    """Test parallel copy file.

    Writes a small local file, copies it to the embedded SSH server via
    ``ParallelSSHClient.copy_file``, and checks the remote directory and
    file appear.

    Fix: the local file was opened without a ``with`` block, so the handle
    leaked if ``writelines`` raised; a context manager guarantees closure.
    """
    test_file_data = 'test'
    local_filename = 'test_file'
    remote_test_dir, remote_filename = 'remote_test_dir', 'test_file_copy'
    remote_filename = os.path.sep.join([remote_test_dir, remote_filename])
    with open(local_filename, 'w') as test_file:
        test_file.writelines([test_file_data + os.linesep])
    client = ParallelSSHClient([self.host], port=self.listen_port,
                               pkey=self.user_key)
    cmds = client.copy_file(local_filename, remote_filename)
    cmds[0].get()  # block until the async copy finishes
    self.assertTrue(os.path.isdir(remote_test_dir),
                    msg="SFTP create remote directory failed")
    self.assertTrue(os.path.isfile(remote_filename),
                    msg="SFTP copy failed")
    for filepath in [local_filename, remote_filename]:
        os.unlink(filepath)
    del client
Example 90 (score: 0)
Project: parallel-ssh — Source File: test_ssh_client.py
def test_ssh_client_mkdir_recursive_abspath(self):
    """Recursive SFTP mkdir with an absolute path.

    Absolute SFTP paths resolve relative to the user's home directory,
    not the root filesystem, so creating '/<dirs>' must materialise the
    directories locally under the test's working tree.
    """
    base_path = 'tmp'
    nested = ['remote_test_dir2', 'remote_test_dir3']
    remote_dir = os.path.sep.join([base_path] + nested)
    # Best-effort cleanup of leftovers from a previous run.
    try:
        shutil.rmtree(base_path)
    except OSError:
        pass
    client = SSHClient(self.host, port=self.listen_port,
                       pkey=self.user_key)
    client.mkdir(client._make_sftp(), '/' + remote_dir)
    self.assertTrue(os.path.isdir(remote_dir),
                    msg="SFTP recursive mkdir failed")
    shutil.rmtree(base_path)
    del client
Example 91 (score: 0)
Project: parallel-ssh — Source File: test_ssh_client.py
def test_ssh_client_sftp(self):
    """Test SFTP features of SSHClient. Copy local filename to server,
    check that data in both files is the same, make new directory on
    server, remove files and directory.

    Fix: both file handles were opened without ``with`` blocks, leaking
    the descriptors if an intervening call raised; context managers now
    guarantee closure.
    """
    test_file_data = 'test'
    local_filename = 'test_file'
    remote_test_dir, remote_filename = 'remote_test_dir', 'test_file_copy'
    remote_filename = os.path.sep.join([remote_test_dir, remote_filename])
    remote_dir = 'remote_dir'
    with open(local_filename, 'w') as test_file:
        test_file.writelines([test_file_data + os.linesep])
    client = SSHClient(self.host, port=self.listen_port,
                       pkey=self.user_key)
    client.copy_file(local_filename, remote_filename)
    self.assertTrue(os.path.isdir(remote_test_dir),
                    msg="SFTP create remote directory failed")
    self.assertTrue(os.path.isfile(remote_filename),
                    msg="SFTP copy failed")
    with open(remote_filename, 'r') as copied_file:
        copied_file_data = copied_file.readlines()[0].strip()
    self.assertEqual(test_file_data, copied_file_data,
                     msg="Data in destination file %s does \
not match source %s" % (copied_file_data, test_file_data))
    for filepath in [local_filename, remote_filename]:
        os.unlink(filepath)
    client.mkdir(client._make_sftp(), remote_dir)
    self.assertTrue(os.path.isdir(remote_dir))
    for dirpath in [remote_dir, remote_test_dir]:
        os.rmdir(dirpath)
    del client