os.path.sep

Here are examples of the Python API os.path.sep taken from open source projects. os.path.sep is the string the host operating system uses to separate pathname components: '/' on POSIX systems and '\\' on Windows.
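
For orientation, here is a minimal standalone sketch (not from any of the projects below) showing what os.path.sep holds and why os.path.join is usually preferred to concatenating the separator by hand:

import os

# os.path.sep is the string that separates path components:
# '/' on POSIX systems, '\\' on Windows.
print(os.path.sep)

# Concatenating os.path.sep by hand works, but os.path.join inserts the
# native separator for you:
manual = 'photos' + os.path.sep + 'cats' + os.path.sep + 'tabby.jpeg'
joined = os.path.join('photos', 'cats', 'tabby.jpeg')
assert manual == joined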

144 Examples

Example 1

Project: Qzone_Photo Source File: untils.py
Function: save_image
# Python 2 code: note the print statement and the urllib2 module.
def saveImage(path, photo, qq, index):
    print index, photo.URL
    url = photo.URL.replace('\\', '')
    f = urllib2.urlopen(url, timeout=10)
    data = f.read()
    f.close()
    if not os.path.exists(path + os.path.sep + qq):
        os.mkdir(path + os.path.sep + qq)
    # index must already be a string here, or this concatenation raises TypeError.
    with open(path + os.path.sep + qq + os.path.sep + index + '.jpeg', "wb") as code:
        code.write(data)
        # The explicit close() is redundant; the with block closes the file.
        code.close()

Example 2

Project: FaST-LMM Source File: HPC.py
    def create_run_dir(self):
        username = os.environ["USERNAME"]
        localwd = os.getcwd()
        #!! make an option to specify the full remote WD. Also, what is the "\\\\" case for?
        if localwd.startswith("\\\\"):
            remotewd = self.fileshare + os.path.sep + username + os.path.sep + "\\".join(localwd.split('\\')[4:])
            nodelocalwd = r"d:\scratch\escience" + os.path.sep + username + os.path.sep + "\\".join(localwd.split('\\')[4:])  #!!! const
        else:
            remotewd = self.fileshare + os.path.sep + username + os.path.splitdrive(localwd)[1]  # using '+' because os.path.join doesn't work with shares
            nodelocalwd = r"d:\scratch\escience" + os.path.sep + username + os.path.splitdrive(localwd)[1]  #!!! const
        import datetime
        now = datetime.datetime.now()
        run_dir_rel = os.path.join("runs", util.datestamp(appendrandom=True))
        run_dir_abs = os.path.join(remotewd, run_dir_rel)
        util.create_directory_if_necessary(run_dir_abs, isfile=False)

        return remotewd, run_dir_abs, run_dir_rel, nodelocalwd

Example 3

Project: flickr-download Source File: utils.py
Function: test_separators
    def test_separators(self):
        self.assertEqual(get_full_path('moo/boo', 'foo.jpg'),
                         ''.join(['moo_boo', os.path.sep, 'foo.jpg']))
        self.assertEqual(get_full_path('moo', 'foo/faa.jpg'),
                         ''.join(['moo', os.path.sep, 'foo_faa.jpg']))
        self.assertEqual(get_full_path('moo/boo', 'foo/faa.jpg'),
                         ''.join(['moo_boo', os.path.sep, 'foo_faa.jpg']))

Example 4

Project: ro-manager Source File: ScanDirectories.py
def CollectDirectoryContents(srcDir, baseDir="", 
        listDirs=True, listFiles=False, recursive=True, appendSep=False):
    """
    Return a list of directory contents found under the source directory.
    """
    logger.debug("CollectDirectories: %s, %s, %s"%(srcDir,baseDir,str(os.path.sep)))
    dirsuffix = ""
    if appendSep: dirsuffix = os.path.sep
    collection = []
    if (baseDir != "") and (not baseDir.endswith(os.path.sep)):
        baseDir = baseDir+os.path.sep
    def CollectDir(path):
        logger.debug("- CollectDir base: %s, path: %s"%(baseDir, path))
        if listDirs: collection.append(path.replace(baseDir,"",1)+dirsuffix)
    def CollectFile(path):
        logger.debug("- CollectFile base: %s, path: %s"%(baseDir, path))
        if listFiles: collection.append(path.replace(baseDir,"",1))
    ScanDirectoriesEx(srcDir, CollectDir, CollectFile, recursive)
    return collection

Example 5

Project: buildtools-BaseTools Source File: Misc.py
Function: common_path
def CommonPath(PathList):
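    # min() and max() give the lexicographically smallest and largest paths;
    # any leading components shared by those two are shared by every path
    # in the list.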
    Path1 = min(PathList).split(os.path.sep)
    Path2 = max(PathList).split(os.path.sep)
    for Index in xrange(min(len(Path1), len(Path2))):
        if Path1[Index] != Path2[Index]:
            return os.path.sep.join(Path1[:Index])
    return os.path.sep.join(Path1)

Example 6

Project: afl-crash-analyzer Source File: FilesizeFinder.py
    def rename_by_file_size(self, dirname, largest_to_smallest=False, keep_old_file=False, target=None):
        # Parenthesized so the configured width is applied to the number;
        # without the parentheses, '%' binds only to the "d" literal and
        # raises a TypeError.
        new_name = ("%" + self.config.max_digets + "d") % 1
        for path, filename, _ in self.files_by_file_size(dirname, largest_to_smallest=largest_to_smallest):
            if target:
                new_name_path = target + os.path.sep + new_name
            else:
                # default: write it into the same directory where the input file lived
                new_name_path = path + os.path.sep + new_name
            if keep_old_file:
                shutil.copyfile(path + os.path.sep + filename, new_name_path)
            else:
                shutil.move(path + os.path.sep + filename, new_name_path)
            new_name = ("%" + self.config.max_digets + "d") % (int(new_name) + 1)

Example 7

Project: attic Source File: helpers.py
Function: init
    def __init__(self, pattern):
        if pattern.endswith(os.path.sep):
            self.pattern = pattern+'*'+os.path.sep
        else:
            self.pattern = pattern+os.path.sep+'*'
        # fnmatch and re.match both cache compiled regular expressions.
        # Nevertheless, this is about 10 times faster.
        self.regex = re.compile(translate(self.pattern))

Example 8

Project: swiftly Source File: iomanager.py
    def os_path_to_client_path(self, os_path):
        """
        Converts an operating system path into a client path by
        replacing instances of os.path.sep with '/'.

        Note: If the client path contains any instances of '/'
        already, they will be replaced with '-'.
        """
        if os.path.sep == '/':
            return os_path
        return os_path.replace('/', '-').replace(os.path.sep, '/')

Example 9

Project: commotion-client Source File: fs_utils.py
Function: walklevel
def walklevel(some_dir, level=1):
    some_dir = some_dir.rstrip(os.path.sep)
    log.debug(translate("logs", "attempting to walk directory {0}".format(some_dir)))
    if not os.path.isdir(some_dir):
        raise NotADirectoryError(translate("logs", "{0} is not a directory. Can only 'walk' down through directories.".format(some_dir)))
    num_sep = some_dir.count(os.path.sep)
    for root, dirs, files in os.walk(some_dir):
        yield root, dirs, files
        num_sep_this = root.count(os.path.sep)
        if num_sep + level <= num_sep_this:
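            # Prune the walk in place: emptying dirs stops os.walk from
            # descending below the requested level.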
            del dirs[:]

Example 10

Project: pytest Source File: _argcomplete.py
Function: call
    def __call__(self, prefix, **kwargs):
        """only called on non option completions"""
        if os.path.sep in prefix[1:]: #
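            # Length of the directory part (including the trailing sep);
            # completions are later emitted with this prefix stripped,
            # mimicking bash rather than compgen.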
            prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
        else:
            prefix_dir = 0
        completion = []
        globbed = []
        if '*' not in prefix and '?' not in prefix:
            if prefix[-1] == os.path.sep:  # we are on unix, otherwise no bash
                globbed.extend(glob(prefix + '.*'))
            prefix += '*'
        globbed.extend(glob(prefix))
        for x in sorted(globbed):
            if os.path.isdir(x):
                x += '/'
            # append stripping the prefix (like bash, not like compgen)
            completion.append(x[prefix_dir:])
        return completion

Example 11

Project: demimove Source File: helpers.py
def walklevels(path, levels=1):
    """Wrap os.walk to allow setting recursion depth."""
    path = path.rstrip(os.path.sep)
    assert os.path.isdir(path)
    num_sep = path.count(os.path.sep)
    for root, dirs, files in os.walk(path):
        yield root, dirs, files
        num_sep_this = root.count(os.path.sep)
        if num_sep + levels <= num_sep_this:
            del dirs[:]

Example 12

Project: pymt Source File: filebrowser.py
Function: on_path_change
    def _on_path_change(self, path):
        if len(path) > int(self.size[0]/8):
            folders = path.split(os.path.sep)
            temp_label = ''
            i = -1
            max_len = int(self.size[0]/8)-8
            while len(temp_label) < max_len:
                temp_label = folders[i] + os.path.sep + temp_label
                i -= 1
            self.w_path.label = '..' + os.path.sep + temp_label
        else:
            self.w_path.label = path

Example 13

Project: simpeg Source File: GravityDriver.py
Function: init
    def __init__(self, input_file=None):
        if input_file is not None:
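            # Hand-rolled dirname: drop the last path component and rejoin
            # the rest (roughly os.path.dirname(input_file)).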
            self.basePath = os.path.sep.join(input_file.split(os.path.sep)[:-1])
            if len(self.basePath) > 0:
                self.basePath += os.path.sep
            self.readDriverFile(input_file.split(os.path.sep)[-1])

Example 14

Project: fabric Source File: server.py
Function: expand
def expand(path):
    """
    '/foo/bar/biz' => ('/', 'foo', 'bar', 'biz')
    'relative/path' => ('relative', 'path')
    """
    # Base case
    if path in ['', os.path.sep]:
        return [path]
    ret = PrependList()
    directory, filename = os.path.split(path)
    while directory and directory != os.path.sep:
        ret.prepend(filename)
        directory, filename = os.path.split(directory)
    ret.prepend(filename)
    # Handle absolute vs relative paths
    ret.prepend(directory if directory == os.path.sep else '')
    return ret

Example 15

Project: odo Source File: test_directory.py
Function: test_resource_directory
def test_resource_directory():
    with csvs() as path:
        r = resource(path)
        assert type(r) == Directory(CSV)
        assert r.path.rstrip(os.path.sep) == path.rstrip(os.path.sep)

        r2 = resource(os.path.join(path, '*.csv'))
        assert type(r2) == Directory(CSV)
        assert r2.path.rstrip(os.path.sep) == path.rstrip(os.path.sep)

Example 16

Project: golem Source File: test_ipfs_resourcemanager.py
    def testGetResourceRootDir(self):
        rm = IPFSResourceManager(self.dir_manager)
        rm.clear_resources()

        dm_dir = self.dir_manager.get_node_dir().rstrip(os.path.sep)
        rm_dir = rm.get_root_dir().rstrip(os.path.sep)

        self.assertEqual(dm_dir, rm_dir)
        self.assertEqual(dm_dir, rm.get_root_dir().rstrip(os.path.sep))

Example 17

Project: cesium Source File: apigen.py
Function: path2uri
    def _path2uri(self, dirpath):
        ''' Convert directory path to uri '''
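        # Turn the dotted package name into a path fragment, rebase the
        # directory path from root_path onto it, then convert separators
        # to dots to form the URI.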
        package_dir = self.package_name.replace('.', os.path.sep)
        relpath = dirpath.replace(self.root_path, package_dir)
        if relpath.startswith(os.path.sep):
            relpath = relpath[1:]
        return relpath.replace(os.path.sep, '.')

Example 18

Project: mishkal Source File: urlparser.py
Function: init
    def __init__(self, directory, root_directory=None,
                 cache_max_age=None):
        if os.path.sep != '/':
            directory = directory.replace(os.path.sep, '/')
        self.directory = os.path.normcase(os.path.abspath(directory))
        self.root_directory = root_directory
        if root_directory is not None:
            self.root_directory = os.path.normpath(self.root_directory)
        else:
            self.root_directory = directory
        self.root_directory = os.path.normcase(os.path.normpath(
            os.path.abspath(self.root_directory)))
        self.cache_max_age = cache_max_age
        if os.path.sep != '/':
            directory = directory.replace('/', os.path.sep)
            self.root_directory = self.root_directory.replace('/', os.path.sep)

Example 19

Project: PokemonGo-Bot-Desktop Source File: shutil.py
Function: destinsrc
def _destinsrc(src, dst):
    src = abspath(src)
    dst = abspath(dst)
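    # Append a trailing separator before the prefix test so that a sibling
    # directory with a shared name prefix (e.g. /foo/barbaz vs /foo/bar)
    # does not count as being inside src.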
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)

Example 20

Project: bfg9000 Source File: test_path.py
    def test_realize_absolute(self):
        p = Path('/foo/bar', Root.builddir)
        self.assertEqual(p.realize(path_variables),
                         os.path.join(os.path.sep, 'foo', 'bar'))
        self.assertEqual(p.realize(path_variables, executable=True),
                         os.path.join(os.path.sep, 'foo', 'bar'))

        if platform_name() == 'windows':
            p = Path(r'C:\foo\bar', Root.builddir)
            self.assertEqual(p.realize(path_variables),
                             os.path.join('C:', os.path.sep, 'foo', 'bar'))
            self.assertEqual(p.realize(path_variables, executable=True),
                             os.path.join('C:', os.path.sep, 'foo', 'bar'))

Example 21

Project: Dalton Source File: __init__.py
Function: init
    def __init__(self, playback_dir, caller=None, use_global=False):
        """Create a player from the playback_dir"""
        mod_name = playback_dir.split(os.path.sep)[-1]
        container_dir = playback_dir.split(os.path.sep)[:-1]
        sys.path.insert(0, os.path.sep.join(container_dir))
        self._caller = caller
        self._global = use_global
        self._module = __import__(mod_name)
        self._current_step = getattr(self._module, 'StepNumber0', None)
        self._current_request = None

Example 22

Project: MailingListStats Source File: archives.py
Function: init
    def __init__(self, url_or_dirpath, compressed_dir=COMPRESSED_DIR):
        rpath = url_or_dirpath.rstrip(os.path.sep)

        url = urlparse.urlparse(rpath)
        lpath = url.path.rstrip(os.path.sep)

        self._local = url.scheme == 'file' or len(url.scheme) == 0
        self._location = os.path.realpath(lpath) if self._local else rpath
        self._alias = os.path.basename(self._location) or url.netloc

        # Define local directories to store mboxes archives
        target = os.path.join(url.netloc, lpath.lstrip(os.path.sep))
        target = target.rstrip(os.path.sep)

        self._compressed_dir = os.path.join(compressed_dir, target)

Example 23

Project: brozzler Source File: behaviors.py
    @staticmethod
    def behaviors():
        if Behavior._behaviors is None:
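            # Manual join: split this module's own path on os.path.sep,
            # drop the file name, and append 'behaviors.yaml' (roughly
            # os.path.join(os.path.dirname(__file__), 'behaviors.yaml')).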
            behaviors_yaml = os.path.sep.join(__file__.split(os.path.sep)[:-1] + ['behaviors.yaml'])
            with open(behaviors_yaml) as fin:
                conf = yaml.load(fin)
            Behavior._behaviors = conf['behaviors']

            for behavior in Behavior._behaviors:
                if "behavior_js" in behavior:
                    behavior_js = os.path.sep.join(__file__.split(os.path.sep)[:-1] + ["behaviors.d"] + [behavior["behavior_js"]])
                    with open(behavior_js, encoding="utf-8") as fin:
                        behavior["script"] = fin.read()
                elif "behavior_js_template" in behavior:
                    behavior_js_template = os.path.sep.join(__file__.split(os.path.sep)[:-1] + ["behaviors.d"] + [behavior["behavior_js_template"]])
                    with open(behavior_js_template, encoding="utf-8") as fin:
                        behavior["template"] = string.Template(fin.read())

        return Behavior._behaviors

Example 24

Project: scancode-toolkit Source File: testcase.py
def to_os_native_path(path):
    """
    Normalize a path to use the native OS path separator.
    """
    path = path.replace(posixpath.sep, os.path.sep)
    path = path.replace(ntpath.sep, os.path.sep)
    path = path.rstrip(os.path.sep)
    return path

Example 25

Project: neurosynth Source File: test_analysis.py
    def test_meta_analysis(self):
        """ Test full meta-analysis stream. """
        # run a meta-analysis
        ids = ['study1', 'study3']
        ma = meta.MetaAnalysis(self.dataset, ids)
        # save the results
        tempdir = tempfile.mkdtemp()
        ma.save_results(tempdir + os.path.sep, prefix='test')
        files = glob(tempdir + os.path.sep + "test_*.nii.gz")
        self.assertEquals(len(files), 9)
        shutil.rmtree(tempdir)
        # test the analyze_features() wrapper
        tempdir = tempfile.mkdtemp()
        meta.analyze_features(
            self.dataset, output_dir=tempdir, prefix="meep")
        files = glob(tempdir + os.path.sep + "meep*.nii.gz")
        self.assertEquals(len(files), 9*5)
        shutil.rmtree(tempdir)

Example 26

Project: pycket Source File: input_output.py
Function: dirname
def _dirname(path):
    path = _strip_path_seps(path)
    components = path.split(os.path.sep)[:-1]
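    # '/x' splits to ['', 'x'], so components == [''] means the parent is
    # the root directory itself.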
    if components == ['']:
        return os.path.sep
    return os.path.sep.join(components)

Example 27

Project: kcc Source File: shared.py
def walkLevel(some_dir, level=1):
    some_dir = some_dir.rstrip(os.path.sep)
    assert os.path.isdir(some_dir)
    num_sep = some_dir.count(os.path.sep)
    for root, dirs, files in walk(some_dir):
        dirs, files = walkSort(dirs, files)
        yield root, dirs, files
        num_sep_this = root.count(os.path.sep)
        if num_sep + level <= num_sep_this:
            del dirs[:]

Example 28

Project: DIRAC Source File: dirac-production-runjoblocal.py
Function: run_job_locally
def __runJobLocally(jobID, basepath, vo):
  """
  Runs the job!
  
  """
  ipr = __import__(str(vo) + 'DIRAC.Interfaces.API.' + str(vo) + 'Job', globals(), locals(), [str(vo) + 'Job'], -1)
  voJob = getattr(ipr, str(vo) + 'Job')
  localJob = voJob(basepath + "InputSandbox" + str(jobID) + os.path.sep + "jobDescription.xml")
  localJob.setInputSandbox(os.getcwd() + os.path.sep + "pilot.cfg")
  localJob.setConfigArgs(os.getcwd() + os.path.sep + "pilot.cfg")
  os.chdir(basepath)
  localJob.runLocal()

Example 29

Project: pycp Source File: util.py
def pprint_transfer(src, dest):
    """
    Directly borrowed from git's diff.c file.

    >>> pprint_transfer("/path/to/foo", "/path/to/bar")
    '/path/to/{foo => bar}'
    """
    len_src = len(src)
    len_dest = len(dest)

    # Find common prefix
    pfx_length = 0
    i = 0
    j = 0
    while (i < len_src and j < len_dest and src[i] == dest[j]):
        if src[i] == os.path.sep:
            pfx_length = i + 1
        i += 1
        j += 1

    # Find common suffix
    sfx_length = 0
    i = len_src - 1
    j = len_dest - 1
    while (i > 0 and j > 0 and src[i] == dest[j]):
        if src[i] == os.path.sep:
            sfx_length = len_src - i
        i -= 1
        j -= 1

    src_midlen  = len_src  - pfx_length - sfx_length
    dest_midlen = len_dest - pfx_length - sfx_length

    pfx   = src[:pfx_length]
    sfx   = dest[len_dest - sfx_length:]
    src_mid  = src[pfx_length:pfx_length + src_midlen]
    dest_mid = dest[pfx_length:pfx_length + dest_midlen]

    if pfx == os.path.sep:
        # The common prefix is / ,
        # avoid print /{etc => tmp}/foo, and
        # print {/etc => /tmp}/foo
        pfx = ""
        src_mid  = os.path.sep + src_mid
        dest_mid = os.path.sep + dest_mid

    if not pfx and not sfx:
        return "%s => %s" % (src, dest)

    res = "%s{%s => %s}%s" % (pfx, src_mid, dest_mid, sfx)
    return res

Example 30

Project: wal-e Source File: tar_partition.py
Function: partition
def partition(pg_cluster_dir):
    def raise_walk_error(e):
        raise e
    if not pg_cluster_dir.endswith(os.path.sep):
        pg_cluster_dir += os.path.sep

    # Accumulates a list of archived files while walking the file
    # system.
    matches = []
    # Maintain a manifest of archived files.
    spec = {'base_prefix': pg_cluster_dir,
            'tablespaces': []}

    walker = os.walk(pg_cluster_dir, onerror=raise_walk_error)
    for root, dirnames, filenames in walker:
        is_cluster_toplevel = (os.path.abspath(root) ==
                               os.path.abspath(pg_cluster_dir))

        # Append "root" so the directory is created during restore
        # even if PostgreSQL empties the directory before tar and
        # upload completes.
        matches.append(root)

        # Do not capture any WAL files, although we do want to
        # capture the WAL directory or symlink
        if is_cluster_toplevel and 'pg_xlog' in dirnames:
            dirnames.remove('pg_xlog')
            matches.append(os.path.join(root, 'pg_xlog'))

        # Do not capture any TEMP Space files, although we do want to
        # capture the directory name or symlink
        if 'pgsql_tmp' in dirnames:
            dirnames.remove('pgsql_tmp')
            matches.append(os.path.join(root, 'pgsql_tmp'))
        if 'pg_stat_tmp' in dirnames:
            dirnames.remove('pg_stat_tmp')
            matches.append(os.path.join(root, 'pg_stat_tmp'))

        # Do not capture ".wal-e" directories which also contain
        # temporary working space.
        if '.wal-e' in dirnames:
            dirnames.remove('.wal-e')
            matches.append(os.path.join(root, '.wal-e'))

        # Do not capture lost+found directories, generated by fsck of
        # some file systems, and often only accessible by root,
        # unhelpfully causing a permission error.
        #
        # And, do not bother creating it on restore either, i.e. it is
        # not part of "matches".
        if 'lost+found' in dirnames:
            dirnames.remove('lost+found')

        for filename in filenames:
            if is_cluster_toplevel and filename in ('postmaster.pid',
                                                    'postmaster.opts'):
                # Do not include the postmaster pid file or the
                # configuration file in the backup.
                pass
            elif is_cluster_toplevel and filename in PG_CONF:
                # Do not include config files in the backup
                pass
            else:
                matches.append(os.path.join(root, filename))

        # Special case for tablespaces
        if root == os.path.join(pg_cluster_dir, 'pg_tblspc'):
            for tablespace in dirnames:
                ts_path = os.path.join(root, tablespace)
                ts_name = os.path.basename(ts_path)

                if os.path.islink(ts_path) and os.path.isdir(ts_path):
                    ts_loc = os.readlink(ts_path)
                    ts_walker = os.walk(ts_path)
                    if not ts_loc.endswith(os.path.sep):
                        ts_loc += os.path.sep

                    if ts_name not in spec['tablespaces']:
                        spec['tablespaces'].append(ts_name)
                        link_start = len(spec['base_prefix'])
                        spec[ts_name] = {
                            'loc': ts_loc,
                            # Link path is relative to base_prefix
                            'link': ts_path[link_start:]
                        }

                    for ts_root, ts_dirnames, ts_filenames in ts_walker:
                        if 'pgsql_tmp' in ts_dirnames:
                            ts_dirnames.remove('pgsql_tmp')
                            matches.append(os.path.join(ts_root, 'pgsql_tmp'))

                        for ts_filename in ts_filenames:
                            matches.append(os.path.join(ts_root, ts_filename))

                        # pick up the empty directories, make sure ts_root
                        # isn't duplicated
                        if not ts_filenames and ts_root not in matches:
                            matches.append(ts_root)

                    # The symlink for this tablespace is now in the match list,
                    # remove it.
                    if ts_path in matches:
                        matches.remove(ts_path)

    # Absolute upload paths are used for telling lzop what to compress. We
    # must evaluate tablespace storage dirs separately from core file to handle
    # the case where a common prefix does not exist between the two.
    local_abspaths = [os.path.abspath(match) for match in matches]
    # Common local prefix is the prefix removed from the paths of all tar members.
    # Core files first
    local_prefix = os.path.commonprefix(local_abspaths)
    if not local_prefix.endswith(os.path.sep):
        local_prefix += os.path.sep

    parts = _segmentation_guts(
        local_prefix, matches, PARTITION_MAX_SZ)

    return spec, parts

Example 31

Project: pycp Source File: util.py
def shorten_path(path, length):
    """Shorten a path so that it is never longer
    that the given length

    >>> shorten_path("bazinga", 6)
    'baz...'
    >>> shorten_path("foo/bar/baz", 12)
    'foo/bar/baz'
    >>> shorten_path("foo/bar/baz", 10)
    'f/b/baz'
    >>> shorten_path("/foo/bar/baz", 11)
    '/f/b/baz'
    >>> shorten_path("foo/bar/bazinga", 10)
    'f/b/baz...'
    >>> shorten_path("foo/bar/baz/spam/eggs", 6)
    'eggs'
    >>> shorten_path("foo/bar/baz/spam/elephant", 4)
    'e...'
    """
    if len(path) < length:
        return path
    if os.path.sep not in path:
        return shorten_string(path, length)

    short_base = ""
    if path.startswith(os.path.sep):
        short_base = os.path.sep
        path = path[1:]
    parts = path.split(os.path.sep)
    short_base += os.path.sep.join([p[0] for p in parts[:-1]])
    if len(short_base) > length:
        short_base = ""

    # Shorten the last part:
    short_name = parts[-1]
    last_length = length - len(short_base)
    if short_base:
        last_length = last_length - 1
    short_name = shorten_string(short_name, last_length)
    return os.path.join(short_base, short_name)

Example 32

Project: HealthStarter Source File: wheel.py
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return make_path_relative(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        # is self.req.project_name case preserving?
                        s.lower().startswith(
                            req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)

Example 33

Project: pip-update-requirements Source File: wheel.py
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        # is self.req.project_name case preserving?
                        s.lower().startswith(
                            req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((normpath(f, lib_dir), h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)

Example 34

Project: pip Source File: wheel.py
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option takes effect whenever ENSUREPIP_OPTIONS is set to
    #     anything other than altinstall.
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((normpath(f, lib_dir), h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
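
A note on the RECORD handling above: paths written to RECORD are kept platform-neutral by replacing os.path.sep with '/'. A minimal sketch of that normalization (to_record_path is a hypothetical name, not part of pip):

import os

def to_record_path(path, base):
    # Hypothetical helper: express path relative to base using '/' as the
    # separator, regardless of the native os.path.sep, as RECORD expects.
    rel = os.path.relpath(path, base)
    return rel.replace(os.path.sep, '/')

# On Windows: to_record_path('C:\\env\\Lib\\pip\\wheel.py', 'C:\\env\\Lib')
# returns 'pip/wheel.py'; on POSIX the replace is a no-op.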

Example 35

Project: drydrop Source File: server.py
def CreateURLMatcherFromMaps(root_path,
                             url_map_list,
                             module_dict,
                             default_expiration,
                             vfs,
                             create_url_matcher=URLMatcher,
                             create_cgi_dispatcher=CGIDispatcher,
                             create_file_dispatcher=FileDispatcher,
                             create_path_adjuster=PathAdjuster,
                             normpath=os.path.normpath):
  """Creates a URLMatcher instance from URLMap.

  Creates all of the correct URLDispatcher instances to handle the various
  content types in the application configuration.

  Args:
    root_path: Path to the root of the application running on the server.
    url_map_list: List of appinfo.URLMap objects to initialize this
      matcher with. Can be an empty list if you would like to add patterns
      manually.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests. This dictionary must be separate from the
      sys.modules dictionary.
    default_expiration: String describing default expiration time for browser
      based caching of static files.  If set to None this disallows any
      browser caching of static content.
    create_url_matcher, create_cgi_dispatcher, create_file_dispatcher,
    create_path_adjuster: Used for dependency injection.

  Returns:
    Instance of URLMatcher with the supplied URLMap objects properly loaded.
  """
  url_matcher = create_url_matcher()
  path_adjuster = create_path_adjuster(root_path)
  cgi_dispatcher = create_cgi_dispatcher(module_dict, root_path, path_adjuster)
  file_dispatcher = create_file_dispatcher(path_adjuster,
      StaticFileConfigMatcher(url_map_list, path_adjuster, default_expiration), vfs)

  for url_map in url_map_list:
    admin_only = url_map.login == appinfo.LOGIN_ADMIN
    requires_login = url_map.login == appinfo.LOGIN_REQUIRED or admin_only

    handler_type = url_map.GetHandlerType()
    if handler_type == appinfo.HANDLER_SCRIPT:
      dispatcher = cgi_dispatcher
    elif handler_type in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
      dispatcher = file_dispatcher
    else:
      raise InvalidAppConfigError('Unknown handler type "%s"' % handler_type)

    regex = url_map.url
    path = url_map.GetHandler()
    if handler_type == appinfo.STATIC_DIR:
      if regex[-1] == r'/':
        regex = regex[:-1]
      if path[-1] == os.path.sep:
        path = path[:-1]
      regex = '/'.join((re.escape(regex), '(.*)'))
      if os.path.sep == '\\':
        backref = r'\\1'
      else:
        backref = r'\1'
      path = (normpath(path).replace('\\', '\\\\') +
              os.path.sep + backref)

    url_matcher.AddURL(regex,
                       dispatcher,
                       path,
                       requires_login, admin_only)

  return url_matcher
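
The STATIC_DIR branch above joins the matched URL remainder onto a filesystem path. A rough sketch of that mapping, with function and argument names of my own choosing:

import os

def static_path(url, url_prefix, static_dir):
    # Map '/static/css/site.css' under url_prefix '/static' onto a path
    # below static_dir, converting URL '/' to the native os.path.sep.
    suffix = url[len(url_prefix) + 1:]
    return (os.path.normpath(static_dir) + os.path.sep +
            suffix.replace('/', os.path.sep))

print(static_path('/static/css/site.css', '/static', 'assets'))
# -> 'assets/css/site.css' ('assets\\css\\site.css' on Windows)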

Example 36

Project: blockcanvas Source File: search_package.py
def python_path_from_file_path(package, file_path, package_path=None):
    """ Given a package/module name and the absolute path to a python file,
        return a path in the form that python can understand (ie one that
        could be specified in an import statement)  package_path can be
        optionally specified to give the file path to the package.  This
        is automatically determined if not specified, but as determining this
        is an expensive operation, it is best to calculate this outside
        the function if one will be looking up many functions in the same
        module.
    """


    file_path = normalize_path_separator(file_path)
    if package_path == None:
        try:
            package_path = get_module_path(package)
        except:
            # FIXME: We're failing to get the loader, probably because
            # of a bad import or syntax error in __init__.py.
            # Use the old algorithm for now but this should be marked
            # to the user in the future.
            try:
                start = file_path.rindex(package+os.path.sep)
            except ValueError:
                start = file_path.rindex(package)

            stop = file_path.rindex('.')
            python_path = file_path[start:stop]
            python_path = python_path.replace(os.path.sep, '.')
            return python_path

    package_path = normalize_path_separator(package_path)

    try:
        if file_path.find(package_path) != 0:
            return ''
        file_suffix = file_path[len(package_path):]
        package_suffix = file_suffix.replace(os.path.sep, '.')
        if package_suffix[-3:] == '.py':
            package_suffix = package_suffix[:-3]
        return package + package_suffix
    except:
        return ''
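
A condensed sketch of the happy path above, turning a file path under a package root into a dotted import path (the paths are illustrative):

import os

def dotted_path(package, package_path, file_path):
    # Strip the package root, swap separators for dots, drop '.py'.
    suffix = file_path[len(package_path):]
    dotted = suffix.replace(os.path.sep, '.')
    if dotted.endswith('.py'):
        dotted = dotted[:-3]
    return package + dotted

print(dotted_path('mypkg', '/src/mypkg', '/src/mypkg/sub/mod.py'))
# -> 'mypkg.sub.mod'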

Example 37

Project: gtg Source File: setup.py
Function: find_package_data
def find_package_data():
    """ Generate list of data files within a package """
    packages = {
        package.replace('.', os.path.sep) for package in find_packages()}
    package_data = {}

    for folder, __, files in os.walk('GTG'):
        # Find package
        closest_package = folder
        while closest_package and closest_package not in packages:
            # Try one level up
            closest_package = os.path.dirname(closest_package)

        if not closest_package:
            continue

        allowed_extensions = [
            '', '.gtg-plugin', '.png', '.svg', '.ui', '.html', '.tex', '.txt']
        is_this_package = folder == closest_package
        if not is_this_package:
            allowed_extensions.append('.py')

        for filename in files:
            ext = os.path.splitext(filename)[-1]
            if ext not in allowed_extensions:
                continue

            # Find path relative to package
            filename = os.path.join(folder, filename)
            assert filename.startswith(closest_package)
            filename = filename[len(closest_package + os.path.sep):]

            # Assign data file to package name
            package_name = closest_package.replace(os.path.sep, '.')
            if package_name in package_data:
                package_data[package_name].append(filename)
            else:
                package_data[package_name] = [filename]

    return package_data
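
The prefix slice above, len(closest_package + os.path.sep), does the heavy lifting; a small sketch with assumed paths:

import os

closest_package = os.path.join('GTG', 'plugins')             # assumption
filename = os.path.join('GTG', 'plugins', 'icons', 'x.png')  # assumption
# Slice off the package directory plus one separator to get the path
# relative to the package, exactly as find_package_data() does.
relative = filename[len(closest_package + os.path.sep):]
print(relative)  # 'icons/x.png' ('icons\\x.png' on Windows)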

Example 38

Project: regulations-parser Source File: api_writer_tests.py
    def test_write(self):
        """Integration test."""
        p3a = Node('(a) Par a', label=['1111', '3', 'a'])
        p3b = Node('(b) Par b', label=['1111', '3', 'b'])
        p3 = Node('Things like: ', label=['1111', '3'], title='Section 3',
                  children=[p3a, p3b])
        sub = Node('', label=['1111', 'Subpart', 'E'], title='Subpart E',
                   node_type=Node.SUBPART, children=[p3])
        a3a = Node('Appendix A-3(a)', label=['1111', 'A', '3(a)'],
                   title='A-3(a) - Some Title', node_type=Node.APPENDIX)
        app = Node('', label=['1111', 'A'], title='Appendix A',
                   node_type=Node.APPENDIX, children=[a3a])
        i3a1 = Node('1. P1', label=['1111', '3', 'a', 'Interp', '1'],
                    node_type=Node.INTERP)
        i3a = Node('', label=['1111', '3', 'a', 'Interp'],
                   node_type=Node.INTERP, children=[i3a1],
                   title='Paragraph 3(a)')
        i31 = Node('1. Section 3', label=['1111', '3', 'Interp', '1'],
                   node_type=Node.INTERP)
        i3 = Node('', label=['1111', '3', 'Interp'], node_type=Node.INTERP,
                  title='Section 1111.3', children=[i3a, i31])
        i = Node('', label=['1111', 'Interp'], node_type=Node.INTERP,
                 title='Supplement I', children=[i3])
        tree = Node('Root text', label=['1111'], title='Regulation Joe',
                    children=[sub, app, i])

        writer = GitWriteContent("/regulation/1111/v1v1")
        writer.write(tree)

        dir_path = settings.GIT_OUTPUT_DIR + "regulation" + os.path.sep
        dir_path += '1111' + os.path.sep

        self.assertTrue(os.path.exists(dir_path + '.git'))
        dirs, files = [], []
        for dirname, child_dirs, filenames in os.walk(dir_path):
            if ".git" not in dirname:
                dirs.extend(os.path.join(dirname, c) for c in child_dirs
                            if c != '.git')
                files.extend(os.path.join(dirname, f) for f in filenames)
        for path in (('Subpart-E',), ('Subpart-E', '3'),
                     ('Subpart-E', '3', 'a'), ('Subpart-E', '3', 'b'),
                     ('A',), ('A', '3(a)'),
                     ('Interp',), ('Interp', '3-Interp'),
                     ('Interp', '3-Interp', '1'),
                     ('Interp', '3-Interp', 'a-Interp'),
                     ('Interp', '3-Interp', 'a-Interp', '1')):
            path = dir_path + os.path.join(*path)
            self.assertTrue(path in dirs)
            self.assertTrue(path + os.path.sep + 'index.md' in files)

        p3c = p3b
        p3c.text = '(c) Moved!'
        p3c.label = ['1111', '3', 'c']

        writer = GitWriteContent("/regulation/1111/v2v2")
        writer.write(tree)

        dir_path = settings.GIT_OUTPUT_DIR + "regulation" + os.path.sep
        dir_path += '1111' + os.path.sep

        self.assertTrue(os.path.exists(dir_path + '.git'))
        dirs, files = [], []
        for dirname, child_dirs, filenames in os.walk(dir_path):
            if ".git" not in dirname:
                dirs.extend(os.path.join(dirname, c) for c in child_dirs
                            if c != '.git')
                files.extend(os.path.join(dirname, f) for f in filenames)
        for path in (('Subpart-E',), ('Subpart-E', '3'),
                     ('Subpart-E', '3', 'a'), ('Subpart-E', '3', 'c'),
                     ('A',), ('A', '3(a)'),
                     ('Interp',), ('Interp', '3-Interp'),
                     ('Interp', '3-Interp', '1'),
                     ('Interp', '3-Interp', 'a-Interp'),
                     ('Interp', '3-Interp', 'a-Interp', '1')):
            path = dir_path + os.path.join(*path)
            self.assertTrue(path in dirs)
            self.assertTrue(path + os.path.sep + 'index.md' in files)
        self.assertFalse(dir_path + os.path.join('Subpart-E', '3', 'b')
                         in dirs)

        commit = Repo(dir_path).head.commit
        self.assertTrue('v2v2' in commit.message)
        self.assertEqual(1, len(commit.parents))
        commit = commit.parents[0]
        self.assertTrue('v1v1' in commit.message)
        self.assertEqual(1, len(commit.parents))
        commit = commit.parents[0]
        self.assertTrue('1111' in commit.message)
        self.assertEqual(0, len(commit.parents))
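
The test above builds directory paths by concatenating with os.path.sep; a sketch showing the equivalence with os.path.join (GIT_OUTPUT_DIR is a stand-in value, not the project's setting):

import os

GIT_OUTPUT_DIR = '/tmp/out/'  # stand-in for settings.GIT_OUTPUT_DIR
a = GIT_OUTPUT_DIR + 'regulation' + os.path.sep + '1111' + os.path.sep
b = os.path.join(GIT_OUTPUT_DIR, 'regulation', '1111') + os.path.sep
assert a == b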

Example 39

Project: cgstudiomap Source File: start.py
Function: run
    def run(self, cmdargs):
        parser = argparse.ArgumentParser(
            prog="%s start" % sys.argv[0].split(os.path.sep)[-1],
            description=self.__doc__
        )
        parser.add_argument('--path', default=".",
            help="Directory where your project's modules are stored (will autodetect from current dir)")
        parser.add_argument("-d", "--database", dest="db_name", default=None,
                         help="Specify the database name (default to project's directory name")


        args, unknown = parser.parse_known_args(args=cmdargs)

        project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path)))
        module_root = get_module_root(project_path)
        db_name = None
        if module_root:
            # started in a module so we choose this module name for database
            db_name = project_path.split(os.path.sep)[-1]
            # go to the parent's directory of the module root
            project_path = os.path.abspath(os.path.join(project_path, os.pardir))

        # check if one of the subfolders has at least one module
        mods = self.get_module_list(project_path)
        if mods and '--addons-path' not in cmdargs:
            cmdargs.append('--addons-path=%s' % project_path)

        if not args.db_name:
            args.db_name = db_name or project_path.split(os.path.sep)[-1]
            cmdargs.extend(('-d', args.db_name))

        # TODO: forbid some database names ? eg template1, ...
        try:
            _create_empty_database(args.db_name)
        except DatabaseExists, e:
            pass
        except Exception, e:
            die("Could not create database `%s`. (%s)" % (args.db_name, e))

        if '--db-filter' not in cmdargs:
            cmdargs.append('--db-filter=^%s$' % args.db_name)

        # Remove --path /-p options from the command arguments
        def to_remove(i, l):
            return l[i] == '-p' or l[i].startswith('--path') or \
                (i > 0 and l[i-1] in ['-p', '--path'])
        cmdargs = [v for i, v in enumerate(cmdargs)
                   if not to_remove(i, cmdargs)]

        main(cmdargs)
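
The prog name above comes from splitting argv[0] on os.path.sep; a sketch of the idiom next to its os.path.basename equivalent:

import os
import sys

prog = sys.argv[0].split(os.path.sep)[-1]
# os.path.basename is usually the safer spelling: on Windows it strips
# both '\\' and '/', while the split above only handles os.path.sep.
print(prog, os.path.basename(sys.argv[0]))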

Example 40

Project: syncthing-gtk Source File: nautilusplugin.py
Function: build_class
def build_class(plugin_module):
	"""
	Builds extension class based on provided plugin module.
	This allows sharing code between extensions and creating
	extensions for Nautilus forks just by doing:
	
	from syncthing_gtk import nautilusplugin
	from gi.repository import Nemo
	NemoExtensionCls = nautilusplugin.build_class(Nemo)
	"""

	class __NautiluslikeExtension(GObject.GObject, plugin_module.InfoProvider, plugin_module.MenuProvider):
		def __init__(self):
			# Prepare stuff
			init_logging()
			set_logging_level(VERBOSE, DEBUG)
			log.info("Initializing...")
			# ready field is set to True while connection to Syncthing
			# daemon is maintained.
			self.ready = False
			try:
				self.daemon = Daemon()
			except Exception, e:
				# Syncthing is not configured, most likely never launched.
				log.error("%s", e)
				log.error("Failed to read Syncthing configuration.")
				return
			# List of known repos + their states
			self.repos = {}
			self.rid_to_path = {}
			self.path_to_rid = {}
			# Dict of known repos -> set of associated devices
			self.rid_to_dev = {}
			# Set of online devices
			self.online_nids = set()
			# Set of online repos (at least one associated device connected)
			self.onlide_rids = set()
			# List (cache) for folders that are known to be placed below
			# some syncthing repo
			self.subfolders = set()
			# List (cache) for files that the plugin was asked about
			self.files = {}
			self.downloads = set()
			# Connect to Daemon object signals
			self.daemon.connect("connected", self.cb_connected)
			self.daemon.connect("connection-error", self.cb_syncthing_con_error)
			self.daemon.connect("disconnected", self.cb_syncthing_disconnected)
			self.daemon.connect("device-connected", self.cb_device_connected)
			self.daemon.connect("device-disconnected", self.cb_device_disconnected)
			self.daemon.connect("folder-added", self.cb_syncthing_folder_added)
			self.daemon.connect("folder-sync-started", self.cb_syncthing_folder_state_changed, STATE_SYNCING)
			self.daemon.connect("folder-sync-finished", self.cb_syncthing_folder_state_changed, STATE_IDLE)
			self.daemon.connect("folder-stopped", self.cb_syncthing_folder_stopped)
			self.daemon.connect("item-started", self.cb_syncthing_item_started)
			self.daemon.connect("item-updated", self.cb_syncthing_item_updated)
			
			log.info("Initialized.")
			# Let Daemon object connect to Syncthing
			self.daemon.reconnect()
		
		### Internal stuff
		def _clear_emblems(self):
			""" Clear emblems on all files that had emblem added """
			for path in self.files:
				self._invalidate(path)
		
		def _clear_emblems_in_dir(self, path):
			"""
			Same as _clear_emblems, but only for one directory and its
			subdirectories.
			"""
			for f in self.files:
				if f.startswith(path + os.path.sep) or f == path:
					self._invalidate(f)
		
		def _invalidate(self, path):
			""" Forces Nautils to re-read emblems on specified file """
			if path in self.files:
				file = self.files[path]
				file.invalidate_extension_info()
		
		def _get_parent_repo_state(self, path):
			"""
			If the file belongs to any known repository, returns its state.
			Returns None otherwise.
			"""
			# TODO: Probably convert to absolute paths and check for '/' at
			# end. It shouldn't be needed, in theory.
			for x in self.repos:
				if path.startswith(x + os.path.sep):
					return self.repos[x]
			return None
		
		def _get_path(self, file):
			""" Returns path for provided FileInfo object """
			if hasattr(file, "get_location"):
				if not file.get_location().get_path() is None:
					return file.get_location().get_path().decode('utf-8')
			return urllib.unquote(file.get_uri().replace("file://", ""))
		
		### Daemon callbacks
		def cb_connected(self, *a):
			"""
			Called when connection to Syncthing daemon is created.
			Clears list of known folders and all caches.
			Also asks Nautilus to clear all emblems.
			"""
			self.repos = {}
			self.rid_to_dev = {}
			self.online_nids = set()
			self.onlide_rids = set()
			self.subfolders = set()
			self.downloads = set()
			self._clear_emblems()
			self.ready = True
			log.info("Connected to Syncthing daemon")
		
		def cb_device_connected(self, daemon, nid):
			self.online_nids.add(nid)
			# Mark any repo attached to this device online
			for rid in self.rid_to_dev:
				if not rid in self.onlide_rids:
					if nid in self.rid_to_dev[rid]:
						log.debug("Repo '%s' now online", rid)
						self.onlide_rids.add(rid)
						if self.repos[self.rid_to_path[rid]] == STATE_OFFLINE:
							self.repos[self.rid_to_path[rid]] = STATE_IDLE
						self._clear_emblems_in_dir(self.rid_to_path[rid])
		
		def cb_device_disconnected(self, daemon, nid):
			self.online_nids.remove(nid)
			# Check all online repos attached to this device
			for rid in self.rid_to_dev:
				if rid in self.onlide_rids:
				# Check if the repo is attached to any other online device
					if len([ x for x in self.rid_to_dev[rid] if x in self.online_nids ]) == 0:
						# Nope
						log.debug("Repo '%s' now offline", rid)
						self.onlide_rids.remove(rid)
						self.repos[self.rid_to_path[rid]] = STATE_OFFLINE
						self._clear_emblems_in_dir(self.rid_to_path[rid])
		
		def cb_syncthing_folder_added(self, daemon, rid, r):
			"""
			Called when a folder is read from the configuration (by the syncthing
			daemon, not locally).
			Adds path to list of known repositories and asks Nautilus to
			re-read emblem.
			"""
			path = os.path.expanduser(r["path"])
			if path.endswith(os.path.sep):
				path = path.rstrip(os.path.sep)
			self.rid_to_path[rid] = path
			self.path_to_rid[path] = rid
			self.repos[path] = STATE_OFFLINE
			self._invalidate(path)
			# Store repo id in dict of associated devices
			self.rid_to_dev[rid] = set()
			for d in r['devices']:
				self.rid_to_dev[rid].add(d['deviceID'])
		
		def cb_syncthing_con_error(self, *a):
			pass
		
		def cb_syncthing_disconnected(self, *a):
			"""
			Called when connection to Syncthing daemon is lost or Daemon
			object fails to (re)connect.
			If a connection had been established before, cleans up the
			cached state.
			"""
			if self.ready:
				log.info("Connection to Syncthing daemon lost")
				self.ready = False
				self._clear_emblems()
			self.daemon.reconnect()
		
		def cb_syncthing_folder_state_changed(self, daemon, rid, state):
			""" Called when folder synchronization starts or stops """
			if rid in self.rid_to_path:
				path = self.rid_to_path[rid]
				if self.repos[path] != STATE_OFFLINE:
					self.repos[path] = state
					log.debug("State of %s changed to %s", path, state)
					self._invalidate(path)
					# Invalidate all files in repository as well
					self._clear_emblems_in_dir(path)
		
		def cb_syncthing_folder_stopped(self, daemon, rid, *a):
			""" Called when synchronization error is detected """
			self.cb_syncthing_folder_state_changed(daemon, rid, STATE_STOPPED)
		
		def cb_syncthing_item_started(self, daemon, rid, filename, *a):
			""" Called when file download starts """
			if rid in self.rid_to_path:
				path = self.rid_to_path[rid]
				filepath = os.path.join(path, filename)
				log.debug("Download started %s", filepath)
				self.downloads.add(filepath)
				self._invalidate(filepath)
				placeholderpath = os.path.join(path, ".syncthing.%s.tmp" % filename)
				if placeholderpath in self.files:
					self._invalidate(placeholderpath)

		def cb_syncthing_item_updated(self, daemon, rid, filename, *a):
			""" Called after file is downloaded """
			if rid in self.rid_to_path:
				path = self.rid_to_path[rid]
				filepath = os.path.join(path, filename)
				log.debug("Download finished %s", filepath)
				if filepath in self.downloads:
					self.downloads.remove(filepath)
					self._invalidate(filepath)
		
		### InfoProvider stuff
		def update_file_info(self, file):
			# TODO: This remembers every file the user ever saw in Nautilus.
			# There *has* to be a more memory-efficient alternative...
			path = self._get_path(file)
			pathonly, filename = os.path.split(path)
			self.files[path] = file
			if not self.ready: return plugin_module.OperationResult.COMPLETE
			# Check if folder is one of repositories managed by syncthing
			if path in self.downloads:
				file.add_emblem("syncthing-active")
			if filename.startswith(".syncthing.") and filename.endswith(".tmp"):
				# Check for placeholder files
				realpath = os.path.join(pathonly, filename[11:-4])
				if realpath in self.downloads:
					file.add_emblem("syncthing-active")
					return plugin_module.OperationResult.COMPLETE
			elif path in self.repos:
				# Determine what emblem should be used
				state = self.repos[path]
				if state == STATE_IDLE:
					# File manager probably shouldn't care about the folder being scanned
					file.add_emblem("syncthing")
				elif state == STATE_STOPPED:
					file.add_emblem("syncthing-error")
				elif state == STATE_SYNCING:
					file.add_emblem("syncthing-active")
				else:
					# Default (i-have-no-idea-what-happened) state
					file.add_emblem("syncthing-offline")
			else:
				state = self._get_parent_repo_state(path)
				if state is None:
					# _get_parent_repo_state returns None if the file doesn't
					# belong to any repo
					pass
				elif state in (STATE_IDLE, STATE_SYNCING):
					# File manager probably shouldn't care about the folder being scanned
					file.add_emblem("syncthing")
				else:
					# Default (i-have-no-idea-what-happened) state
					file.add_emblem("syncthing-offline")
			return plugin_module.OperationResult.COMPLETE
		
		### MenuProvider stuff
		def get_file_items(self, window, sel_items):
			if len(sel_items) == 1:
				# Display the context menu only if exactly one item is
				# selected and that item is a directory
				return self.get_background_items(window, sel_items[0])
			return []
		
		def cb_remove_repo_menu(self, menuitem, path):
			if path in self.path_to_rid:
				path = os.path.abspath(os.path.expanduser(path))
				path = path.replace("'", "'\\''")  # close, escape and reopen the shell quote
				os.system("syncthing-gtk --remove-repo '%s' &" % path)
		
		def cb_add_repo_menu(self, menuitem, path):
			path = os.path.abspath(os.path.expanduser(path))
			path = path.replace("'", "'\\''")  # close, escape and reopen the shell quote
			os.system("syncthing-gtk --add-repo '%s' &" % path)
		
		def get_background_items(self, window, item):
			if not item.is_directory():
				# The context menu is enabled only for directories
				# (a file can't be used as a repo)
				return []
			path = self._get_path(item).rstrip("/")
			if path in self.repos:
				# Folder is already a repository.
				# Add 'remove from ST' item
				menu = plugin_module.MenuItem(name='STPlugin::remove_repo',
										 label='Remove Directory from Syncthing',
										 tip='Remove selected directory from Syncthing',
										 icon='syncthing-offline')
				menu.connect('activate', self.cb_remove_repo_menu, path)
				return [menu]
			elif self._get_parent_repo_state(path) is None:
				# Folder doesn't belong to any repository.
				# Add 'add to ST' item
				menu = plugin_module.MenuItem(name='STPlugin::add_repo',
										 label='Synchronize with Syncthing',
										 tip='Add selected directory to Syncthing',
										 icon='syncthing')
				menu.connect('activate', self.cb_add_repo_menu, path)
				return [menu]
			# Folder belongs to some repository.
			# Don't add anything
			return []
		
	return __NautiluslikeExtension
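
The containment test in _get_parent_repo_state() appends os.path.sep before calling startswith(); a sketch of why that matters:

import os

def is_inside(path, repo):
    # Without the trailing separator, '/data/foobar' would wrongly match
    # the repo '/data/foo'.
    return path == repo or path.startswith(repo + os.path.sep)

print(is_inside('/data/foo/file.txt', '/data/foo'))  # True
print(is_inside('/data/foobar', '/data/foo'))        # False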

Example 41

Project: caffe-gui-tool Source File: IOloadprototxt.py
Function: parse
    def parse(self):
        node = nodeclass()
        node.weight_params = fclass()
        node.bias_params = fclass()
        chunkstring = self.chunkstring
        self.type = findfirst('type: "{}"',
                              chunkstring)  # the use of the default parse search gets the first instance of type
        findsetbeforecolon('name', node, chunkstring)
        node.include_in = findfirst('phase: {}\n', chunkstring)
        self.bottoms = findmultiple('bottom: "{}"', chunkstring)
        self.tops = findmultiple('top: "{}"', chunkstring)
        #################### Many layer specific
        decaymults = findmultiple('decay_mult: {:g}\n', chunkstring)
        lrmults = findmultiple('lr_mult: {:g}\n', chunkstring)
        if decaymults:
            node.extra_params = True
            node.weight_params.decay_mult = decaymults[0]
            if len(decaymults)==2:
                node.bias_params.decay_mult = decaymults[1]
        if lrmults:
            node.extra_params = True
            node.weight_params.lr_mult = lrmults[0]
            if len(lrmults)==2:
                node.bias_params.lr_mult = lrmults[1]
        findsetbeforecolon('kernel_size', node, chunkstring, True)
        if not node.kernel_size:
            findsetbeforecolon('kernel_h', node, chunkstring, True)
            findsetbeforecolon('kernel_w', node, chunkstring, True)
            if node.kernel_h and node.kernel_w:
                node.square_kernel = 0
        findsetbeforecolon('pad', node, chunkstring, True)
        if not node.pad:
            findsetbeforecolon('pad_h', node, chunkstring, True)
            findsetbeforecolon('pad_w', node, chunkstring, True)
            if node.pad_h and node.pad_w:
                node.square_padding = 0
        findsetbeforecolon('stride', node, chunkstring, True)
        if not node.stride:
            findsetbeforecolon('stride_h', node, chunkstring, True)
            findsetbeforecolon('stride_w', node, chunkstring, True)
            if node.stride_h and node.stride_w:
                node.square_stride = 0
        ################################ fillers
        node.bias_filler = self.getfiller('bias')
        node.weight_filler = self.getfiller('weight')
        if self.type == 'Pooling':
            node.mode = findfirst('pool: {}\n', chunkstring)
        ################################ Database
        source = findfirst('source: "{}"\n', chunkstring)
        db_end = 'Null'
        for line in chunkstring.split('\n'):
            if 'data_param' in line:
                if 'image' in line:
                    node.db_type = 'ImageData'
                elif 'hdf5' in line:
                    node.db_type = 'HDF5Data'
                else:
                    db_end = 'db'
        if db_end == 'db':
            node.db_type = findfirst('backend: {}\n', chunkstring)
        batch_size = findfirst('batch_size: {:g}\n', chunkstring)
        if batch_size:
            if node.include_in == 'TRAIN':
                node.train_batch_size = batch_size
                node.train_path = source
                node.train_data = source
            else:
                node.test_batch_size = batch_size
                node.test_path = source
                node.test_data = source

        ######################################### Other params
        SimpleNumberProperties = ['test_initialization', 'base_lr', 'display', 'average_loss', 'max_iter', 'iter_size',
                                  'momentum', 'weight_decay', 'snapshot', 'snapshot_diff', 'debug_info',
                                  'snapshot_after_train', 'alpha', 'beta', 'negative_slope', 'dropout_ratio',
                                  'random_seed',
                                  'stepsize', 'gamma', 'delta', 'power', 'base', 'scale', 'shift', 'channel_shared',
                                  'num_output', 'axis', 'stable_prod_grad', 'coeff', 'eps', 'across_channels',
                                  'normalize_variance', 'mirror', 'is_color', 'new_width', 'new_height', 'shuffle',
                                  'rand_skip', 'test_compute_loss', 'test_iter']
        SimpleStringProperties = ['solver_mode', 'lr_policy', 'solver_type', 'regularization_type', 'operation',
                                  'mean_file','module','layer']
        for prop in SimpleNumberProperties:
            findsetbeforecolon(prop, node, chunkstring, True)
        for prop in SimpleStringProperties:
            findsetbeforecolon(prop, node, chunkstring, False)
        if node.mean_file:
            node.use_mean_file = 1
        if node.random_seed:
            node.use_random_seed = True
        if self.type == 'Solver':
            if findfirst(os.path.sep + '{}' + '_train', chunkstring):
                node.solvername = format_filename(findfirst(os.path.sep + '{}' + '_train', chunkstring))
                if len(node.solvername) > 15:
                    node.solvername = node.solvername[15:]
            else:
                node.solvername = 'CGTLoaded'
        node.w = findfirst('loss_weight: {:g}\n',chunkstring)
        node.OutMaxVal = findfirst('out_max_val: {:g}\n', chunkstring)  ################
        node.TopK = findfirst('top_k: {:g}\n', chunkstring)  ###################
        node.filename = findfirst('file_name: {}', chunkstring)  #######################
        node.slice_points = findmultiple('slice_point {:g}\n', chunkstring)
        sp = findfirst('snapshot_prefix: "{}"\n', chunkstring)
        if sp:
            node.snapshot_prefix = os.path.split(sp)[0] + os.path.sep
        node.size = findfirst('local_size: {:g}\n', chunkstring)
        node.mode = findfirst('norm_region: {}\n', chunkstring)
        if self.type == 'Concat':
            node.input_amount = len(self.bottoms)
        for parametername in dir(node)[dir(node).index('__weakref__') + 1:]:
            if node.__getattribute__(parametername) == None:
                node.__delattr__(parametername)

        self.node = node
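
The snapshot_prefix handling above keeps only the directory part of the path, with a trailing separator; a tiny sketch with an assumed value:

import os

sp = '/models/run1/snap'                     # assumed snapshot_prefix
prefix = os.path.split(sp)[0] + os.path.sep  # '/models/run1/'
print(prefix)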

Example 42

Project: peppy Source File: utils.py
Function: normalize
def normalize(ref, base=None):
    """Normalize a url string into a reference and fix windows shenanigans"""
    if not isinstance(ref, Reference):
        if ref.startswith('file:'):
            # URLs always use /, so change windows path separators to forward
            # slashes
            try:
                ref = unicode(ref)
            except UnicodeDecodeError:
                try:
                    ref = str(ref).decode(sys.getfilesystemencoding())
                except UnicodeDecodeError:
                    ref = str(ref).decode('utf-8')
            #dprint(repr(ref))
            if os.path.sep == '\\':
                ref = ref.replace(os.path.sep, '/')
        ref = get_reference(ref)
    # If the reference is absolute (i.e.  contains a scheme), we return;
    # otherwise we assume it's a file:// URI
    #dprint(str(ref))
    if ref.scheme:
        return ref

    # Default to the current working directory
    if base is None:
        try:
            base = os.getcwd().decode(sys.getfilesystemencoding())
        except UnicodeDecodeError:
            base = os.getcwd().decode('utf-8')

    # URLs always use /
    if os.path.sep == '\\':
        base = base.replace(os.path.sep, '/')
    #dprint(base)
    # Check windows drive letters and add extra slash for correct URL syntax
    if len(base) > 1 and base[1] == ':':
        base = "/%s:%s" % (base[0].lower(), base[2:])
    baseref = get_reference(u'file://%s/' % base)
    try:
        path = unicode(ref.path)
    except UnicodeDecodeError:
        try:
            path = str(ref.path).decode(sys.getfilesystemencoding())
        except UnicodeDecodeError:
            path = str(ref.path).decode('utf-8')
    #dprint(repr(path))
    
    # Add the query string and fragment if they exist
    newref = baseref.resolve(path)
    newref.query = ref.query
    newref.fragment = ref.fragment
    #dprint(newref)
    return newref
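
A sketch of the separator and drive-letter normalization above, reduced to just the path-to-URL step (to_url_path is a name of my own):

import os

def to_url_path(path):
    # URLs always use '/', so rewrite the native separator first ...
    if os.path.sep == '\\':
        path = path.replace(os.path.sep, '/')
    # ... then give Windows drive letters the '/c:/...' URL form.
    if len(path) > 1 and path[1] == ':':
        path = '/%s:%s' % (path[0].lower(), path[2:])
    return 'file://%s' % path

print(to_url_path('/home/user/doc.txt'))  # file:///home/user/doc.txt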

Example 43

Project: dpa-pipe Source File: spec.py
Function: get
    @classmethod
    def get(cls, in_str, relative_to=None):
        """Evaluate the input string to a PTaskSpec object.

        A relative_to PTaskSpec/str may also be supplied for evaluating partial
        input specs.

        """

        if os.path.sep in in_str:
            raise PTaskSpecError(
                "Invalid character in ptask spec: '" + os.path.sep + "'")

        if relative_to and os.path.sep in relative_to:
            raise PTaskSpecError(
                "Invalid character in relative_to ptask spec: '" + \
                os.path.sep + "'"
            )

        # remove whitespace and separators from head/tail
        in_str = in_str.strip().strip(PTaskSpec.SEPARATOR)

        if in_str.lower() == "none":
            return PTaskSpec("")

        # if the spec string starts with PTaskSpec.ROOT, it eliminates the
        # relative part (it implies that what follows is relative to the top
        # level).
        if in_str.startswith(PTaskSpec.ROOT):
            relative_to = ""
            in_str = in_str.lstrip(PTaskSpec.ROOT)

        in_str = in_str.strip(PTaskSpec.SEPARATOR)
        in_str_parts = in_str.strip().split(PTaskSpec.SEPARATOR)

        # if relative_to has a value, use it as the base for full output spec
        if relative_to:
            full_spec_parts = relative_to.strip().split(PTaskSpec.SEPARATOR)
        else:
            full_spec_parts = []

        # expand full spec by evaluating each bit of the input spec in order
        while len(in_str_parts) > 0:
        
            part = in_str_parts.pop(0)

            # if the part is the parent string, go up one level in the spec
            if part == PTaskSpec.PARENT:
                try:
                    full_spec_parts.pop()
                except IndexError:
                    raise PTaskSpecError(
                        "Could not find parent task name for: '" + \
                        PTaskSpec.SEPARATOR.join(full_spec_parts) + "'"
                    )

            # if the part is the current level, just ignore it
            elif part == PTaskSpec.CURRENT:
                continue

            # next level of the spec, add it to the full spec parts
            else:
                full_spec_parts.append(part)

        # join the parts and make sure there aren't any colons on either end.
        full_spec_str = PTaskSpec.SEPARATOR.join(full_spec_parts).\
            lstrip(PTaskSpec.SEPARATOR).rstrip(PTaskSpec.SEPARATOR)

        return PTaskSpec(full_spec_str)
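
The validation at the top of get() simply refuses native path separators in a spec; a minimal sketch, assuming ':' for PTaskSpec.SEPARATOR:

import os

def validate_spec(in_str, separator=':'):
    # Reject the native path separator outright; specs are not paths.
    if os.path.sep in in_str:
        raise ValueError(
            "Invalid character in ptask spec: '" + os.path.sep + "'")
    return in_str.strip().strip(separator)

print(validate_spec(':projects:assets:'))  # 'projects:assets'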

Example 44

Project: countershape Source File: model.py
    def getPageFrom(self, fromPage, toPage):
        if isinstance(toPage, BasePage):
            return toPage
        elif utils.isStringLike(toPage):
            exact, isParent, isChild, isSibling, isLocal = False, False, False, False, False
            if toPage.startswith("/") or toPage.startswith(os.path.sep):
                exact = True
            elif toPage.startswith("./") or toPage.startswith("." + os.path.sep):
                isChild = True
                toPage = toPage[2:]
            elif toPage.startswith("^/") or toPage.startswith("^" + os.path.sep):
                isParent = True
                toPage = toPage[2:]
            elif toPage.startswith("-/") or toPage.startswith("-" + os.path.sep):
                isSibling = True
                toPage = toPage[2:]
            elif toPage.startswith("$/") or toPage.startswith("$" + os.path.sep):
                isLocal = True
                toPage = toPage[2:]
            if any([isParent, isChild, isSibling, isLocal]) and not fromPage:
                s = "Relative page link '%s' outside of page call context."%toPage
                raise exceptions.ApplicationError(s)
            path = [i for i in os.path.normpath(toPage).split(os.path.sep) if i and i != "."]
            if not path:
                return self.root
            pname = path[-1]
            pagelist = self._pages.get(pname, None)
            if pagelist:
                match = None
                for p in pagelist:
                    if isChild:
                        if not fromPage.isDescendantOf(p):
                            continue
                    elif isParent:
                        if not p.isDescendantOf(fromPage):
                            continue
                    elif isSibling:
                        if not fromPage.isSiblingOf(p):
                            continue
                    elif isLocal:
                        values = [
                            fromPage.isDescendantOf(p),
                            p.isDescendantOf(fromPage),
                            fromPage.isSiblingOf(p)
                        ]
                        if not any(values):
                            continue
                    if p.match(path, exact):
                        if match:
                            raise exceptions.ApplicationError(
                                "Ambiguous path specification: %s."%toPage
                            )
                        match = p
                return match
            else:
                return None
        else:
            s = "Invalid argument to getPage: %s."%repr(toPage) +\
                " Must be either a string or a Page object."
            raise exceptions.ApplicationError(s)
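
The path normalization in the middle of getPageFrom() is worth isolating; a sketch of how link components are extracted (the link value is assumed):

import os

toPage = './intro/../guide/index'  # assumed relative link
path = [i for i in os.path.normpath(toPage).split(os.path.sep)
        if i and i != '.']
print(path)  # ['guide', 'index']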

Example 45

Project: CouchPotatoServer Source File: browser.py
Function: view
    def view(self, path = '/', show_hidden = True, **kwargs):

        soft_chroot = Env.get('softchroot')
        
        home = getUserDir()
        if soft_chroot.enabled:
            if not soft_chroot.is_subdir(home):
                home = soft_chroot.get_chroot()

        if not path:
            path = home
            if path.endswith(os.path.sep):
                path = path.rstrip(os.path.sep)
        else:
            path = soft_chroot.chroot2abs(path)

        try:
            dirs = self.getDirectories(path = path, show_hidden = show_hidden)
        except:
            log.error('Failed getting directory "%s" : %s', (path, traceback.format_exc()))
            dirs = []

        if soft_chroot.enabled:
            dirs = map(soft_chroot.abs2chroot, dirs)

        parent = os.path.dirname(path.rstrip(os.path.sep))
        if parent == path.rstrip(os.path.sep):
            parent = '/'
        elif parent != '/' and parent[-2:] != ':\\':
            parent += os.path.sep

        # TODO : check on windows:
        is_root = path == '/'

        if soft_chroot.enabled:
            is_root = soft_chroot.is_root_abs(path)

            # fix paths:
            if soft_chroot.is_subdir(parent):
                parent = soft_chroot.abs2chroot(parent)
            else:
                parent = os.path.sep

            home = soft_chroot.abs2chroot(home)

        return {
            'is_root': is_root,
            'empty': len(dirs) == 0,
            'parent': parent,
            'home': home,
            'platform': os.name,
            'dirs': dirs,
        }
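
The parent computation above strips the trailing separator before calling dirname(); a sketch of why:

import os

path = '/data/movies/'
# dirname('/data/movies/') is '/data/movies', so strip the trailing
# os.path.sep first to actually step up one level.
parent = os.path.dirname(path.rstrip(os.path.sep))
print(parent)  # '/data'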

Example 46

Project: odoo Source File: start.py
Function: run
    def run(self, cmdargs):
        parser = argparse.ArgumentParser(
            prog="%s start" % sys.argv[0].split(os.path.sep)[-1],
            description=self.__doc__
        )
        parser.add_argument('--path', default=".",
            help="Directory where your project's modules are stored (will autodetect from current dir)")
        parser.add_argument("-d", "--database", dest="db_name", default=None,
                         help="Specify the database name (default to project's directory name")


        args, unknown = parser.parse_known_args(args=cmdargs)

        project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path)))
        module_root = get_module_root(project_path)
        db_name = None
        if module_root:
            # started in a module so we choose this module name for database
            db_name = project_path.split(os.path.sep)[-1]
            # go to the parent's directory of the module root
            project_path = os.path.abspath(os.path.join(project_path, os.pardir))

        # check if one of the subfolders has at least one module
        mods = self.get_module_list(project_path)
        if mods and '--addons-path' not in cmdargs:
            cmdargs.append('--addons-path=%s' % project_path)

        if not args.db_name:
            args.db_name = db_name or project_path.split(os.path.sep)[-1]
            cmdargs.extend(('-d', args.db_name))

        # TODO: forbid some database names ? eg template1, ...
        try:
            _create_empty_database(args.db_name)
        except DatabaseExists, e:
            pass
        except Exception, e:
            die("Could not create database `%s`. (%s)" % (args.db_name, e))

        if '--db-filter' not in cmdargs:
            cmdargs.append('--db-filter=^%s$' % args.db_name)

        main(cmdargs)

Example 47

Project: PyClassLessons Source File: wheel.py
Function: move_wheel_files
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(name, user=user, home=home, root=root)

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        compileall.compile_dir(source, force=True, quiet=True)

    def normpath(src, p):
        return make_path_relative(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        if not os.path.exists(dest): # common for the 'include' path
            os.makedirs(dest)

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base
                    and s.endswith('.dist-info')
                    # is self.req.project_name case preserving?
                    and s.lower().startswith(req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                if not os.path.exists(destdir):
                    os.makedirs(destdir)
                # use copy2 (not move) to be extra sure we're not moving
                # directories over; copy2 fails for directories.  this would
                # fail tests (not during released/user execution)
                shutil.copy2(srcfile, destfile)
                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add to the level of hack in this section of code: in order to
    # support ensurepip, this code will look for an ``ENSUREPIP_OPTIONS``
    # environment variable which controls which versioned scripts get
    # installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option takes effect whenever ENSUREPIP_OPTIONS is set to
    #     anything other than altinstall.
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [k for k in console
                if re.match(r'easy_install(-\d\.\d)?$', k)]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()]))
    if len(gui) > 0:
        generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()], {'gui': True}))

    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
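
One small idiom in the version above deserves a note: the source root is forced to end in exactly one os.path.sep so that slicing with len(source) inside the os.walk loop yields clean relative paths. A sketch with assumed values:

import os

wheeldir = '/tmp/wheel//'                            # assumed input
source = wheeldir.rstrip(os.path.sep) + os.path.sep  # '/tmp/wheel/'
walked = '/tmp/wheel/pkg/sub'                        # a dir from os.walk
basedir = walked[len(source):].lstrip(os.path.sep)
print(basedir)  # 'pkg/sub'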

Example 48

Project: iris Source File: generate_package_rst.py
Function: do_package
def do_package(package_name):
    out_dir = package_name + os.path.sep

    # Import the root package. If this fails then an import error will be
    # raised.
    module = __import__(package_name)
    root_package = package_name
    rootdir = os.path.dirname(module.__file__)

    package_folder = []
    module_folders = {}
    for root, subFolders, files in os.walk(rootdir):
        for fname in files:
            name, ext = os.path.splitext(fname)

            # Skip some non-relevant files.
            if (fname.startswith('.') or fname.startswith('#') or
                    re.search('^_[^_]', fname) or fname.find('.svn') >= 0 or
                    not (ext in ['.py', '.so'])):
                continue

            # Handle new shared library naming conventions
            if ext == '.so':
                name = name.split('.', 1)[0]

            rel_path = root_package + \
                os.path.join(root, fname).split(rootdir)[-1]
            mod_folder = root_package + \
                os.path.join(root).split(rootdir)[-1].replace('/', '.')

            # Only add this package to folder list if it contains an __init__
            # script.
            if name == '__init__':
                package_folder.append([mod_folder, rel_path])
            else:
                import_name = mod_folder + '.' + name
                mf_list = module_folders.setdefault(mod_folder, [])
                mf_list.append((import_name, rel_path))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    for package, package_path in package_folder:
        if '._' in package or 'test' in package:
            continue

        paths = []
        for spackage, spackage_path in package_folder:
            # Ignore this package, packages that are not children of this
            # one, test packages, private packages, and packages that are
            # subpackages of subpackages (they'll be part of the subpackage).
            if spackage == package:
                continue
            if not spackage.startswith(package):
                continue
            if spackage.count('.') > package.count('.') + 1:
                continue
            if 'test' in spackage:
                continue

            split_path = spackage.rsplit('.', 2)[-2:]
            if any(part[0] == '_' for part in split_path):
                continue

            paths.append(os.path.join(*split_path) + '.rst')

        paths.extend(os.path.join(os.path.basename(os.path.dirname(path)),
                                  os.path.basename(path).split('.', 1)[0])
                     for imp_name, path in module_folders.get(package, []))

        paths.sort()
        doc = auto_doc_package(package_path, package, root_package, paths)

        package_dir = out_dir + package.replace('.', os.path.sep)
        if not os.path.exists(package_dir):
            os.makedirs(package_dir)

        out_path = package_dir + '.rst'
        if not os.path.exists(out_path):
            print('Creating non-existent document {} ...'.format(out_path))
            with open(out_path, 'w') as fh:
                fh.write(doc)
        else:
            with open(out_path, 'r') as fh:
                existing_content = ''.join(fh.readlines())
            if doc != existing_content:
                print('Creating out of date document {} ...'.format(
                    out_path))
                with open(out_path, 'w') as fh:
                    fh.write(doc)

        for import_name, module_path in module_folders.get(package, []):
            doc = auto_doc_module(module_path, import_name, root_package)
            out_path = out_dir + import_name.replace('.', os.path.sep) + '.rst'
            if not os.path.exists(out_path):
                print('Creating non-existent document {} ...'.format(
                    out_path))
                with open(out_path, 'w') as fh:
                    fh.write(doc)
            else:
                with open(out_path, 'r') as fh:
                    existing_content = ''.join(fh.readlines())
                if doc != existing_content:
                    print('Creating out of date document {} ...'.format(
                        out_path))
                    with open(out_path, 'w') as fh:
                        fh.write(doc)
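
The os.path.sep pattern worth noting in this example is the round trip
between dotted module names and relative file paths: separators become dots
while walking the tree, and dots become os.path.sep again when the .rst
output paths are built. A self-contained illustration (function names are
hypothetical):

import os

def module_to_rst_path(dotted_name):
    # 'iris.analysis.maths' -> 'iris/analysis/maths.rst'
    # (backslash-separated on Windows)
    return dotted_name.replace('.', os.path.sep) + '.rst'

def path_to_module(rel_path):
    # Strip the extension first, then turn separators back into dots.
    root, _ = os.path.splitext(rel_path)
    return root.replace(os.path.sep, '.')

Note that the original's replace('/', '.') assumes POSIX separators even
though os.walk yields native ones; replacing os.path.sep, as above, keeps
the round trip portable.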

Example 49

Project: WAPT Source File: wheel.py
Function: move_wheel_files
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
        pycompile=True):
    """Install a wheel"""

    scheme = distutils_scheme(name, user=user, home=home, root=root)

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        compileall.compile_dir(source, force=True, quiet=True)

    def normpath(src, p):
        return make_path_relative(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        if not os.path.exists(dest): # common for the 'include' path
            os.makedirs(dest)

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base
                    and s.endswith('.dist-info')
                    # is self.req.project_name case preserving?
                    and s.lower().startswith(req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
                if not os.path.exists(destsubdir):
                    os.makedirs(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                shutil.move(srcfile, destfile)
                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add to the level of hack in this section of code: in order to
    # support ensurepip, this code will look for an ``ENSUREPIP_OPTIONS``
    # environment variable which controls which versioned scripts get
    # installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed.
    #     Note that this is the behavior whenever ENSUREPIP_OPTIONS is set
    #     to anything other than altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [k for k in console
                if re.match(r'easy_install(-\d\.\d)?$', k)]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()]))
    if len(gui) > 0:
        generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()], {'gui': True}))

    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
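
A recurring detail in this example: RECORD entries are always
'/'-separated, so normpath makes a path relative and then undoes the native
separator with .replace(os.path.sep, '/'). A sketch of that normalization
using only the standard library (make_path_relative is pip's own helper;
os.path.relpath stands in for it here):

import os

def record_path(path, base):
    # RECORD rows are relative to the install base and use '/' on every
    # platform, so swap out the native separator after relpath.
    return os.path.relpath(path, base).replace(os.path.sep, '/')

On Windows, record_path(r'C:\pkgs\demo\mod.py', r'C:\pkgs') gives
'demo/mod.py'; on POSIX the replace is a no-op.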

Example 50

Project: deluge Source File: addtorrentdialog.py
Function: on_filename_edited
    def _on_filename_edited(self, renderer, path, new_text):
        index = self.files_treestore[path][3]

        new_text = new_text.strip(os.path.sep).strip()

        # Return if the text hasn't changed
        if new_text == self.files_treestore[path][1]:
            return

        # Get the tree iter
        itr = self.files_treestore.get_iter(path)

        # Get the torrent_id
        (model, row) = self.listview_torrents.get_selection().get_selected()
        torrent_id = model[row][0]

        if 'mapped_files' not in self.options[torrent_id]:
            self.options[torrent_id]['mapped_files'] = {}

        if index > -1:
            # We're renaming a file! Yay! That's easy!
            if not new_text:
                return
            parent = self.files_treestore.iter_parent(itr)
            file_path = os.path.join(self.get_file_path(parent), new_text)
            # Don't rename if filename exists
            if parent:
                for row in self.files_treestore[parent].iterchildren():
                    if new_text == row[1]:
                        return
            if os.path.sep in new_text:
                # There are folders in this path, so we need to create them
                # and then move the file iter to top
                split_text = new_text.split(os.path.sep)
                for s in split_text[:-1]:
                    parent = self.files_treestore.append(parent, [True, s, 0, -1, False, gtk.STOCK_DIRECTORY])

                self.files_treestore[itr][1] = split_text[-1]
                reparent_iter(self.files_treestore, itr, parent)
            else:
                # Update the row's text
                self.files_treestore[itr][1] = new_text

            # Update the mapped_files dict in the options with the index and new
            # file path.
            # We'll send this to the core when adding the torrent so it knows
            # what to rename before adding.
            self.options[torrent_id]['mapped_files'][index] = file_path
            self.files[torrent_id][index]['path'] = file_path
        else:
            # Folder!
            def walk_tree(row):
                if not row:
                    return

                # Get the file path base once, since it will be the same for
                # all siblings
                file_path_base = self.get_file_path(
                    self.files_treestore.iter_parent(row)
                )

                # Iterate through all the siblings at this level
                while row:
                    # We recurse if there are children
                    if self.files_treestore.iter_has_child(row):
                        walk_tree(self.files_treestore.iter_children(row))

                    index = self.files_treestore[row][3]

                    if index > -1:
                        # Get the new full path for this file
                        file_path = file_path_base + self.files_treestore[row][1]

                        # Update the file path in the mapped_files dict
                        self.options[torrent_id]['mapped_files'][index] = file_path
                        self.files[torrent_id][index]['path'] = file_path

                    # Get the next siblings iter
                    row = self.files_treestore.iter_next(row)

            # Update the treestore row first so that when walking the tree
            # we can construct the new proper paths

            # We need to check if this folder has been split
            if os.path.sep in new_text:
                # It's been split, so we need to add new folders and then re-parent
                # itr.
                parent = self.files_treestore.iter_parent(itr)
                split_text = new_text.split(os.path.sep)
                for s in split_text[:-1]:
                    # We don't iterate over the last item because we'll just use
                    # the existing itr and change the text
                    parent = self.files_treestore.append(parent, [
                        True, s + os.path.sep, 0, -1, False, gtk.STOCK_DIRECTORY
                    ])

                self.files_treestore[itr][1] = split_text[-1] + os.path.sep

                # Now re-parent itr to parent
                reparent_iter(self.files_treestore, itr, parent)
                itr = parent

                # We need to re-expand the view because it might have
                # contracted when we changed the root iter
                self.listview_files.expand_row('0', False)
            else:
                # This was a simple folder rename without any splits, so just
                # change the path for itr
                self.files_treestore[itr][1] = new_text + os.path.sep

            # Walk through the tree from 'itr' and add all the new file paths
            # to the 'mapped_files' option
            walk_tree(itr)
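
The dialog leans on os.path.sep in two ways: stripping it from the edges of
the edited text, and splitting on it to decide whether intermediate folders
must be created. The same decomposition, reduced to a GUI-free helper with
a hypothetical name:

import os

def split_rename(new_text):
    """Split an edited name into (folders, leaf), as the dialog does."""
    new_text = new_text.strip(os.path.sep).strip()
    if os.path.sep not in new_text:
        # Simple rename: no folders to create.
        return [], new_text
    parts = new_text.split(os.path.sep)
    return parts[:-1], parts[-1]

# On POSIX, split_rename('a/b/c.txt') returns (['a', 'b'], 'c.txt').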