os.path.abspath

Here are examples of the Python API os.path.abspath taken from open source projects.

200 Examples
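
Before the project examples, a quick sketch of the call itself. On most platforms os.path.abspath(path) is equivalent to os.path.normpath(os.path.join(os.getcwd(), path)): it anchors a relative path to the current working directory and normalizes it, without touching the filesystem.

import os

# abspath never consults the filesystem: the path does not have to exist,
# and symbolic links are left unresolved (os.path.realpath resolves them).
print(os.path.abspath("."))              # the current working directory
print(os.path.abspath("a/../b.txt"))     # e.g. <cwd>/b.txt after normalization
print(os.path.abspath("missing/file"))   # fine even though nothing exists there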

Example 1

Project: PyClassLessons
Source File: install.py
    def run(self, options, args):

        if (
            options.no_install or
            options.no_download or
            (options.build_dir != build_prefix) or
            options.no_clean
        ):
            logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
                              'and --no-clean are deprecated.  See https://github.com/pypa/pip/issues/906.')

        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
                raise CommandError("Target path exists but is not a directory, will not continue.")
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                        "--use-mirrors has been deprecated and will be removed"
                        " in the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                        "--mirrors has been deprecated and will be removed in "
                        " the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site,
            target_dir=temp_target_dir,
            session=session,
            pycompile=options.compile,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options, session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        try:
            if not options.no_download:
                requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
            else:
                requirement_set.locate_files()

            if not options.no_install and not self.bundle:
                requirement_set.install(install_options, global_options, root=options.root_path)
                installed = ' '.join([req.name for req in
                                      requirement_set.successfully_installed])
                if installed:
                    logger.notify('Successfully installed %s' % installed)
            elif not self.bundle:
                downloaded = ' '.join([req.name for req in
                                       requirement_set.successfully_downloaded])
                if downloaded:
                    logger.notify('Successfully downloaded %s' % downloaded)
            elif self.bundle:
                requirement_set.create_bundle(self.bundle_filename)
                logger.notify('Created bundle in %s' % self.bundle_filename)
        except PreviousBuildDirError:
            options.no_clean = True
            raise
        finally:
            # Clean up
            if (not options.no_clean) and ((not options.no_install) or options.download_dir):
                requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(
                    os.path.join(lib_dir, item),
                    os.path.join(options.target_dir, item)
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set
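
The pattern to take from this example: pip converts user-supplied directory options to absolute paths once, up front, so that later os.chdir calls or relative lookups cannot silently change what the options refer to. A stripped-down sketch of that idea, using an argparse Namespace in place of pip's options object (the attribute names are illustrative):

import os
from argparse import Namespace

def normalize_path_options(options):
    # Anchor user-supplied directories before the process does anything
    # that could move the current working directory.
    options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)
    return options

opts = normalize_path_options(Namespace(build_dir="build", src_dir="src"))
print(opts.build_dir, opts.src_dir)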

Example 2

Project: pip
Source File: install.py
    def run(self, options, args):
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.as_egg:
            warnings.warn(
                "--egg has been deprecated and will be removed in the future. "
                "This flag is mutually exclusive with large parts of pip, and "
                "actually using it invalidates pip's ability to manage the "
                "installation process.",
                RemovedInPip10Warning,
            )

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    upgrade_strategy=options.upgrade_strategy,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                        )

                        possible_lib_locations = get_lib_location_guesses(
                            user=options.use_user_site,
                            home=temp_target_dir,
                            root=options.root_path,
                            prefix=options.prefix_path,
                            isolated=options.isolated_mode,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                installed_version = get_installed_version(
                                    req.name, possible_lib_locations
                                )
                                if installed_version:
                                    item += '-' + installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            lib_dir_list = []

            purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    target_item_dir = os.path.join(options.target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not options.upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set
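
Beyond the same up-front abspath calls, this later pip version stages a --target install through a temporary directory and then moves the results into the absolute target path. A condensed sketch of that staging idea; stage_into_target and the build callback are illustrative names, not pip APIs:

import os
import shutil
import tempfile

def stage_into_target(target_dir, build):
    # Resolve the target first; the build step may run with a different cwd.
    target_dir = os.path.abspath(target_dir)
    if os.path.exists(target_dir) and not os.path.isdir(target_dir):
        raise RuntimeError("Target path exists but is not a directory")
    tmp = tempfile.mkdtemp()
    try:
        build(tmp)  # the caller populates the temporary directory
        os.makedirs(target_dir, exist_ok=True)
        for item in os.listdir(tmp):
            shutil.move(os.path.join(tmp, item), os.path.join(target_dir, item))
    finally:
        shutil.rmtree(tmp, ignore_errors=True)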

Example 3

Project: pytrainer
Source File: googlemaps.py
    def createHtml_api3(self,polyline, minlat, minlon, maxlat, maxlon, startinfo, finishinfo, laps, linetype):
        '''
        Generate a Google maps html file using the v3 api
            documentation at http://code.google.com/apis/maps/documentation/v3
        '''
        logging.debug(">>")
        if self.waypoint is not None:
            waypoints = self.waypoint.getAllWaypoints()
            #TODO waypoints not supported in this function yet
            #TODO sort polyline encoding (not supported in v3?)
            #TODO check http://code.google.com/apis/maps/documentation/v3/overlays.html#Polylines for MVArray??
        content = '''
        <html>
        <head>
        <style type="text/css">
            div.info_content { font-family: sans-serif; font-size: 10px; }
        </style>
        <meta name="viewport" content="initial-scale=1.0, user-scalable=no" />
        <script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false"></script>
        <script type="text/javascript">
          function initialize() {\n'''
        content += "            var startlatlng = %s ;\n" % (polyline[0][0])
        content += "            var centerlatlng = new google.maps.LatLng(%f, %f);\n" % ((minlat+maxlat)/2., (minlon+maxlon)/2.)
        content += "            var endlatlng = %s;\n" % (polyline[-1][0])
        content += "            var swlatlng = new google.maps.LatLng(%f, %f);\n" % (minlat,minlon)
        content += "            var nelatlng = new google.maps.LatLng(%f, %f);\n" % (maxlat,maxlon)
        content += "            var startcontent = \"%s\";\n" % (startinfo)
        content += "            var finishcontent = \"%s\";\n" % (finishinfo)
        content += "            var startimageloc = \"%s/glade/start.png\";\n" % (os.path.abspath(self.data_path))
        content += "            var finishimageloc = \"%s/glade/finish.png\";\n" % (os.path.abspath(self.data_path))
        content += "            var lapimageloc = \"%s/glade/waypoint.png\";\n" % (os.path.abspath(self.data_path))
        content +='''
            var myOptions = {
              zoom: 8,
              center: centerlatlng,
              scaleControl: true,
              mapTypeId: google.maps.MapTypeId.ROADMAP
            };

            var startimage = new google.maps.MarkerImage(startimageloc,\n
              // This marker is 32 pixels wide by 32 pixels tall.
              new google.maps.Size(32, 32),
              // The origin for this image is 0,0.
              new google.maps.Point(0,0),
              // The anchor for this image is the base of the flagpole
              new google.maps.Point(16, 32));\n\n
            var finishimage = new google.maps.MarkerImage(finishimageloc,\n
              // This marker is 32 pixels wide by 32 pixels tall.
              new google.maps.Size(32, 32),
              // The origin for this image is 0,0.
              new google.maps.Point(0,0),
              // The anchor for this image is the base of the flagpole
              new google.maps.Point(16, 32));\n

            var lapimage = new google.maps.MarkerImage(lapimageloc,\n
              // This marker is 32 pixels wide by 32 pixels tall.
              new google.maps.Size(32, 32),
              // The origin for this image is 0,0.
              new google.maps.Point(0,0),
              // The anchor for this image is the base of the flagpole
              new google.maps.Point(16, 32));\n

            var map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
            var startmarker = new google.maps.Marker({
              position: startlatlng,
              map: map,
              icon: startimage,
              title:"Start"});

            var finishmarker = new google.maps.Marker({
              position: endlatlng,
              icon: finishimage,
              map: map,
              title:"End"}); \n

            //Add an infowindows
            var startinfo = new google.maps.InfoWindow({
                content: startcontent
            });

            var finishinfo = new google.maps.InfoWindow({
                content: finishcontent
            });

            google.maps.event.addListener(startmarker, 'click', function() {
              startinfo.open(map,startmarker);
            });

            google.maps.event.addListener(finishmarker, 'click', function() {
              finishinfo.open(map,finishmarker);
            });\n'''

        #"id_lap, record, elapsed_time, distance, start_lat, start_lon, end_lat, end_lon, calories, lap_number",
        for lap in laps:
            lapNumber = int(lap['lap_number'])+1
            elapsedTime = float(lap['elapsed_time'])
            elapsedTimeHours = int(elapsedTime/3600)
            elapsedTimeMins = int((elapsedTime - (elapsedTimeHours * 3600)) / 60)
            elapsedTimeSecs = elapsedTime - (elapsedTimeHours * 3600) - (elapsedTimeMins * 60)
            if elapsedTimeHours > 0:
                strElapsedTime = "%0.0dh:%0.2dm:%0.2fs" % (elapsedTimeHours, elapsedTimeMins, elapsedTimeSecs)
            elif elapsedTimeMins > 0:
                strElapsedTime = "%0.0dm:%0.2fs" % (elapsedTimeMins, elapsedTimeSecs)
            else:
                strElapsedTime = "%0.0fs" % (elapsedTimeSecs)
            #process lat and lon for this lap
            try:
                lapLat = float(lap['end_lat'])
                lapLon = float(lap['end_lon'])
                content += "var lap%dmarker = new google.maps.Marker({position: new google.maps.LatLng(%f, %f), icon: lapimage, map: map,  title:\"Lap%d\"}); \n " % (lapNumber, lapLat, lapLon, lapNumber)
                content += "var lap%d = new google.maps.InfoWindow({content: \"<div class='info_content'>End of lap:%s<br>Elapsed time:%s<br>Distance:%0.2f km<br>Calories:%s</div>\" });\n" % (lapNumber, lapNumber, strElapsedTime, float(lap['distance'])/1000, lap['calories'])
                content += "google.maps.event.addListener(lap%dmarker, 'click', function() { lap%d.open(map,lap%dmarker); });\n" % (lapNumber,lapNumber,lapNumber)
            except Exception as e:
                #Error processing lap lat or lon
                #dont show this lap
                logging.debug( "Error processing lap "+ str(lap) )
                logging.debug(str(e))

        content += '''

            var boundsBox = new google.maps.LatLngBounds(swlatlng, nelatlng );\n
            map.fitBounds(boundsBox);\n'''
            
        pre = 0
        for point in polyline:
            if pre:
                content += '''var polylineCoordinates = [\n'''
                content += "                                       %s,\n" % (pre[0])
                content += "                                       %s,\n" % (point[0])
                content += '''            ];\n
                    // Add a polyline.\n
                    var polyline = new google.maps.Polyline({\n
                            path: polylineCoordinates,\n
                            strokeColor: \"%s\",\n
                            strokeOpacity: 0.9,\n
                            strokeWeight: 5,\n
                            });\n
                polyline.setMap(map);\n''' % point[2]
                
                contenttemplate = [
                    "%s",
                    "Speed: %0.1f km/h",
                    "HR: %d bpm",
                    "Cadence: %d",
                ]
                
                content += '''
                    google.maps.event.addListener(polyline, 'click', function(event) {
                        var marker = new google.maps.InfoWindow({
                          position: event.latLng, 
                          content: "%s"
                        });
                        marker.setMap(map);
                    });
                    ''' % contenttemplate[linetype] % point[1]
            pre = point
        
        content += '''
          }

        </script>
        </head>
        <body onload="initialize()">
          <div id="map_canvas" style="width:100%; height:100%"></div>
        </body>
        </html>'''
        file = fileUtils(self.htmlfile,content)
        file.run()
        logging.debug("<<")

Example 4

Project: lorax
Source File: cmdline.py
def lmc_parser(dracut_default=""):
    """ Return an ArgumentParser object for live-media-creator."""
    parser = argparse.ArgumentParser(description="Create Live Install Media",
                                     fromfile_prefix_chars="@")

    # These are mutually exclusive, one is required
    action = parser.add_mutually_exclusive_group(required=True)
    action.add_argument("--make-iso", action="store_true",
                        help="Build a live iso")
    action.add_argument("--make-disk", action="store_true",
                        help="Build a partitioned disk image")
    action.add_argument("--make-fsimage", action="store_true",
                        help="Build a filesystem image")
    action.add_argument("--make-appliance", action="store_true",
                        help="Build an appliance image and XML description")
    action.add_argument("--make-ami", action="store_true",
                        help="Build an ami image")
    action.add_argument("--make-tar", action="store_true",
                        help="Build a tar of the root filesystem")
    action.add_argument("--make-pxe-live", action="store_true",
                        help="Build a live pxe boot squashfs image")
    action.add_argument("--make-ostree-live", action="store_true",
                        help="Build a live pxe boot squashfs image of Atomic Host")
    action.add_argument("--make-oci", action="store_true",
                        help="Build an Open Container Initiative image")
    action.add_argument("--make-vagrant", action="store_true",
                        help="Build a Vagrant Box image")

    parser.add_argument("--iso", type=os.path.abspath,
                        help="Anaconda installation .iso path to use for qemu")
    parser.add_argument("--iso-only", action="store_true",
                        help="Remove all iso creation artifacts except the boot.iso, "
                             "combine with --iso-name to rename the boot.iso")
    parser.add_argument("--iso-name", default=None,
                        help="Name of output iso file for --iso-only. Default is boot.iso")
    parser.add_argument("--ks", action="append", type=os.path.abspath,
                        help="Kickstart file defining the install.")
    parser.add_argument("--image-only", action="store_true",
                        help="Exit after creating fs/disk image.")

    parser.add_argument("--no-virt", action="store_true",
                        help="Run anaconda directly on host instead of using qemu")
    parser.add_argument("--proxy",
                        help="proxy URL to use for the install")
    parser.add_argument("--anaconda-arg", action="append", dest="anaconda_args",
                        help="Additional argument to pass to anaconda (no-virt "
                             "mode). Pass once for each argument")
    parser.add_argument("--armplatform",
                        help="the platform to use when creating images for ARM, "
                             "i.e., highbank, mvebu, omap, tegra, etc.")
    parser.add_argument("--location", default=None, type=os.path.abspath,
                        help="location of iso directory tree with initrd.img "
                             "and vmlinuz. Used to run qemu with a newer initrd "
                             "than the iso.")

    parser.add_argument("--logfile", default="./livemedia.log",
                        type=os.path.abspath,
                        help="Name and path for primary logfile, other logs will "
                             "be created in the same directory.")
    parser.add_argument("--lorax-templates", default=None,
                        type=os.path.abspath,
                        help="Path to mako templates for lorax")
    parser.add_argument("--tmp", default="/var/tmp", type=os.path.abspath,
                        help="Top level temporary directory")
    parser.add_argument("--resultdir", default=None, dest="result_dir",
                        type=os.path.abspath,
                        help="Directory to copy the resulting images and iso into. "
                             "Defaults to the temporary working directory")

    parser.add_argument("--macboot", action="store_true", default=True,
                        dest="domacboot")
    parser.add_argument("--nomacboot", action="store_false",
                        dest="domacboot")

    image_group = parser.add_argument_group("disk/fs image arguments")
    image_group.add_argument("--disk-image", type=os.path.abspath,
                             help="Path to existing disk image to use for creating final image.")
    image_group.add_argument("--keep-image", action="store_true",
                             help="Keep raw disk image after .iso creation")
    image_group.add_argument("--fs-image", type=os.path.abspath,
                             help="Path to existing filesystem image to use for creating final image.")
    image_group.add_argument("--image-name", default=None,
                             help="Name of output file to create. Used for tar, fs and disk image. Default is a random name.")
    image_group.add_argument("--fs-label", default="Anaconda",
                             help="Label to set on fsimage, default is 'Anaconda'")
    image_group.add_argument("--image-type", default=None,
                             help="Create an image with qemu-img. See qemu-img --help for supported formats.")
    image_group.add_argument("--qemu-arg", action="append", dest="qemu_args", default=[],
                             help="Arguments to pass to qemu-img. Pass once for each argument, they will be used for ALL calls to qemu-img.")
    image_group.add_argument("--qcow2", action="store_true",
                             help="Create qcow2 image instead of raw sparse image when making disk images.")
    image_group.add_argument("--qcow2-arg", action="append", dest="qemu_args", default=[],
                             help="Arguments to pass to qemu-img. Pass once for each argument, they will be used for ALL calls to qemu-img.")
    image_group.add_argument("--compression", default="xz",
                             help="Compression binary for make-tar. xz, lzma, gzip, and bzip2 are supported. xz is the default.")
    image_group.add_argument("--compress-arg", action="append", dest="compress_args", default=[],
                             help="Arguments to pass to compression. Pass once for each argument")
    # Group of arguments for appliance creation
    app_group = parser.add_argument_group("appliance arguments")
    app_group.add_argument("--app-name", default=None,
                           help="Name of appliance to pass to template")
    app_group.add_argument("--app-template", default=None,
                           help="Path to template to use for appliance data.")
    app_group.add_argument("--app-file", default="appliance.xml",
                           help="Appliance template results file.")

    # Group of arguments to pass to qemu
    virt_group = parser.add_argument_group("qemu arguments")
    virt_group.add_argument("--ram", metavar="MEMORY", type=int, default=1024,
                            help="Memory to allocate for installer in megabytes.")
    virt_group.add_argument("--vcpus", type=int, default=None,
                            help="Passed to qemu -smp command")
    virt_group.add_argument("--vnc",
                            help="Passed to qemu -display command. eg. vnc=127.0.0.1:5, default is to "
                                 "choose the first unused vnc port.")
    virt_group.add_argument("--arch", default=None,
                            help="System arch to build for. Used to select qemu-system-* command. "
                                 "Defaults to qemu-system-<arch>")
    virt_group.add_argument("--kernel-args",
                            help="Additional argument to pass to the installation kernel")
    virt_group.add_argument("--ovmf-path", default="/usr/share/edk2/ovmf/",
                            help="Path to OVMF firmware")
    virt_group.add_argument("--virt-uefi", action="store_true", default=False,
                            help="Use OVMF firmware to boot the VM in UEFI mode")
    virt_group.add_argument("--no-kvm", action="store_true", default=False,
                            help="Skip using kvm with qemu even if it is available.")

    # dracut arguments
    dracut_group = parser.add_argument_group("dracut arguments")
    dracut_group.add_argument("--dracut-arg", action="append", dest="dracut_args",
                              help="Argument to pass to dracut when "
                                   "rebuilding the initramfs. Pass this "
                                   "once for each argument. NOTE: this "
                                   "overrides the default. (default: %s)" % dracut_default)

    # pxe to live arguments
    pxelive_group = parser.add_argument_group("pxe to live arguments")
    pxelive_group.add_argument("--live-rootfs-size", type=int, default=0,
                                help="Size of root filesystem of live image in GiB")
    pxelive_group.add_argument("--live-rootfs-keep-size", action="store_true",
                                help="Keep the original size of root filesystem in live image")

    # OCI specific commands
    oci_group = parser.add_argument_group("OCI arguments")
    oci_group.add_argument("--oci-config",
                              help="config.json OCI configuration file")
    oci_group.add_argument("--oci-runtime",
                              help="runtime.json OCI configuration file")

    # Vagrant specific commands
    vagrant_group = parser.add_argument_group("Vagrant arguments")
    vagrant_group.add_argument("--vagrant-metadata",
                               help="optional metadata.json file")
    vagrant_group.add_argument("--vagrantfile",
                               help="optional vagrantfile")

    parser.add_argument("--title", default="Linux Live Media",
                        help="Substituted for @[email protected] in bootloader config files")
    parser.add_argument("--project", default="Linux",
                        help="substituted for @[email protected] in bootloader config files")
    parser.add_argument("--releasever", default="25",
                        help="substituted for @[email protected] in bootloader config files")
    parser.add_argument("--volid", default=None, help="volume id")
    parser.add_argument("--squashfs_args",
                        help="additional squashfs args")
    parser.add_argument("--timeout", default=None, type=int,
                        help="Cancel installer after X minutes")

    return parser
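
The idiom worth lifting from this parser: argparse accepts any one-argument callable as a type, so type=os.path.abspath absolutizes path arguments at parse time. argparse also runs string defaults through the type callable, so a default like "./livemedia.log" ends up absolute as well. A self-contained sketch:

import argparse
import os

parser = argparse.ArgumentParser()
# Any callable taking one string works as an argparse type; the converted
# value is what lands on the namespace.
parser.add_argument("--logfile", type=os.path.abspath,
                    default="./livemedia.log")

args = parser.parse_args(["--logfile", "logs/run.log"])
print(args.logfile)  # e.g. <cwd>/logs/run.log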

Example 5

Project: python-dirtt
Source File: __init__.py
    def startElement(self, name, attrs):
        """
        When an XML element is first read, this function is run
        to process its attributes before moving on
        to its contents and then to endElement
        """

        # set the current directory
        self.current_dir = os.path.abspath(".")

        # get the basename attribute or None
        basename = attrs.get("basename", None)

        # get the permissions and ownership
        perms, uid, gid = self._return_perms_uid_gid(attrs)

        # get the directory name attribute or None
        self.dirname = attrs.get("dirname", None)

        # if xml elementname is dirtt let's get started
        if name == 'dirtt':
            self.logger.debug("Starting Directory Tree Template Build...")
            self.logger.debug("Changing current directory to: %s" % self.dirname)
            # change to our starting directory
            if not basename:
                self.dirname, basename = os.path.split(self.dirname)
            os.chdir(self.dirname)
            self.current_dir = os.path.abspath(".")
            self.idrefs[attrs.get("id", "root-dir")] = self.current_dir

        if basename:
            if self.skip_entity: self.skip_entity += 1

            # if the entity is our main dirtt entity or a directory proceed here
            if name in ('dirtt','dir'):
                dirname = attrs.get("dirname", None)
                if self.interactive:
                    if not self.skip_entity:
                        if not raw_input("Create Directory %s (yes/no)?" % os.path.join(self.current_dir,basename)) in ("yes","Yes","YES","Y","y"):
                            self.skip_entity += 1
                            self.logger.debug("Skipping dir: %s" % os.path.join(self.current_dir,basename))
                        else:
                            self.logger.debug("Creating dir: %s/%s (perms:%s uid:%i gid:%i)" % (self.current_dir, basename, oct(perms), uid, gid))
                            if dirname:
                                if name == 'dirtt':
                                    # When dealing with a 'dirtt' tag use self.dirname as the current
                                    # dirname, as at this point self.dirname has been properly set
                                    # (i.e. if no basename was provided, the basename is inferred
                                    # from dirname)
                                    dirname = self.dirname
                                newdir = os.path.join(dirname, basename)
                            else:
                                newdir = basename

                            try:
                                create_dir(newdir, perms, uid, gid, self.warn)
                            except OSError as oserror:
                                if oserror.errno == errno.EISDIR:
                                    print >> sys.stderr, "A directory exists with that name ('%s'). \
                                     \nAborting directory creation." % basename
                                    sys.exit(-1)
                                elif oserror.errno == errno.EISDIR:
                                    # NOTE: duplicate EISDIR test; this branch is unreachable as written
                                    print >> sys.stderr, "A file exists with the name of the desired dir ('%s'). \
                                     \nAborting directory creation." % basename
                                    sys.exit(-2)

                            self._push_dir()
                            os.chdir(basename)
                            self.current_dir = os.path.abspath(".")

                else:
                    self.logger.debug("Creating dir: %s/%s (perms:%s uid:%i gid:%i)" % (self.current_dir, basename, oct(perms), uid, gid))
                    if dirname:
                        if name == 'dirtt':
                            # When dealing with a 'dirtt' tag use self.dirname as the current
                            # dirname, as at this point self.dirname has been properly set
                            # (i.e. if no basename was provided, the basename is inferred
                            # from dirname)
                            dirname = self.dirname
                        newdir = os.path.join(dirname, basename)
                    else:
                        newdir = basename

                    try:
                        create_dir(newdir, perms, uid, gid, self.warn)
                    except OSError as oserror:
                        if oserror.errno == errno.EISDIR:
                            print >> sys.stderr, "A directory exists with that name ('%s'). \
                                    \nAborting directory creation." % basename
                            sys.exit(-1)
                        elif oserror.errno == errno.EISDIR:
                            # NOTE: duplicate EISDIR test; this branch is unreachable as written
                            print >> sys.stderr, "A file exists with the name of the desired dir ('%s'). \
                                    \nAborting directory creation." % basename
                            sys.exit(-2)

                    self._push_dir()
                    os.chdir(newdir)
                    self.current_dir = os.path.abspath(".")

                if attrs.get("id"):
                    self.idrefs[attrs.get("id")] = self.current_dir

            if name == 'file':
                self.logger.debug("Creating file: %s/%s (perms:%s uid:%i gid:%i)" % (self.current_dir, basename, oct(perms), uid, gid))
                href = attrs.get("href",None)
                content = ""
                if not href is None:
                    template_file = os.path.join(TEMPLATES_DIR,href)
                    template_str = self._read_template(template_file)
                    content = self._parse_template(template_str, template_file)
                create_file(basename, content, perms, uid, gid)

        else:
            if name in ('dirtt', 'dir'):
                if not self.skip_entity:
                    self.skip_entity += 1

        if name == 'link':
            try:
                if (not attrs.get("idref", attrs.get("ref", None))) or (not attrs.get("basename", None)):
                     return
                ref = attrs.get("idref", attrs.get("ref"))
                link_name = attrs.get("basename")
                if ref == attrs.get("idref", None):
                    ref = self.idrefs[ref]
                target_dir = attrs.get("dirname",self.current_dir)
                self.links.append({'basename': link_name, 'parent_dir': target_dir, 'ref': ref})
            except:
                pass

        if name == 'xi:include':
            href = attrs.get("href")
            # Check for an HTTP url or an absolute file location
            if href.startswith('http://'):
                template_loc = href
            elif href.startswith('file:///'):
                template_loc = href
            else:
                template_loc = os.path.join(self.tree_template_loc,href)
            c = DirectoryTreeHandler(self.verbose, template_loc, self.kwargs, self.interactive, self.warn, self.processed_templates)
            c.run()
        return
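
The recurring call in this handler, os.path.abspath("."), simply re-reads the current directory after each os.chdir; it returns the same value as os.getcwd(). A sketch of the descend-and-record pattern, with illustrative names rather than python-dirtt's API:

import os

def enter_dir(dir_stack, dirname):
    # Remember where we were, descend, and report the new location.
    dir_stack.append(os.path.abspath("."))  # same value as os.getcwd()
    os.chdir(dirname)
    return os.path.abspath(".")

stack = []
print(enter_dir(stack, "/tmp"), stack)  # assumes a Unix-like /tmp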

Example 6

def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos',
        nargs='*',
        default=[],
        metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch',
        nargs='*',
        type=_repository_name_and_branch,
        default=[],
        metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo',
        nargs='*',
        type=_repository_name_and_type_and_url_and_branch,
        default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository')

    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by source build file are subset of targets
    # defined by release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1

    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [(repositories[k], 'catkin_workspace/src/%s' % k)
            for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # use random source repo to pass to devel job template
    source_repository = deepcopy(list(repositories.values())[0])
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the distribution " +
               'file. We cannot generate a prerelease without a source entry.') % repo_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)

    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue

        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')

        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        # the underlay as well as the overlay workspace
        # - catkin_make_isolated_and_test.py needs to source the environment of
        # the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/catkin_workspace:/tmp/catkin_workspace'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'catkin_workspace_overlay:/tmp/catkin_workspace_overlay')

        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')

        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')

        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__))})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print('  cd %s' % args.output_dir)
    print('  ./prerelease.sh')
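
Two abspath idioms appear at the end of this script: os.path.dirname(os.path.abspath(__file__)) locates the directory of the running source file regardless of the caller's working directory, and comparing abspath(args.output_dir) with abspath(os.curdir) tests whether two paths name the same directory. The first idiom, distilled:

import os

# The directory containing this source file, independent of the cwd.
HERE = os.path.dirname(os.path.abspath(__file__))
# One more dirname walks up a level, as the example does to compute
# ros_buildfarm_python_path.
PARENT = os.path.dirname(HERE)
print(HERE, PARENT)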

Example 7

Project: circlator
Source File: all.py
def run():
    parser = argparse.ArgumentParser(
        description = 'Run mapreads, bam2reads, assemble, merge, clean, fixstart',
        usage = 'circlator all [options] <assembly.fasta> <reads.fasta> <output directory>')
    parser.add_argument('--threads', type=int, help='Number of threads [%(default)s]', default=1, metavar='INT')
    parser.add_argument('--verbose', action='store_true', help='Be verbose')
    parser.add_argument('--unchanged_code', type=int, help='Code to return when the input assembly is not changed [%(default)s]', default=0, metavar='INT')
    parser.add_argument('assembly', help='Name of original assembly', metavar='assembly.fasta')
    parser.add_argument('reads', help='Name of corrected reads FASTA file', metavar='reads.fasta')
    parser.add_argument('outdir', help='Name of output directory (must not already exist)', metavar='output directory')

    mapreads_group = parser.add_argument_group('mapreads options')
    mapreads_group.add_argument('--bwa_opts', help='BWA options, in quotes [%(default)s]', default='-x pacbio', metavar='STRING')

    bam2reads_group = parser.add_argument_group('bam2reads options')
    bam2reads_group.add_argument('--b2r_discard_unmapped', action='store_true', help='Use this to not keep unmapped reads')
    bam2reads_group.add_argument('--b2r_only_contigs', help='File of contig names (one per line). Only reads that map to these contigs are kept (and unmapped reads, unless --b2r_discard_unmapped is used). Note: the whole assembly is still used as a reference when mapping', metavar='FILENAME')
    bam2reads_group.add_argument('--b2r_length_cutoff', type=int, help='All reads mapped to contigs shorter than this will be kept [%(default)s]', default=100000, metavar='INT')
    bam2reads_group.add_argument('--b2r_min_read_length', type=int, help='Minimum length of read to output [%(default)s]', default=250, metavar='INT')

    assemble_group = parser.add_argument_group('assemble options')
    parser.add_argument('--assemble_spades_k', help='Comma separated list of kmers to use when running SPAdes. Max kmer is 127 and each kmer should be an odd integer [%(default)s]', default='127,117,107,97,87,77', metavar='k1,k2,k3,...')
    parser.add_argument('--assemble_spades_use_first', action='store_true', help='Use the first successful SPAdes assembly. Default is to try all kmers and use the assembly with the largest N50')
    parser.add_argument('--assemble_not_careful', action='store_true', help='Do not use the --careful option with SPAdes (used by default)')
    parser.add_argument('--assemble_not_only_assembler', action='store_true', help='Do not use the --assemble-only option with SPAdes (used by default)')

    merge_group = parser.add_argument_group('merge options')
    merge_group.add_argument('--merge_diagdiff', type=int, help='Nucmer diagdiff option [%(default)s]', metavar='INT', default=25)
    merge_group.add_argument('--merge_min_id', type=float, help='Nucmer minimum percent identity [%(default)s]', metavar='FLOAT', default=95)
    merge_group.add_argument('--merge_min_length', type=int, help='Minimum length of hit for nucmer to report [%(default)s]', metavar='INT', default=500)
    merge_group.add_argument('--merge_min_length_merge', type=int, help='Minimum length of nucmer hit to use when merging [%(default)s]', metavar='INT', default=4000)
    merge_group.add_argument('--merge_min_spades_circ_pc', type=float, help='Min percent of contigs needed to be covered by nucmer hits to spades circular contigs [%(default)s]', metavar='FLOAT', default=95)
    merge_group.add_argument('--merge_breaklen', type=int, help='breaklen option used by nucmer [%(default)s]', metavar='INT', default=500)
    merge_group.add_argument('--merge_ref_end', type=int, help='max distance allowed between nucmer hit and end of input assembly contig [%(default)s]', metavar='INT', default=15000)
    merge_group.add_argument('--merge_reassemble_end', type=int, help='max distance allowed between nucmer hit and end of reassembly contig [%(default)s]', metavar='INT', default=1000)
    merge_group.add_argument('--no_pair_merge', action='store_true', help='Do not merge pairs of contigs when running merge task')

    clean_group = parser.add_argument_group('clean options')
    clean_group.add_argument('--clean_min_contig_length', type=int, help='Contigs shorter than this are discarded (unless specified using --keep) [%(default)s]', default=2000, metavar='INT')
    clean_group.add_argument('--clean_min_contig_percent', type=float, help='If length of nucmer hit is at least this percentage of length of contig, then contig is removed (unless specified using --keep) [%(default)s]', default=95, metavar='FLOAT')
    clean_group.add_argument('--clean_diagdiff', type=int, help='Nucmer diagdiff option [%(default)s]', metavar='INT', default=25)
    clean_group.add_argument('--clean_min_nucmer_id', type=float, help='Nucmer minimum percent identity [%(default)s]', metavar='FLOAT', default=95)
    clean_group.add_argument('--clean_min_nucmer_length', type=int, help='Minimum length of hit for nucmer to report [%(default)s]', metavar='INT', default=500)
    clean_group.add_argument('--clean_breaklen', type=int, help='breaklen option used by nucmer [%(default)s]', metavar='INT', default=500)

    fixstart_group = parser.add_argument_group('fixstart options')
    fixstart_group.add_argument('--genes_fa', help='FASTA file of genes to search for to use as start point. If this option is not used, a built-in set of dnaA genes is used', metavar='FILENAME')
    fixstart_group.add_argument('--fixstart_mincluster', type=int, help='The -c|mincluster option of promer. If this option is used, it overrides promer\'s default value', metavar='INT')
    fixstart_group.add_argument('--fixstart_min_id', type=float, help='Minimum percent identity of promer match between contigs and gene(s) to use as start point [%(default)s]', default=70, metavar='FLOAT')

    options = parser.parse_args()

    print_message('{:_^79}'.format(' Checking external programs '), options)
    if options.verbose:
        circlator.versions.get_all_versions(sys.stdout, raise_error=True)
    else:
        circlator.versions.get_all_versions(None, raise_error=True)


    files_to_check = [options.assembly, options.reads]
    if options.b2r_only_contigs:
        files_to_check.append(options.b2r_only_contigs)
        options.b2r_only_contigs = os.path.abspath(options.b2r_only_contigs)

    if options.genes_fa:
        files_to_check.append(options.genes_fa)

    circlator.common.check_files_exist(files_to_check)

    if options.genes_fa:
        options.genes_fa = os.path.abspath(options.genes_fa)

    original_assembly = os.path.abspath(options.assembly)
    original_reads = os.path.abspath(options.reads)


    try:
        os.mkdir(options.outdir)
    except OSError as e:
        print('Error making output directory', options.outdir, '-', e, file=sys.stderr)
        sys.exit(1)

    os.chdir(options.outdir)

    with open('00.info.txt', 'w') as f:
        print(sys.argv[0], 'all', ' '.join(sys.argv[1:]), file=f)
        circlator.versions.get_all_versions(f)

    original_assembly_renamed = '00.input_assembly.fasta'
    bam = '01.mapreads.bam'
    filtered_reads_prefix = '02.bam2reads'
    filtered_reads = filtered_reads_prefix + '.fasta'
    assembly_dir = '03.assemble'
    reassembly = os.path.join(assembly_dir, 'contigs.fasta')
    merge_prefix = '04.merge'
    merged_fasta = merge_prefix + '.fasta'
    clean_prefix = '05.clean'
    clean_fasta = clean_prefix + '.fasta'
    fixstart_prefix = '06.fixstart'
    fixstart_fasta = fixstart_prefix + '.fasta'

    pyfastaq.tasks.to_fasta(
        original_assembly,
        original_assembly_renamed,
        strip_after_first_whitespace=True,
        check_unique=True
    )

    #-------------------------------- mapreads -------------------------------
    print_message('{:_^79}'.format(' Running mapreads '), options)
    circlator.mapping.bwa_mem(
        original_assembly_renamed,
        original_reads,
        bam,
        threads=options.threads,
        bwa_options=options.bwa_opts,
        verbose=options.verbose,
    )


    #-------------------------------- bam2reads ------------------------------
    print_message('{:_^79}'.format(' Running bam2reads '), options)
    bam_filter = circlator.bamfilter.BamFilter(
        bam,
        filtered_reads_prefix,
        length_cutoff=options.b2r_length_cutoff,
        min_read_length=options.b2r_min_read_length,
        contigs_to_use=options.b2r_only_contigs,
        discard_unmapped=options.b2r_discard_unmapped,
        verbose=options.verbose,
    )
    bam_filter.run()


    #-------------------------------- assemble -------------------------------
    print_message('{:_^79}'.format(' Running assemble '), options)
    a = circlator.assemble.Assembler(
        filtered_reads,
        assembly_dir,
        threads=options.threads,
        careful=not options.assemble_not_careful,
        only_assembler=not options.assemble_not_only_assembler,
        spades_kmers=options.assemble_spades_k,
        spades_use_first_success=options.assemble_spades_use_first,
        verbose=options.verbose
    )
    a.run()


    #------------------------------ filter original assembly -----------------
    if options.b2r_only_contigs:
        print_message('{:_^79}'.format(' --b2r_only_contigs used - filtering contigs '), options)
        assembly_to_use = merge_prefix + '.00.filtered_assembly.fa'
        pyfastaq.tasks.filter(original_assembly_renamed, assembly_to_use, ids_file=options.b2r_only_contigs)
    else:
        assembly_to_use = original_assembly_renamed


    #-------------------------------- merge ----------------------------------
    print_message('{:_^79}'.format(' Running merge '), options)
    if not options.no_pair_merge:
        merge_reads = filtered_reads
    else:
        merge_reads = None

    m = circlator.merge.Merger(
        assembly_to_use,
        reassembly,
        merge_prefix,
        nucmer_diagdiff=options.merge_diagdiff,
        nucmer_min_id=options.merge_min_id,
        nucmer_min_length=options.merge_min_length,
        nucmer_min_length_for_merges=options.merge_min_length_merge,
        min_spades_circular_percent=options.merge_min_spades_circ_pc,
        spades_kmers=options.assemble_spades_k,
        spades_use_first_success=options.assemble_spades_use_first,
        spades_careful=not options.assemble_not_careful,
        spades_only_assembler=not options.assemble_not_only_assembler,
        nucmer_breaklen=options.merge_breaklen,
        ref_end_tolerance=options.merge_ref_end,
        qry_end_tolerance=options.merge_reassemble_end,
        threads=options.threads,
        verbose=options.verbose,
        reads=merge_reads
    )
    m.run()


    #-------------------------------- clean ----------------------------------
    merge_log = merge_prefix + '.circularise.log'
    contigs_to_keep = []
    contigs_to_not_fix_start = []
    with open(merge_log) as f:
        for line in f:
            if not line.startswith('[merge circularised]\t'):
                continue
            if line.rstrip() == '\t'.join(['[merge circularised]', '#Contig', 'repetitive_deleted', 'circl_using_nucmer', 'circl_using_spades', 'circularised']):
                continue

            _, name, _, _, _, circularised = line.rstrip().split('\t')
            if circularised == '1':
                contigs_to_keep.append(name)
            else:
                contigs_to_not_fix_start.append(name)

    clean_keep_file = clean_prefix + '.contigs_to_keep'
    with open(clean_keep_file, 'w') as f:
        if len(contigs_to_keep) > 0:
            print('\n'.join(contigs_to_keep), file=f)

    not_fix_start_file = fixstart_prefix + '.contigs_to_not_change'
    with open(not_fix_start_file, 'w') as f:
        if len(contigs_to_not_fix_start) > 0:
            print('\n'.join(contigs_to_not_fix_start), file=f)

    print_message('{:_^79}'.format(' Running clean '), options)

    cleaner = circlator.clean.Cleaner(
        merged_fasta,
        clean_prefix,
        min_contig_length=options.clean_min_contig_length,
        min_contig_percent_match=options.clean_min_contig_percent,
        nucmer_diagdiff=options.clean_diagdiff,
        nucmer_min_id=options.clean_min_nucmer_id,
        nucmer_min_length=options.clean_min_nucmer_length,
        nucmer_breaklen=options.clean_breaklen,
        keepfile=clean_keep_file,
        verbose=options.verbose
    )
    cleaner.run()


    #-------------------------------- fixstart -------------------------------
    print_message('{:_^79}'.format(' Running fixstart '), options)
    fixer = circlator.start_fixer.StartFixer(
        clean_fasta,
        fixstart_prefix,
        min_percent_identity=options.fixstart_min_id,
        promer_mincluster=options.fixstart_mincluster,
        genes_fa=options.genes_fa,
        ignore=not_fix_start_file,
        verbose=options.verbose
    )
    fixer.run()

    #-------------------------------- summary -------------------------------
    print_message('{:_^79}'.format(' Summary '), options)
    number_of_input_contigs = pyfastaq.tasks.count_sequences(original_assembly_renamed)
    final_number_of_contigs = pyfastaq.tasks.count_sequences(fixstart_fasta)
    number_circularized = len(contigs_to_keep)
    print_message('Number of input contigs: ' + str(number_of_input_contigs), options)
    print_message('Number of contigs after merging: ' + str(final_number_of_contigs), options)
    print_message(' '.join(['Circularized', str(number_circularized), 'of', str(final_number_of_contigs), 'contig(s)']), options)

    with open(fixstart_prefix + '.ALL_FINISHED', 'w') as f:
        pass

    if number_of_input_contigs == final_number_of_contigs and number_circularized == 0:
        sys.exit(options.unchanged_code)
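
A minimal sketch of the pattern this example depends on: every user-supplied path is run through os.path.abspath() before os.chdir(outdir), so later steps can still reach the inputs from inside the output directory. The function and argument names below are illustrative, not circlator's API.

import os

def run_pipeline(assembly, reads, outdir):
    # Resolve user-supplied paths while the caller's working directory
    # is still current; after os.chdir() below, relative arguments
    # would silently point somewhere else.
    assembly = os.path.abspath(assembly)
    reads = os.path.abspath(reads)

    os.mkdir(outdir)   # fails loudly if outdir already exists
    os.chdir(outdir)

    # assembly and reads are still valid here despite the chdir
    print(assembly)
    print(reads)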

Example 8

Project: perf-benchmarks
Source File: network_io.py
View license
def network_io_test(itype1, image1, region1, itype2, image2, region2, filesize=64, iteration=1, timeout=600): 
    
    ssh_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../.ssh/')

    if itype1 in ec2.ec2_instance_types:
        inst1 = ec2.EC2Inst(itype1, image1, region1, 'ubuntu', '%s/perf-bench-%s.pem'\
                            % (ssh_path, region1), 'perf-bench-%s' % region1)
    elif itype1 in gce.gce_instance_types:
        inst1 = gce.GCEInst(itype1, image1, region1, os.environ['USER'], '%s/google_compute_engine' % ssh_path)
    else:
        raise ValueError('Unknown instance type: %s' % itype1)

    if itype2 in ec2.ec2_instance_types:
        inst2 = ec2.EC2Inst(itype2, image2, region2, 'ubuntu', '%s/perf-bench-%s.pem'\
                            % (ssh_path, region2), 'perf-bench-%s' % region2)
    elif itype2 in gce.gce_instance_types:
        inst2 = gce.GCEInst(itype2, image2, region2, os.environ['USER'], '%s/google_compute_engine' % ssh_path)
    else:
        raise ValueError('Unknown instance type: %s' % itype2)

    inst1.launch()
    inst2.launch()

    try:

        print '[IP] waiting'
        for i in range(150):
            inst1.update()
            inst2.update()
            if inst1.remote_ip is not None and inst2.remote_ip is not None:
                print '[IP] ok'
                break
            time.sleep(2)
        
        print '[SSH] waiting'
        for i in range(120):
            try:
                telnetlib.Telnet(inst1.remote_ip, 22, 1)
                telnetlib.Telnet(inst2.remote_ip, 22, 1)
                print '[SSH] ok'
                break
            except Exception:
                time.sleep(2)
        
        print '[UP] %s | %s | %s' % (inst1.itype, inst1.region, inst1.remote_ip)
        print '[UP] %s | %s | %s' % (inst2.itype, inst2.region, inst2.remote_ip)

        util.instances_prepare([inst1, inst2], ['iperf', 'screen'])

        ssh_cli = paramiko.SSHClient()
        ssh_cli.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh_cli.connect(inst1.remote_ip, username=inst1.user, key_filename=inst1.ssh_key)
        scp_cli = scp.SCPClient(ssh_cli.get_transport())
        file_path = os.path.dirname(os.path.abspath(__file__))
        scp_cli.put('%s/netcattest.py' % file_path, '/tmp/netcattest.py')
        scp_cli.put('%s/scptest.py' % file_path, '/tmp/scptest.py')
        scp_cli.put('%s/iperftest.py' % file_path, '/tmp/iperftest.py')

        print '[START TESTS] %s %s <-----> %s %s'\
                % (inst1.itype, inst1.region, inst2.itype, inst2.region)

        for i in range(iteration):

            print '[START ITERATION %s]' % i

            print '[START] netcat'
            stdin, stdout, stderr = ssh_cli.exec_command('python2.7 /tmp/netcattest.py -i %s -u %s -k %s -s %s -t %s'
                                 % (inst2.remote_ip, inst2.user, inst2.ssh_key, filesize, timeout))
            time.sleep(10)
            for _ in range(timeout / 5 + 1):
                stdin, stdout, stderr = ssh_cli.exec_command('[ -f netcat.report ]; echo $?')
                out = stdout.read()
                if out.strip() == '0':
                    stdin, stdout, stderr = ssh_cli.exec_command('cat netcat.report')
                    out = stdout.read()
                    report = json.loads(out)
                    report.update({'inst1':inst1.itype, 'inst2':inst2.itype})
                    report.update({'region1':inst1.region, 'region2':inst2.region})
                    report.update({'cloud1':inst1.cloud, 'cloud2':inst2.cloud})
                    report_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../results/network-io/netcat')
                    if not os.path.exists(report_path):
                        cmd = 'mkdir -p %s' % report_path
                        subps.call(cmd.split())
                        #os.mkdir(report_path)
                    with open('%s/%s-%s__%s-%s' % (report_path, inst1.itype, inst1.region, inst2.itype, inst2.region), 'a+') as f:
                        f.write(json.dumps(report, indent=4, sort_keys=True))
                        f.write('\n')
                    print report['time']
                    break
                else:
                    time.sleep(5)
            print '[END] netcat'

            print '[START] scp'
            stdin, stdout, stderr = ssh_cli.exec_command('python2.7 /tmp/scptest.py -i %s -u %s -k %s -s %s -t %s'
                                 % (inst2.remote_ip, inst2.user, inst2.ssh_key, filesize, timeout))
            time.sleep(10)
            for _ in range(timeout / 5 + 1):
                stdin, stdout, stderr = ssh_cli.exec_command('[ -f scp.report ]; echo $?')
                out = stdout.read()
                if out.strip() == '0':
                    stdin, stdout, stderr = ssh_cli.exec_command('cat scp.report')
                    out = stdout.read()
                    report = json.loads(out)
                    report.update({'inst1':inst1.itype, 'inst2':inst2.itype})
                    report.update({'region1':inst1.region, 'region2':inst2.region})
                    report.update({'cloud1':inst1.cloud, 'cloud2':inst2.cloud})
                    report_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../results/network-io/scp')
                    if not os.path.exists(report_path):
                        cmd = 'mkdir -p %s' % report_path
                        subps.call(cmd.split())
                        #os.mkdir(report_path)
                    with open('%s/%s-%s__%s-%s' % (report_path, inst1.itype, inst1.region, inst2.itype, inst2.region), 'a+') as f:
                        f.write(json.dumps(report, indent=4, sort_keys=True))
                        f.write('\n')
                    print report['time']
                    break
                else:
                    time.sleep(5)
            print '[END] scp'

            print '[START] iperf'
            threads = [1, 4, 8] 
            work_time = 5
            stdin, stdout, stderr = ssh_cli.exec_command('python2.7 /tmp/iperftest.py -i %s -u %s -k %s -p %s -t %s'
                                 % (inst2.remote_ip, inst2.user, inst2.ssh_key, ' '.join(map(str, threads)), work_time))
            time.sleep(10)
            for _ in range(len(threads) * (work_time + 10) / 5 + 1):
                stdin, stdout, stderr = ssh_cli.exec_command('[ -f iperf.report ]; echo $?')
                out = stdout.read()
                if out.strip() == '0':
                    stdin, stdout, stderr = ssh_cli.exec_command('cat iperf.report')
                    out = stdout.read()
                    report = json.loads(out)
                    report.update({'inst1':inst1.itype, 'inst2':inst2.itype})
                    report.update({'region1':inst1.region, 'region2':inst2.region})
                    report.update({'cloud1':inst1.cloud, 'cloud2':inst2.cloud})
                    report_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../results/network-io/iperf')
                    if not os.path.exists(report_path):
                        cmd = 'mkdir -p %s' % report_path
                        subps.call(cmd.split())
                        #os.mkdir(report_path)
                    with open('%s/%s-%s__%s-%s' % (report_path, inst1.itype, inst1.region, inst2.itype, inst2.region), 'a+') as f:
                        f.write(json.dumps(report, indent=4, sort_keys=True))
                        f.write('\n')
                    print report['speed']
                    break
                else:
                    time.sleep(5)
            print '[END] iperf'

        ssh_cli.close()
    except Exception:
        # format_exc() returns the traceback string; print_exc() only
        # prints it and returns None, which the original interpolated.
        print '[EXCEPTION] %s\n' % traceback.format_exc()

    finally:

        inst1.terminate()
        inst2.terminate()
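
The recurring idiom in this example, isolated: os.path.dirname(os.path.abspath(__file__)) gives the directory containing the running script regardless of the caller's working directory, because __file__ can be relative when a script is invoked as "python script.py". HERE and resource() below are illustrative names, not part of the project above.

import os

# Directory containing this script, independent of the caller's cwd.
HERE = os.path.dirname(os.path.abspath(__file__))

def resource(name):
    # Locate a file shipped alongside the script, e.g. a helper
    # uploaded over SCP in the example above.
    return os.path.join(HERE, name)

print(resource('netcattest.py'))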

Example 9

View license
def main():
    """
    TV for me
    """

    # do some preliminary stuff
    sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
    sickbeard.MY_NAME = os.path.basename(sickbeard.MY_FULLNAME)
    sickbeard.PROG_DIR = os.path.dirname(sickbeard.MY_FULLNAME)
    sickbeard.DATA_DIR = sickbeard.PROG_DIR
    sickbeard.MY_ARGS = sys.argv[1:]
    sickbeard.CREATEPID = False
    sickbeard.DAEMON = False

    sickbeard.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        sickbeard.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        sickbeard.SYS_ENCODING = 'UTF-8'

    if not hasattr(sys, "setdefaultencoding"):
        reload(sys)

    try:
        # pylint: disable=E1101
        # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
        sys.setdefaultencoding(sickbeard.SYS_ENCODING)
    except (AttributeError, LookupError):
        print 'Sorry, you MUST add the Sick Beard folder to the PYTHONPATH environment variable'
        print 'or find another way to force Python to use ' + sickbeard.SYS_ENCODING + ' for string encoding.'
        sys.exit(1)

    # Need console logging for SickBeard.py and SickBeard-console.exe
    consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)

    # Rename the main thread
    threading.currentThread().name = "MAIN"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "qfdp::", ['quiet', 'forceupdate', 'daemon', 'port=', 'pidfile=', 'nolaunch', 'config=', 'datadir='])  # @UnusedVariable
    except getopt.GetoptError:
        print "Available Options: --quiet, --forceupdate, --port, --daemon, --pidfile, --config, --datadir"
        sys.exit()

    forceUpdate = False
    forcedPort = None
    noLaunch = False

    for o, a in opts:
        # For now we'll just silence the logging
        if o in ('-q', '--quiet'):
            consoleLogging = False

        # Should we update (from tvdb) all shows in the DB right away?
        if o in ('-f', '--forceupdate'):
            forceUpdate = True

        # Suppress launching web browser
        # Needed for OSes without default browser assigned
        # Prevent duplicate browser window when restarting in the app
        if o in ('--nolaunch',):
            noLaunch = True

        # Override default/configured port
        if o in ('-p', '--port'):
            forcedPort = int(a)

        # Run as a daemon
        if o in ('-d', '--daemon'):
            if sys.platform == 'win32':
                print "Daemonize not supported under Windows, starting normally"
            else:
                consoleLogging = False
                sickbeard.DAEMON = True

        # Specify folder to load the config file from
        if o in ('--config',):
            sickbeard.CONFIG_FILE = os.path.abspath(a)

        # Specify folder to use as the data dir
        if o in ('--datadir',):
            sickbeard.DATA_DIR = os.path.abspath(a)

        # Write a pidfile if requested
        if o in ('--pidfile',):
            sickbeard.PIDFILE = str(a)

            # If the pidfile already exists, sickbeard may still be running, so exit
            if os.path.exists(sickbeard.PIDFILE):
                sys.exit("PID file '" + sickbeard.PIDFILE + "' already exists. Exiting.")

            # The pidfile is only useful in daemon mode, make sure we can write the file properly
            if sickbeard.DAEMON:
                sickbeard.CREATEPID = True
                try:
                    file(sickbeard.PIDFILE, 'w').write("pid\n")
                except IOError, e:
                    raise SystemExit("Unable to write PID file: %s [%d]" % (e.strerror, e.errno))
            else:
                logger.log(u"Not running in daemon mode. PID file creation disabled.")

    # If they don't specify a config file then put it in the data dir
    if not sickbeard.CONFIG_FILE:
        sickbeard.CONFIG_FILE = os.path.join(sickbeard.DATA_DIR, "config.ini")

    # Make sure that we can create the data dir
    if not os.access(sickbeard.DATA_DIR, os.F_OK):
        try:
            os.makedirs(sickbeard.DATA_DIR, 0744)
        except os.error, e:
            raise SystemExit("Unable to create datadir '" + sickbeard.DATA_DIR + "'")

    # Make sure we can write to the data dir
    if not os.access(sickbeard.DATA_DIR, os.W_OK):
        raise SystemExit("Datadir must be writeable '" + sickbeard.DATA_DIR + "'")

    # Make sure we can write to the config file
    if not os.access(sickbeard.CONFIG_FILE, os.W_OK):
        if os.path.isfile(sickbeard.CONFIG_FILE):
            raise SystemExit("Config file '" + sickbeard.CONFIG_FILE + "' must be writeable.")
        elif not os.access(os.path.dirname(sickbeard.CONFIG_FILE), os.W_OK):
            raise SystemExit("Config file root dir '" + os.path.dirname(sickbeard.CONFIG_FILE) + "' must be writeable.")

    os.chdir(sickbeard.DATA_DIR)

    if consoleLogging:
        print "Starting up Sick Beard " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE

    # Load the config and publish it to the sickbeard package
    if not os.path.isfile(sickbeard.CONFIG_FILE):
        logger.log(u"Unable to find '" + sickbeard.CONFIG_FILE + "' , all settings will be default!", logger.ERROR)

    sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)

    # Initialize the config and our threads
    sickbeard.initialize(consoleLogging=consoleLogging)

    sickbeard.showList = []

    if sickbeard.DAEMON:
        daemonize()

    # Use this PID for everything
    sickbeard.PID = os.getpid()

    if forcedPort:
        logger.log(u"Forcing web server to port " + str(forcedPort))
        startPort = forcedPort
    else:
        startPort = sickbeard.WEB_PORT

    if sickbeard.WEB_LOG:
        log_dir = sickbeard.LOG_DIR
    else:
        log_dir = None

    # sickbeard.WEB_HOST is available as a configuration value in various
    # places but is not configurable. It is supported here for historic reasons.
    if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
        webhost = sickbeard.WEB_HOST
    else:
        if sickbeard.WEB_IPV6:
            webhost = '::'
        else:
            webhost = '0.0.0.0'

    try:
        initWebServer({
                      'port': startPort,
                      'host': webhost,
                      'data_root': os.path.join(sickbeard.PROG_DIR, 'gui/'+sickbeard.GUI_NAME),
                      'web_root': sickbeard.WEB_ROOT,
                      'log_dir': log_dir,
                      'username': sickbeard.WEB_USERNAME,
                      'password': sickbeard.WEB_PASSWORD,
                      'enable_https': sickbeard.ENABLE_HTTPS,
                      'https_cert': sickbeard.HTTPS_CERT,
                      'https_key': sickbeard.HTTPS_KEY,
                      })
    except IOError:
        logger.log(u"Unable to start web server, is something else running on port %d?" % startPort, logger.ERROR)
        if sickbeard.LAUNCH_BROWSER and not sickbeard.DAEMON:
            logger.log(u"Launching browser and exiting", logger.ERROR)
            sickbeard.launchBrowser(startPort)
        sys.exit()

    # Build from the DB to start with
    logger.log(u"Loading initial show list")
    loadShowsFromDB()

    # Fire up all our threads
    sickbeard.start()

    # Launch browser if we're supposed to
    if sickbeard.LAUNCH_BROWSER and not noLaunch and not sickbeard.DAEMON:
        sickbeard.launchBrowser(startPort)

    # Start an update if we're supposed to
    if forceUpdate:
        sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable

    # Stay alive while my threads do the work
    while True:

        if sickbeard.invoked_command:
            sickbeard.invoked_command()
            sickbeard.invoked_command = None

        time.sleep(1)

    return
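
The getopt handling above resolves --config and --datadir with os.path.abspath() the moment they are parsed, before os.chdir(sickbeard.DATA_DIR) runs. A hedged argparse sketch of the same idea; the option names are borrowed from the example, but none of this is Sick Beard's actual code.

import os
import argparse

parser = argparse.ArgumentParser()
# type=os.path.abspath resolves the option as soon as it is parsed,
# so a later os.chdir() cannot change what a relative path pointed at.
parser.add_argument('--config', type=os.path.abspath, default='config.ini')
parser.add_argument('--datadir', type=os.path.abspath,
                    default=os.path.dirname(os.path.abspath(__file__)))

args = parser.parse_args([])   # empty argv, just to show the defaults
print(args.config)
print(args.datadir)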

Example 10

Project: SickGear
Source File: SickBeard.py
View license
    def start(self):
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
        sickbeard.MY_NAME = os.path.basename(sickbeard.MY_FULLNAME)
        sickbeard.PROG_DIR = os.path.dirname(sickbeard.MY_FULLNAME)
        sickbeard.DATA_DIR = sickbeard.PROG_DIR
        sickbeard.MY_ARGS = sys.argv[1:]
        sickbeard.SYS_ENCODING = None

        try:
            locale.setlocale(locale.LC_ALL, '')
        except (locale.Error, IOError):
            pass
        try:
            sickbeard.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            pass

        # For OSes that are poorly configured I'll just randomly force UTF-8
        if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
            sickbeard.SYS_ENCODING = 'UTF-8'

        if not hasattr(sys, 'setdefaultencoding'):
            moves.reload_module(sys)

        try:
            # pylint: disable=E1101
            # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(sickbeard.SYS_ENCODING)
        except (AttributeError, LookupError):
            print('Sorry, you MUST add the SickGear folder to the PYTHONPATH environment variable')
            print('or find another way to force Python to use %s for string encoding.' % sickbeard.SYS_ENCODING)
            sys.exit(1)

        # Need console logging for SickBeard.py and SickBeard-console.exe
        self.consoleLogging = (not hasattr(sys, 'frozen')) or (sickbeard.MY_NAME.lower().find('-console') > 0)

        # Rename the main thread
        threading.currentThread().name = 'MAIN'

        try:
            opts, args = getopt.getopt(sys.argv[1:], 'hfqdp::',
                                       ['help', 'forceupdate', 'quiet', 'nolaunch', 'daemon', 'pidfile=', 'port=',
                                        'datadir=', 'config=', 'noresize'])  # @UnusedVariable
        except getopt.GetoptError:
            sys.exit(self.help_message())

        for o, a in opts:
            # Prints help message
            if o in ('-h', '--help'):
                sys.exit(self.help_message())

            # For now we'll just silence the logging
            if o in ('-q', '--quiet'):
                self.consoleLogging = False

            # Should we update (from indexer) all shows in the DB right away?
            if o in ('-f', '--forceupdate'):
                self.forceUpdate = True

            # Suppress launching web browser
            # Needed for OSes without default browser assigned
            # Prevent duplicate browser window when restarting in the app
            if o in ('--nolaunch',):
                self.noLaunch = True

            # Override default/configured port
            if o in ('-p', '--port'):
                try:
                    self.forcedPort = int(a)
                except ValueError:
                    sys.exit('Port: %s is not a number. Exiting.' % a)

            # Run as a double forked daemon
            if o in ('-d', '--daemon'):
                self.runAsDaemon = True
                # When running as daemon disable consoleLogging and don't start browser
                self.consoleLogging = False
                self.noLaunch = True

                if sys.platform == 'win32':
                    self.runAsDaemon = False

            # Write a pidfile if requested
            if o in ('--pidfile',):
                self.CREATEPID = True
                self.PIDFILE = str(a)

                # If the pidfile already exists, sickbeard may still be running, so exit
                if os.path.exists(self.PIDFILE):
                    sys.exit('PID file: %s already exists. Exiting.' % self.PIDFILE)

            # Specify folder to load the config file from
            if o in ('--config',):
                sickbeard.CONFIG_FILE = os.path.abspath(a)

            # Specify folder to use as the data dir
            if o in ('--datadir',):
                sickbeard.DATA_DIR = os.path.abspath(a)

            # Prevent resizing of the banner/posters even if PIL is installed
            if o in ('--noresize',):
                sickbeard.NO_RESIZE = True

        # The pidfile is only useful in daemon mode, make sure we can write the file properly
        if self.CREATEPID:
            if self.runAsDaemon:
                pid_dir = os.path.dirname(self.PIDFILE)
                if not os.access(pid_dir, os.F_OK):
                    sys.exit(u"PID dir: %s doesn't exist. Exiting." % pid_dir)
                if not os.access(pid_dir, os.W_OK):
                    sys.exit(u'PID dir: %s must be writable (write permissions). Exiting.' % pid_dir)

            else:
                if self.consoleLogging:
                    print(u'Not running in daemon mode. PID file creation disabled')

                self.CREATEPID = False

        # If they don't specify a config file then put it in the data dir
        if not sickbeard.CONFIG_FILE:
            sickbeard.CONFIG_FILE = os.path.join(sickbeard.DATA_DIR, 'config.ini')

        # Make sure that we can create the data dir
        if not os.access(sickbeard.DATA_DIR, os.F_OK):
            try:
                os.makedirs(sickbeard.DATA_DIR, 0o744)
            except os.error:
                sys.exit(u'Unable to create data directory: %s Exiting.' % sickbeard.DATA_DIR)

        # Make sure we can write to the data dir
        if not os.access(sickbeard.DATA_DIR, os.W_OK):
            sys.exit(u'Data directory: %s must be writable (write permissions). Exiting.' % sickbeard.DATA_DIR)

        # Make sure we can write to the config file
        if not os.access(sickbeard.CONFIG_FILE, os.W_OK):
            if os.path.isfile(sickbeard.CONFIG_FILE):
                sys.exit(u'Config file: %s must be writeable (write permissions). Exiting.' % sickbeard.CONFIG_FILE)
            elif not os.access(os.path.dirname(sickbeard.CONFIG_FILE), os.W_OK):
                sys.exit(u'Config file directory: %s must be writeable (write permissions). Exiting'
                         % os.path.dirname(sickbeard.CONFIG_FILE))
        os.chdir(sickbeard.DATA_DIR)

        if self.consoleLogging:
            print(u'Starting up SickGear from %s' % sickbeard.CONFIG_FILE)

        # Load the config and publish it to the sickbeard package
        if not os.path.isfile(sickbeard.CONFIG_FILE):
            print(u'Unable to find "%s", all settings will be default!' % sickbeard.CONFIG_FILE)

        sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)

        # check all db versions
        for d, min_v, max_v, mo in [
            ('failed.db', sickbeard.failed_db.MIN_DB_VERSION, sickbeard.failed_db.MAX_DB_VERSION, 'FailedDb'),
            ('cache.db', sickbeard.cache_db.MIN_DB_VERSION, sickbeard.cache_db.MAX_DB_VERSION, 'CacheDb'),
            ('sickbeard.db', sickbeard.mainDB.MIN_DB_VERSION, sickbeard.mainDB.MAX_DB_VERSION, 'MainDb')
        ]:
            cur_db_version = db.DBConnection(d).checkDBVersion()

            if cur_db_version > 0:
                if cur_db_version < min_v:
                    print(u'Your [%s] database version (%s) is too old to migrate from with this version of SickGear'
                          % (d, cur_db_version))
                    sys.exit(u'Upgrade using a previous version of SG first,'
                             + u' or start with no database file to begin fresh')
                if cur_db_version > max_v:
                    print(u'Your [%s] database version (%s) has been incremented past'
                          u' what this version of SickGear supports. Trying to rollback now. Please wait...' %
                          (d, cur_db_version))
                    try:
                        rollback_loaded = db.get_rollback_module()
                        if None is not rollback_loaded:
                            rollback_loaded.__dict__[mo]().run(max_v)
                        else:
                            print(u'ERROR: Could not download Rollback Module.')
                    except (StandardError, Exception):
                        pass
                    if db.DBConnection(d).checkDBVersion() > max_v:
                        print(u'Rollback failed.')
                        sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
                    print(u'Rollback of [%s] successful.' % d)

        # Initialize the config and our threads
        sickbeard.initialize(consoleLogging=self.consoleLogging)

        if self.runAsDaemon:
            self.daemonize()

        # Get PID
        sickbeard.PID = os.getpid()

        if self.forcedPort:
            logger.log(u'Forcing web server to port %s' % self.forcedPort)
            self.startPort = self.forcedPort
        else:
            self.startPort = sickbeard.WEB_PORT

        if sickbeard.WEB_LOG:
            self.log_dir = sickbeard.LOG_DIR
        else:
            self.log_dir = None

        # sickbeard.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
            self.webhost = sickbeard.WEB_HOST
        else:
            if sickbeard.WEB_IPV6:
                self.webhost = '::'
            else:
                self.webhost = '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.startPort),
            'host': self.webhost,
            'data_root': os.path.join(sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME),
            'web_root': sickbeard.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': sickbeard.WEB_USERNAME,
            'password': sickbeard.WEB_PASSWORD,
            'enable_https': sickbeard.ENABLE_HTTPS,
            'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
            'https_cert': os.path.join(sickbeard.PROG_DIR, sickbeard.HTTPS_CERT),
            'https_key': os.path.join(sickbeard.PROG_DIR, sickbeard.HTTPS_KEY),
        }

        # start web server
        try:
            # used to check if existing SG instances have been started
            sickbeard.helpers.wait_for_free_port(self.web_options['host'], self.web_options['port'])

            self.webserver = WebServer(self.web_options)
            self.webserver.start()
        except Exception:
            logger.log(u'Unable to start web server, is something else running on port %d?' % self.startPort,
                       logger.ERROR)
            if sickbeard.LAUNCH_BROWSER and not self.runAsDaemon:
                logger.log(u'Launching browser and exiting', logger.ERROR)
                sickbeard.launch_browser(self.startPort)
            os._exit(1)

        # Check if we need to perform a restore first
        restoreDir = os.path.join(sickbeard.DATA_DIR, 'restore')
        if os.path.exists(restoreDir):
            if self.restore(restoreDir, sickbeard.DATA_DIR):
                logger.log(u'Restore successful...')
            else:
                logger.log_error_and_exit(u'Restore FAILED!')

        # Build from the DB to start with
        self.loadShowsFromDB()

        # Fire up all our threads
        sickbeard.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # refresh network timezones
        network_timezones.update_network_dict()

        # load all ids from xem
        startup_background_tasks = threading.Thread(name='FETCH-XEMDATA', target=sickbeard.scene_exceptions.get_xem_ids)
        startup_background_tasks.start()

        # sure, why not?
        if sickbeard.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # Start an update if we're supposed to
        if self.forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
            sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable

        # Launch browser
        if sickbeard.LAUNCH_BROWSER and not (self.noLaunch or self.runAsDaemon):
            sickbeard.launch_browser(self.startPort)

        # main loop
        while True:
            time.sleep(1)
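
One detail worth noting from these two examples: os.path.abspath() already returns a normalized path. On most platforms it is equivalent to normpath(join(os.getcwd(), path)), so the os.path.normpath() wrapper around it above is harmless but redundant.

import os

p = os.path.abspath('a/b/../c')
assert p == os.path.normpath(p)   # normalizing again changes nothing
print(p)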

Example 11

Project: Subterfuge
Source File: attackctrl.py
View license
def attack(method):
    notification_attackctrl("init")
    print "Starting Pwn Ops..."
    
        #Determine Active Vectors
    acp, apgenatk, wpad = getvectors()
    target = ""
    
        #Launch Attacks
        #ARP Cache Poison
    if acp == "yes":
            #Auto Pwn Method
        if (method == "auto"):
            print "Running AutoPwn Method..."
                #AutoConfig
            autoconfig()
            interface, gateway, attackerip, routermac, smartarp, proxymode = getinfo()
            
                #Begin Attack Setup
            print "Automatically Configuring Subterfuge..."
            iptablesconfig(proxymode)
            print "Initiating ARP Poison With ARPMITM..."
            
               #Get Poison Options
            for info in arppoison.objects.all():
               target     = info.target
               method     = info.method
               
                #Check for poison single/all
            if (method == "single"):
               try:
                  print "Poisoning: " + target
                  command = 'python ' + os.path.dirname(os.path.abspath(__file__)) + '/utilities/arpmitm.py -s ' + target + " " + gateway + ' &'
               except:
                  notification_attackctrl("no-single-target")
                  print "Could not poison single target: no target found!"
            else:
               print "Poisoning: Network"
               command = 'python ' + os.path.dirname(os.path.abspath(__file__)) + '/utilities/arpmitm.py ' + gateway + ' &'
               
                #ARP Cache Poison through Subterfuge:
            os.system(command)
            if proxymode == "sslstrip":
               print "Starting up SSLstrip..."
               sslstrip()
            elif proxymode == "mitmproxy":
               print "Starting up the MITM Attack Proxy..."
               mitmproxy()
            sessionhijack()

                #Get & Log Router Mac
            if (os.path.exists(os.path.dirname(os.path.abspath(__file__)) + "/arpmitm.txt")):
                f = open(os.path.dirname(os.path.abspath(__file__)) + "/arpmitm.txt", 'r')
                mac = f.readline()
                macaddr = mac.rstrip("\n")
                setup.objects.update(routermac = macaddr)

            #os.system("python " + str(os.path.dirname(__file__)) + "/mitm.py -a &")
                
                 #Check for ARPWatch
            if (smartarp == "yes"):
               try:
                  os.system("python " + str(os.path.dirname(__file__)) + "/utilities/arpwatch.py " + gateway + " " + routermac + " " + attackerip + " &")
                
               except:
                   notification_attackctrl("arpwatch-no-rmac")
                   print "Encountered an error configuring arpwatch: Router MAC Address Unknown."
            
            #Standard Attack Method
        else:
            interface, gateway, attackerip, routermac, smartarp, proxymode = getinfo()
            
                #Begin Attack Setup
            print "Automatically Configuring Subterfuge..."
            iptablesconfig(proxymode)
            print "Initiating ARP Poison With ARPMITM..."
            
               #Get Poison Options
            for info in arppoison.objects.all():
               target     = info.target
               
                #Check for poison single/all
            if (method == "single"):
               command = 'python ' + os.path.dirname(os.path.abspath(__file__)) + '/utilities/arpmitm.py -s ' + target + " " + gateway + ' &'
            else:
               command = 'python ' + os.path.dirname(os.path.abspath(__file__)) + '/utilities/arpmitm.py ' + gateway + ' &'
               
                #ARP Cache Poison through Subterfuge:
            os.system(command)
            iptablesconfig(proxymode)
            if proxymode == "sslstrip":
               print "Starting up SSLstrip..."
               sslstrip()
            elif proxymode == "mitmproxy":
               print "Starting up the MITM Attack Proxy..."
               mitmproxy()
            sessionhijack()
    
                #Get & Log Router Mac
            if (os.path.exists(os.path.dirname(os.path.abspath(__file__)) + "/arpmitm.txt")):
                f = open(os.path.dirname(os.path.abspath(__file__)) + "/arpmitm.txt", 'r')
                mac = f.readline()
                macaddr = mac.rstrip("\n")
                setup.objects.update(routermac = macaddr)
                
                #Check for ARPWatch
            if (smartarp == "yes"):
                os.system("python " + str(os.path.dirname(__file__)) + "/utilities/arpwatch.py " + gateway + " " + routermac + " " + attackerip + " &")
                
            else:
                print "Dynamic ARP Retention is disabled."
                
        #Wireless AP Generator
    if apgenatk == "yes":
            #Get Attack Info
        for info in apgen.objects.all():
            essid     = info.essid
            channel   = info.channel
            atknic    = info.atknic
            netnic    = info.netnic
        
        print "Launching Access Point Generation Attack..."
        cmd = "xterm -e sh -c 'python " + str(os.path.dirname(__file__)) + "/utilities/apgen.py " + essid + " " + atknic + " " + netnic + "' &"
        print cmd
        os.system(cmd)
        
            #Begin MITM Attack Setup
        interface, gateway, attackerip, routermac, smartarp, proxymode = getinfo()
        print "Automatically Configuring Subterfuge..."
        iptablesconfig(proxymode)
        if proxymode == "sslstrip":
           print "Starting up SSLstrip..."
           sslstrip()
        elif proxymode == "mitmproxy":
           print "Starting up the MITM Attack Proxy..."
           mitmproxy()

        sessionhijack()
        
        #WPAD Hijacking
    if wpad == "yes":
        #Auto Pwn Method
        print "Running AutoPwn Method..."
            #AutoConfig
        autoconfig()
        interface, gateway, attackerip, routermac, smartarp, proxymode = getinfo()
        
            #Begin MITM Attack Setup
            #Begin Attack Setup
            #No IPTables SSLStrip Configuration necessary for WPAD Hijacking
        #print "Automatically Configuring Subterfuge..."
        #iptablesconfig(proxymode)
            #Flush IPTables
        print "Flushing IPTables for WPAD Hijacking"
        os.system("iptables -t nat -F")
        print "Starting up SSLstrip..."
        sslstrip()
        sessionhijack()
            #Execute WPAD Hijacking
        os.system("python " + str(os.path.dirname(__file__)) + "/utilities/wpadhijack.py " + gateway + " " + routermac + " " + attackerip + " &")
        
        #Start Up Modules
    modules()
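
This example builds shell commands by concatenating os.path.dirname(os.path.abspath(__file__)) with script names and arguments. Below is a sketch of the same module-relative lookup using an argument list via subprocess instead of os.system(), which sidesteps quoting problems; launch_helper and the script name are illustrative, not Subterfuge's code.

import os
import subprocess

UTIL_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'utilities')

def launch_helper(script, *args):
    # Argument list instead of a concatenated shell string, so values
    # containing spaces or shell metacharacters cannot break the command.
    cmd = ['python', os.path.join(UTIL_DIR, script)] + list(args)
    return subprocess.Popen(cmd)   # runs in the background, like '... &'

# e.g. launch_helper('arpmitm.py', gateway)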

Example 12

Project: WAPT
Source File: install.py
View license
    def run(self, options, args):

        if (
            options.no_install or
            options.no_download or
            (options.build_dir != build_prefix) or
            options.no_clean
        ):
            logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
                              'and --no-clean are deprecated.  See https://github.com/pypa/pip/issues/906.')

        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
                raise CommandError("Target path exists but is not a directory, will not continue.")
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                        "--use-mirrors has been deprecated and will be removed"
                        " in the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                        "--mirrors has been deprecated and will be removed in "
                        " the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site,
            target_dir=temp_target_dir,
            session=session,
            pycompile=options.compile,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options, session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        try:
            if not options.no_download:
                requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
            else:
                requirement_set.locate_files()

            if not options.no_install and not self.bundle:
                requirement_set.install(install_options, global_options, root=options.root_path)
                installed = ' '.join([req.name for req in
                                      requirement_set.successfully_installed])
                if installed:
                    logger.notify('Successfully installed %s' % installed)
            elif not self.bundle:
                downloaded = ' '.join([req.name for req in
                                       requirement_set.successfully_downloaded])
                if downloaded:
                    logger.notify('Successfully downloaded %s' % downloaded)
            elif self.bundle:
                requirement_set.create_bundle(self.bundle_filename)
                logger.notify('Created bundle in %s' % self.bundle_filename)
        except PreviousBuildDirError:
            options.no_clean = True
            raise
        finally:
            # Clean up
            if (not options.no_clean) and ((not options.no_install) or options.download_dir):
                requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(
                    os.path.join(lib_dir, item),
                    os.path.join(options.target_dir, item)
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set
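
Worth noting about the abspath calls on build_dir, src_dir and target_dir above: os.path.abspath() never touches the filesystem, so it is safe to call on paths that do not exist yet. A small demonstration:

import os

build_dir = os.path.abspath('build/tmp')
print(build_dir)                       # absolute, nothing created yet
os.makedirs(build_dir, exist_ok=True)  # creation happens separately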

Example 13

Project: nesoni
Source File: shrimp.py
View license
def main(args):
    grace.require_shrimp_1()

    n_cpus = grace.how_many_cpus()
        
    solid, args = grace.get_flag(args, '--solid')
    verbose, args = grace.get_flag(args, '--verbose')

    threshold, args = grace.get_option_value(args, '--threshold', str, '68%')
    
    stride, args = grace.get_option_value(args, '--stride', int, 1)
    max_shrimps, args = grace.get_option_value(args, '--cpus', int, n_cpus)
    batch_size, args = grace.get_option_value(args, '--batch-size', int, 5000000)
        
    input_reference_filenames = [ ]
    reads_filenames = [ ]
    
    shrimp_options = [ '-h', threshold ]
    if threshold.endswith('%'):
        threshold = -float(threshold[:-1])/100.0
    else:
        threshold = int(threshold)
    
    output_dir = [ ]  #As list so can write to from function. Gah.
    
    def front_command(args):
        grace.expect_no_further_options(args)
        
        if len(args) < 1:
            return
        
        output_dir.append(args[0])        
        input_reference_filenames.extend(
            [ os.path.abspath(filename) for filename in args[1:] ])
    def reads_command(args):
        grace.expect_no_further_options(args)
        reads_filenames.extend([ [ os.path.abspath(filename) ] for filename in args])
    def pairs_command(args):
        grace.expect_no_further_options(args)
        assert len(args) == 2, 'Expected exactly two files in "pairs"'
        reads_filenames.append([ os.path.abspath(filename) for filename in args ])
    def shrimp_options_command(args):
        shrimp_options.extend(args)
    
    grace.execute(args, {
        'reads': reads_command,
        '--reads': reads_command,
        'pairs': pairs_command,
        'shrimp-options': shrimp_options_command,
        '--shrimp-options': shrimp_options_command,
    }, front_command)
    
    
    if not output_dir:
        print >> sys.stderr, USAGE % n_cpus
        return 1
    
    output_dir = output_dir[0]
    
    assert input_reference_filenames, 'No reference files given'
    assert reads_filenames, 'No read files given'
    
    for filename in itertools.chain(input_reference_filenames, *reads_filenames):
        assert os.path.exists(filename), '%s does not exist' % filename

    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)
    
    if solid:
        shrimp = 'rmapper-cs'
    else:
        shrimp = 'rmapper-ls'
    
    
    reference_filename = os.path.join(output_dir,'reference.fa')
    reference_file = open(reference_filename,'wb')
    total_reference_sequences = 0
    total_reference_bases = 0
    for input_reference_filename in input_reference_filenames:
        for name, sequence in io.read_sequences(input_reference_filename):
            #Don't retain any comment
            name = name.split()[0]
            io.write_fasta(reference_file, name, sequence)
            
            total_reference_sequences += 1
            total_reference_bases += len(sequence)
            
    reference_file.close()
    
    print '%s base%s in %s reference sequence%s' % (
        grace.pretty_number(total_reference_bases), 's' if total_reference_bases != 1 else '',
        grace.pretty_number(total_reference_sequences), 's' if total_reference_sequences != 1 else '')
    
    assert total_reference_bases, 'Reference sequence file is empty' 
    
    config = {
        'references' : input_reference_filenames,
        'reads' : reads_filenames,
        'stride' : stride,
        'solid': solid,
        'threshold': threshold,
    }
    config_file = open(os.path.join(output_dir, 'config.txt'), 'wb')
    pprint.pprint(config, config_file)
    config_file.close()
    
    output_filename = os.path.join(output_dir, 'shrimp_hits.txt.gz')
    output_file = gzip.open(output_filename, 'wb')
    
    unmapped_filename = os.path.join(output_dir, 'unmapped.fa.gz')
    unmapped_file = gzip.open(unmapped_filename, 'wb')
    
    dirty_filenames = set()
    dirty_filenames.add(output_filename)
    dirty_filenames.add(unmapped_filename)
    
    #warn_low_threshold = True
    
    try: #Cleanup temporary files
        
        N = [0]
        def do_shrimp(read_set):
            my_number = N[0]
            N[0] += 1
            
            tempname = os.path.join(output_dir,'temp%d-%d.fa' % (os.getpid(),my_number))
            tempname_out = os.path.join(output_dir,'temp%d-%d.txt' % (os.getpid(),my_number))
            
            dirty_filenames.add(tempname)
            dirty_filenames.add(tempname_out)
            
            f = open(tempname,'wb')
            for read_name, read_seq in read_set:
                print >> f, '>' + read_name
                print >> f, read_seq
            f.close()
        
            command = shrimp + ' ' + ' '.join(shrimp_options) + ' ' + \
                      tempname + ' ' + reference_filename + ' >' + tempname_out
            if not verbose:
                command += ' 2>/dev/null'
            #f = os.popen(command, 'r')
            child_pid = os.spawnl(os.P_NOWAIT,'/bin/sh','/bin/sh','-c',command)
            #print 'SHRiMP %d running' % my_number
            
            def finalize():
                exit_status = os.waitpid(child_pid, 0)[1]
                assert exit_status == 0, 'Shrimp indicated an error'
                
                hits = { } # read_name -> [ hit line ]
                
                f = open(tempname_out,'rb')
                for line in f:
                    if line.startswith('>'):
                        read_name = line.split(None,1)[0][1:]
                        if read_name not in hits:
                            hits[read_name] = [ ]
                        hits[read_name].append(line)
                f.close()
                                
                for read_name, read_seq in read_set:
                    if read_name in hits:
                        for hit in hits[read_name]:
                            output_file.write(hit)
                    else:
                        print >> unmapped_file, '>' + read_name
                        print >> unmapped_file, read_seq

                output_file.flush()
                unmapped_file.flush()
        
                os.unlink(tempname)
                dirty_filenames.remove(tempname)
                os.unlink(tempname_out)
                dirty_filenames.remove(tempname_out)
                #print 'SHRiMP %d finished' % my_number
            return finalize
        
        
        shrimps = [ ]
        
        reader = iter_reads(config)
        read_count = 0
        
        while True:
            read_set = [ ]
            read_set_bases = 0

            #Read name should not include comment cruft
            # - SHRIMP passes this through
            # - might stuff up identification of pairs
            
            for read_name, read_seq in reader:
                read_name = read_name.split()[0]                
                read_set.append((read_name, read_seq))
                read_set_bases += len(read_seq)
                
                #if warn_low_threshold and len(read_seq)*7 < threshold: #Require 70% exact match
                #    sys.stderr.write('\n*** WARNING: Short reads, consider reducing --threshold ***\n\n')                    
                #    warn_low_threshold = False
            
                read_count += 1
                if read_set_bases >= batch_size: break
                
            if not read_set: break
        
            if len(shrimps) >= max_shrimps:
                shrimps.pop(0)()
            shrimps.append( do_shrimp(read_set) )
            
            grace.status('SHRiMPing %s' % grace.pretty_number(read_count))
        
        while shrimps:
            grace.status('Waiting for SHRiMPs to finish %d ' % len(shrimps) )
            shrimps.pop(0)()
        
        grace.status('')
        
        output_file.close()
        dirty_filenames.remove(output_filename)
        unmapped_file.close()
        dirty_filenames.remove(unmapped_filename)
        
        return 0

    finally:
        for filename in dirty_filenames:
            if os.path.exists(filename):
                os.unlink(filename)
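
A minimal, runnable sketch of the cleanup idiom used above: every temporary file is registered in a "dirty" set as soon as it is created, and anything still registered when the try block exits is deleted in the finally clause. The names here (run_with_cleanup, output_dir, the file contents) are illustrative, not from the project:

import os
import tempfile

def run_with_cleanup(output_dir):
    dirty_filenames = set()
    try:
        tempname = os.path.join(output_dir, 'temp%d.txt' % os.getpid())
        dirty_filenames.add(tempname)
        with open(tempname, 'w') as f:
            f.write('work in progress\n')
        # ... real work happens here; on success the temp file is consumed ...
        os.unlink(tempname)
        dirty_filenames.remove(tempname)
    finally:
        # Any file still registered was left behind by an error.
        for filename in dirty_filenames:
            if os.path.exists(filename):
                os.unlink(filename)

run_with_cleanup(tempfile.mkdtemp())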

Example 14

Project: vmflib
Source File: buildbsp.py
View license
def main():
    parser = _make_arg_parser()
    args = parser.parse_args()
    game = GAMES[args.game]
    username = args.username  # May be None
    vmf_file = os.path.abspath(args.map)
    path, filename = os.path.split(vmf_file)
    mapname = filename[:-4]
    mappath = os.path.join(path, mapname)
    bsp_file = os.path.join(path, mapname + ".bsp")
    sourcesdk = None
    winsteam = args.steam_windows_path
    if not winsteam:
        winsteam = os.getenv('winsteam')

    # We need to find out where the SteamApps directory is.
    if winsteam:
        steamapps = os.path.join(winsteam, "Steamapps")
        if not os.path.isdir(steamapps):  # Try lowercase
            steamapps = os.path.join(winsteam, "steamapps")
        if not os.path.isdir(steamapps):
            raise Exception(
                "The provided Steam directory does not contain a Steamapps directory: %s" %
                os.path.abspath(winsteam)
            )
    elif WIN32 or CYGWIN:
        sourcesdk = os.getenv('sourcesdk')
        if CYGWIN:
            def cygwin2dos(path):
                return subprocess.check_output(["cygpath", '-w', '%s' % path], universal_newlines=True).strip()
            sourcesdk = subprocess.check_output(["cygpath", sourcesdk], universal_newlines=True).strip()
        sourcesdk = os.path.abspath(sourcesdk)
        steamapps = os.path.dirname(os.path.dirname(sourcesdk))
        if not os.path.isdir(steamapps):
            raise Exception("Steamapps directory could not be found. Please specify using --steam-windows-path or see --help.")
        if not username:
            username = os.path.basename(os.path.dirname(sourcesdk))
    else:
        raise Exception("Unable to determine where your (Windows) Steam installation is located. See --help.")
    steamapps = os.path.abspath(steamapps)

    # Prepare some useful paths
    gamedir = os.path.join(steamapps, game.get_game_dir(username))
    mapsdir = os.path.join(gamedir, "maps")

    # Get path to correct bin tools directory (game or SDK)
    if game.uses_sdk:
        if not sourcesdk:
            # Try finding SDK within Steamapps
            # TODO
            raise Exception("Sorry, SDK games aren't implemented right now unless you're on Windows.")
        toolsdir = os.path.join(sourcesdk, "bin", "orangebox", "bin")
    else:
        toolsdir = os.path.abspath(os.path.join(gamedir, "..", "bin"))
    
    # Make sure gamedir path seems legit
    if not os.path.isfile(os.path.join(gamedir, "gameinfo.txt")):
        raise Exception("Game directory does not contain a gameinfo.txt: %s" % gamedir)

    if WIN32 or CYGWIN:
        # Convert some paths if using Cygwin
        if CYGWIN:
            gamedir = cygwin2dos(gamedir)
            mappath = cygwin2dos(mappath)

        # Change working directory first because VBSP is dumb
        os.chdir(os.path.join(sourcesdk, 'bin', 'orangebox'))

        # Run the SDK tools
        vbsp_exe = os.path.join(toolsdir, "vbsp.exe")
        code = subprocess.call([vbsp_exe, '-game', gamedir, mappath])
        print("VBSP finished with status %s." % code)

        if code == 1:
            print("Looks like SteamService isn't working. Try reopening Steam.")
            exit(code)
        elif code == -11:
            print("Looks like you might have gotten the 'material not found' " +
                "error messages. Try signing into Steam, or restarting it " +
                "and signing in.")
            exit(code)
        elif code != 0:
            print("Looks like VBSP crashed, but I'm not sure why.")
            exit(code)

        vvis_exe = os.path.join(toolsdir, "vvis.exe")
        opts = [vvis_exe]
        if args.fast:
            opts.append('-fast')
        opts.extend(['-game', gamedir, mappath])
        subprocess.call(opts)

        vrad_exe = os.path.join(toolsdir, "vrad.exe")
        opts = [vrad_exe]
        if args.fast:
            opts.extend(['-bounce', '2', '-noextra'])
        if args.hdr:
            opts.append('-both')
        if args.hdr and args.final:
            opts.append('-final')
        opts.extend(['-game', gamedir, mappath])
        subprocess.call(opts)

        # Install the map to the game's map directory (unless --no-install)
        if not args.no_install:
            print("Copying map %s to %s" % (mapname, mapsdir))
            shutil.copy(bsp_file, mapsdir)
        else:
            print("Not installing map")

        # Launch the game (unless --no-run or --no-install)
        if not args.no_run and not args.no_install:
            params = urllib.parse.quote("-dev -console -allowdebug +map %s" % mapname)
            run_url = "steam://run/%d//%s" % (game['id'], params)
            print(run_url)
            webbrowser.open(run_url)
            if CYGWIN:
                print("\nYou're running cygwin, so I can't launch the game for you.")
                print("Double-click the URL above, right-click, and click 'Open'.")
                print("Or paste the URL above into the Windows 'Run...' dialog.")
                print("Or, just run 'map %s' in the in-game console." % mapname)
        else:
            print("Not launching game")
    elif LINUX:
        # Environment to use with wine calls
        env = os.environ.copy()
        env['WINEPREFIX'] = os.path.expanduser("~/.winesteam")
        
        # Define path-converting helper function
        def unix2wine(path):
            return subprocess.check_output(["winepath", '-w', '%s' % path], env=env).strip()
        
        # Wine-ify some of our paths
        gamedir = unix2wine(gamedir)
        mappath = unix2wine(mappath)

        # Tell wine to look for DLLs here
        #env['WINEDLLPATH'] = os.path.join(sourcesdk, "bin")
        
        #print("WINEDLLPATH is as follows: ", env['WINEDLLPATH'])

        # Use native maps directory instead of the Wine installation's
        mapsdir = os.path.join('~', '.steam', 'steam', 'SteamApps', game.get_game_dir(username), "maps")
        mapsdir = os.path.expanduser(mapsdir)

        # Change working directory first because VBSP is dumb
        #os.chdir(os.path.join(sourcesdk, 'bin', 'orangebox'))
        
        print("Using -game dir: %s" % gamedir)
        
        # We now need to set the VPROJECT env variable
        env['VPROJECT'] = gamedir

        # Run the SDK tools
        vbsp_exe = os.path.join(toolsdir, "vbsp.exe")
        code = subprocess.call(['wine', vbsp_exe, '-game', gamedir, mappath], env=env)
        print("VBSP finished with status %s." % code)

        # Handle various exit status codes VBPS may have returned
        if code == 1:
            print("\nLooks like VBSP crashed, possibly due to invalid geometry in the map. Check the output above.")
            print("\It could also be related to SteamService isn't working. Try re(launching) wine's Steam:")
            steambin = os.path.join(os.path.dirname(steamapps), 'steam.exe')
            print('\nWINEPREFIX="%s" wine "%s" -no-dwrite' % (env['WINEPREFIX'], steambin))
            exit(code)
        elif code == -11:
            print("\nLooks like you might have gotten the 'material not found' " +
                "error messages. Try signing into Steam, or restarting it " +
                "and signing in.")
            exit(code)
        elif code != 0:
            print("\nLooks like VBSP crashed, but I'm not sure why.")
            exit(code)

        vvis_exe = os.path.join(toolsdir, "vvis.exe")
        opts = ['wine', vvis_exe]
        if args.fast:
            opts.append('-fast')
        opts.extend(['-game', gamedir, mappath])
        code = subprocess.call(opts, env=env)

        if code != 0:
            print("\nLooks like VVIS crashed, but I'm not sure why.")
            exit(code)

        vrad_exe = os.path.join(toolsdir, "vrad.exe")
        opts = ['wine', vrad_exe]
        if args.fast:
            opts.extend(['-bounce', '2', '-noextra'])
        if args.hdr:
            opts.append('-both')
        if args.hdr and args.final:
            opts.append('-final')
        opts.extend(['-game', gamedir, mappath])
        code = subprocess.call(opts, env=env)
        
        if code != 0:
            print("\nLooks like VRAD crashed, but I'm not sure why.")
            exit(code)

        # Install the map to the game's map directory (unless --no-install)
        if not args.no_install:
            shutil.copy(bsp_file, mapsdir)
        else:
            print("Not installing map")

        # Launch the game (unless --no-run or --no-install)
        if not args.no_run and not args.no_install:
            params = urllib.parse.quote("-dev -console -allowdebug +map %s" % mapname)
            run_url = "steam://run/%d//%s" % (game.id, params)
            print(run_url)
            webbrowser.open(run_url)
        else:
            print("Not launching game")
    else:
        raise OSError('Your OS is not supported yet!')
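
The path handling at the top of main() is the part that leans on os.path.abspath: the user-supplied .vmf path is made absolute first, so every derived path stays valid even after the later os.chdir() calls. A small sketch of that one step (resolve_map_paths is a hypothetical name; os.path.splitext is used here as a more robust alternative to the filename slicing in the original):

import os

def resolve_map_paths(map_arg):
    # Anchor the map path before any chdir(), then derive the rest.
    vmf_file = os.path.abspath(map_arg)
    path, filename = os.path.split(vmf_file)
    mapname, _ = os.path.splitext(filename)
    bsp_file = os.path.join(path, mapname + ".bsp")
    return vmf_file, mapname, bsp_file

print(resolve_map_paths("maps/example.vmf"))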

Example 15

Project: Sick-Beard-TPB
Source File: SickBeard.py
View license
def main():
    """
    TV for me
    """

    # do some preliminary stuff
    sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
    sickbeard.MY_NAME = os.path.basename(sickbeard.MY_FULLNAME)
    sickbeard.PROG_DIR = os.path.dirname(sickbeard.MY_FULLNAME)
    sickbeard.DATA_DIR = sickbeard.PROG_DIR
    sickbeard.MY_ARGS = sys.argv[1:]
    sickbeard.CREATEPID = False
    sickbeard.DAEMON = False

    sickbeard.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        sickbeard.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        sickbeard.SYS_ENCODING = 'UTF-8'

    if not hasattr(sys, "setdefaultencoding"):
        reload(sys)

    try:
        # pylint: disable=E1101
        # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
        sys.setdefaultencoding(sickbeard.SYS_ENCODING)
    except:
        print 'Sorry, you MUST add the Sick Beard folder to the PYTHONPATH environment variable'
        print 'or find another way to force Python to use ' + sickbeard.SYS_ENCODING + ' for string encoding.'
        sys.exit(1)

    # Need console logging for SickBeard.py and SickBeard-console.exe
    consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)

    # Rename the main thread
    threading.currentThread().name = "MAIN"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "qfdp::", ['quiet', 'forceupdate', 'daemon', 'port=', 'pidfile=', 'nolaunch', 'config=', 'datadir='])  # @UnusedVariable
    except getopt.GetoptError:
        print "Available Options: --quiet, --forceupdate, --port, --daemon, --pidfile, --config, --datadir"
        sys.exit()

    forceUpdate = False
    forcedPort = None
    noLaunch = False

    for o, a in opts:
        # For now we'll just silence the logging
        if o in ('-q', '--quiet'):
            consoleLogging = False

        # Should we update (from tvdb) all shows in the DB right away?
        if o in ('-f', '--forceupdate'):
            forceUpdate = True

        # Suppress launching web browser
        # Needed for OSes without default browser assigned
        # Prevent duplicate browser window when restarting in the app
        if o in ('--nolaunch',):
            noLaunch = True

        # Override default/configured port
        if o in ('-p', '--port'):
            forcedPort = int(a)

        # Run as a daemon
        if o in ('-d', '--daemon'):
            if sys.platform == 'win32':
                print "Daemonize not supported under Windows, starting normally"
            else:
                consoleLogging = False
                sickbeard.DAEMON = True

        # Specify folder to load the config file from
        if o in ('--config',):
            sickbeard.CONFIG_FILE = os.path.abspath(a)

        # Specify folder to use as the data dir
        if o in ('--datadir',):
            sickbeard.DATA_DIR = os.path.abspath(a)

        # Write a pidfile if requested
        if o in ('--pidfile',):
            sickbeard.PIDFILE = str(a)

            # If the pidfile already exists, sickbeard may still be running, so exit
            if os.path.exists(sickbeard.PIDFILE):
                sys.exit("PID file '" + sickbeard.PIDFILE + "' already exists. Exiting.")

            # The pidfile is only useful in daemon mode, make sure we can write the file properly
            if sickbeard.DAEMON:
                sickbeard.CREATEPID = True
                try:
                    file(sickbeard.PIDFILE, 'w').write("pid\n")
                except IOError, e:
                    raise SystemExit("Unable to write PID file: %s [%d]" % (e.strerror, e.errno))
            else:
                logger.log(u"Not running in daemon mode. PID file creation disabled.")

    # If they don't specify a config file then put it in the data dir
    if not sickbeard.CONFIG_FILE:
        sickbeard.CONFIG_FILE = os.path.join(sickbeard.DATA_DIR, "config.ini")

    # Make sure that we can create the data dir
    if not os.access(sickbeard.DATA_DIR, os.F_OK):
        try:
            os.makedirs(sickbeard.DATA_DIR, 0744)
        except os.error, e:
            raise SystemExit("Unable to create datadir '" + sickbeard.DATA_DIR + "'")

    # Make sure we can write to the data dir
    if not os.access(sickbeard.DATA_DIR, os.W_OK):
        raise SystemExit("Datadir must be writeable '" + sickbeard.DATA_DIR + "'")

    # Make sure we can write to the config file
    if not os.access(sickbeard.CONFIG_FILE, os.W_OK):
        if os.path.isfile(sickbeard.CONFIG_FILE):
            raise SystemExit("Config file '" + sickbeard.CONFIG_FILE + "' must be writeable.")
        elif not os.access(os.path.dirname(sickbeard.CONFIG_FILE), os.W_OK):
            raise SystemExit("Config file root dir '" + os.path.dirname(sickbeard.CONFIG_FILE) + "' must be writeable.")

    os.chdir(sickbeard.DATA_DIR)

    if consoleLogging:
        print "Starting up Sick Beard " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE

    # Load the config and publish it to the sickbeard package
    if not os.path.isfile(sickbeard.CONFIG_FILE):
        logger.log(u"Unable to find '" + sickbeard.CONFIG_FILE + "' , all settings will be default!", logger.ERROR)

    sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)

    CUR_DB_VERSION = db.DBConnection().checkDBVersion() 
    if CUR_DB_VERSION > 0:
        if CUR_DB_VERSION < MIN_DB_VERSION:
            raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of Sick Beard (" + str(MIN_DB_VERSION) + ").\n" + \
                             "Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
        if CUR_DB_VERSION > MAX_DB_VERSION:
            raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of Sick Beard supports (" + str(MAX_DB_VERSION) + ").\n" + \
                             "If you have used other forks of SB, your database may be unusable due to their modifications.")    
            
    # Initialize the config and our threads
    sickbeard.initialize(consoleLogging=consoleLogging)

    sickbeard.showList = []

    if sickbeard.DAEMON:
        daemonize()

    # Use this PID for everything
    sickbeard.PID = os.getpid()

    if forcedPort:
        logger.log(u"Forcing web server to port " + str(forcedPort))
        startPort = forcedPort
    else:
        startPort = sickbeard.WEB_PORT

    if sickbeard.WEB_LOG:
        log_dir = sickbeard.LOG_DIR
    else:
        log_dir = None

    # sickbeard.WEB_HOST is available as a configuration value in various
    # places but is not configurable. It is supported here for historic reasons.
    if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
        webhost = sickbeard.WEB_HOST
    else:
        if sickbeard.WEB_IPV6:
            webhost = '::'
        else:
            webhost = '0.0.0.0'

    try:
        initWebServer({
                      'port': startPort,
                      'host': webhost,
                      'data_root': os.path.join(sickbeard.PROG_DIR, 'gui/'+sickbeard.GUI_NAME),
                      'web_root': sickbeard.WEB_ROOT,
                      'log_dir': log_dir,
                      'username': sickbeard.WEB_USERNAME,
                      'password': sickbeard.WEB_PASSWORD,
                      'enable_https': sickbeard.ENABLE_HTTPS,
                      'https_cert': sickbeard.HTTPS_CERT,
                      'https_key': sickbeard.HTTPS_KEY,
                      })
    except IOError:
        logger.log(u"Unable to start web server, is something else running on port %d?" % startPort, logger.ERROR)
        if sickbeard.LAUNCH_BROWSER and not sickbeard.DAEMON:
            logger.log(u"Launching browser and exiting", logger.ERROR)
            sickbeard.launchBrowser(startPort)
        sys.exit()

    # Build from the DB to start with
    logger.log(u"Loading initial show list")
    loadShowsFromDB()

    # Fire up all our threads
    sickbeard.start()

    # Launch browser if we're supposed to
    if sickbeard.LAUNCH_BROWSER and not noLaunch and not sickbeard.DAEMON:
        sickbeard.launchBrowser(startPort)

    # Start an update if we're supposed to
    if forceUpdate:
        sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable

    # Stay alive while my threads do the work
    while (True):

        if sickbeard.invoked_command:
            sickbeard.invoked_command()
            sickbeard.invoked_command = None

        time.sleep(1)

    return
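
The startup sequence above shows the two os.path.abspath uses this example was indexed for: resolving the program's own location from __file__ before anything changes the working directory, and absolutizing user-supplied --config/--datadir values as soon as they are parsed. A condensed sketch of just that ordering (apply_path_option is a hypothetical helper, not Sick Beard API):

import os

MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
PROG_DIR = os.path.dirname(MY_FULLNAME)
DATA_DIR = PROG_DIR  # default until a --datadir option overrides it

def apply_path_option(value):
    # Absolutize option paths immediately; the later os.chdir(DATA_DIR)
    # would otherwise change what a relative path refers to.
    return os.path.abspath(value)

DATA_DIR = apply_path_option('./data')
os.makedirs(DATA_DIR, exist_ok=True)
os.chdir(DATA_DIR)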

Example 16

Project: thug
Source File: thug.py
View license
    def analyze(self):
        p = getattr(self, 'run_remote', None)

        try:
            options, args = getopt.getopt(self.args,
                                          'hVu:e:w:n:o:r:p:yszNlxvdqmagA:PS:RJ:Kt:ET:BL:Q:W:C:FZMGYUD:b:',
                ['help',
                'version',
                'useragent=',
                'events=',
                'delay=',
                'logdir=',
                'output=',
                'referer=',
                'proxy=',
                'vtquery',
                'vtsubmit',
                'web-tracking',
                'no-honeyagent',
                'local',
                'local-nofetch',
                'verbose',
                'debug',
                'quiet',
                'no-cache',
                'ast-debug',
                'http-debug',
                'adobepdf=',
                'no-adobepdf',
                'shockwave=',
                'no-shockwave',
                'javaplugin=',
                'no-javaplugin',
                'threshold=',
                'extensive',
                'timeout=',
                'broken-url',
                'htmlclassifier=',
                'urlclassifier=',
                'jsclassifier=',
                'sampleclassifier=',
                'file-logging',
                'json-logging',
                'maec11-logging',
                'elasticsearch-logging',
                'no-code-logging',
                'no-cert-logging',
                'mongodb-address=',
                'vt-apikey=',
                ])
        except getopt.GetoptError:
            self.usage()

        if not options and not args:
            self.usage()

        for option in options:
            if option[0] in ('-h', '--help'):
                self.usage()
            elif option[0] in ('-V', '--version'):
                self.version()

        for option in options:
            if option[0] in ('-u', '--useragent', ):
                self.set_useragent(option[1])
            elif option[0] in ('-e', '--events'):
                self.set_events(option[1])
            elif option[0] in ('-w', '--delay'):
                self.set_delay(option[1])
            elif option[0] in ('-r', '--referer', ):
                self.set_referer(option[1])
            elif option[0] in ('-p', '--proxy', ):
                self.set_proxy(option[1])
            elif option[0] in ('-y', '--vtquery', ):
                self.set_vt_query()
            elif option[0] in ('-s', '--vtsubmit', ):
                self.set_vt_submit()
            elif option[0] in ('-b', '--vt-apikey', ):
                self.set_vt_runtime_apikey(option[1])
            elif option[0] in ('-z', '--web-tracking', ):
                self.set_web_tracking()
            elif option[0] in ('-N', '--no-honeyagent', ):
                self.disable_honeyagent()
            elif option[0] in ('-l', '--local', ):
                p = getattr(self, 'run_local')
            elif option[0] in ('-x', '--local-nofetch', ):
                p = getattr(self, 'run_local')
                self.set_no_fetch()
            elif option[0] in ('-v', '--verbose', ):
                self.set_verbose()
            elif option[0] in ('-d', '--debug', ):
                self.set_debug()
            elif option[0] in ('-m', '--no-cache'):
                self.set_no_cache()
            elif option[0] in ('-a', '--ast-debug', ):
                self.set_ast_debug()
            elif option[0] in ('-g', '--http-debug', ):
                self.set_http_debug()
            elif option[0] in ('-A', '--adobepdf', ):
                self.set_acropdf_pdf(option[1])
            elif option[0] in ('-P', '--no-adobepdf', ):
                self.disable_acropdf()
            elif option[0] in ('-S', '--shockwave', ):
                self.set_shockwave_flash(option[1])
            elif option[0] in ('-R', '--no-shockwave', ):
                self.disable_shockwave_flash()
            elif option[0] in ('-J', '--javaplugin', ):
                self.set_javaplugin(option[1])
            elif option[0] in ('-K', '--no-javaplugin', ):
                self.disable_javaplugin()
            elif option[0] in ('-t', '--threshold', ):
                self.set_threshold(option[1])
            elif option[0] in ('-E', '--extensive', ):
                self.set_extensive()
            elif option[0] in ('-T', '--timeout', ):
                self.set_timeout(option[1])
            elif option[0] in ('-L', '--htmlclassifier'):
                for classifier in option[1].split(','):
                    self.add_htmlclassifier(os.path.abspath(classifier))
            elif option[0] in ('-Q', '--urlclassifier'):
                for classifier in option[1].split(','):
                    self.add_urlclassifier(os.path.abspath(classifier))
            elif option[0] in ('-W', '--jsclassifier'):
                for classifier in option[1].split(','):
                    self.add_jsclassifier(os.path.abspath(classifier))
            elif option[0] in ('-C', '--sampleclassifier'):
                for classifier in option[1].split(','):
                    self.add_sampleclassifier(os.path.abspath(classifier))
            elif option[0] in ('-B', '--broken-url', ):
                self.set_broken_url()
            elif option[0] in ('-F', '--file-logging', ):
                self.set_file_logging()
            elif option[0] in ('-Z', '--json-logging', ):
                self.set_json_logging()
            elif option[0] in ('-M', '--maec11-logging', ):
                self.set_maec11_logging()
            elif option[0] in ('-G', '--elasticsearch-logging', ):
                self.set_elasticsearch_logging()
            elif option[0] in ('-Y', '--no-code-logging', ):
                self.disable_code_logging()
            elif option[0] in ('-U', '--no-cert-logging', ):
                self.disable_cert_logging()
            elif option[0] in ('-D', '--mongodb-address', ):
                self.set_mongodb_address(option[1])

        self.log_init(args[0])

        for option in options:
            if option[0] in ('-n', '--logdir'):
                self.set_log_dir(option[1])
            elif option[0] in ('-o', '--output', ):
                self.set_log_output(option[1])
            elif option[0] in ('-q', '--quiet', ):
                self.set_log_quiet()

        if p:
            ThugPlugins(PRE_ANALYSIS_PLUGINS, self)()
            p(args[0])
            ThugPlugins(POST_ANALYSIS_PLUGINS, self)()

        self.log_event()
        return log
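
Among all the option handling above, the classifier options are where os.path.abspath does its work: each value may carry several comma-separated paths, and each one is absolutized before Thug changes directory later on. A self-contained sketch of that single step (expand_classifier_paths is an illustrative name, not Thug API):

import os

def expand_classifier_paths(option_value):
    # Split the comma-separated list and pin each entry to an
    # absolute path so later chdir() calls cannot invalidate it.
    return [os.path.abspath(p) for p in option_value.split(',')]

print(expand_classifier_paths('rules/a.yar,../shared/b.yar'))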

Example 17

Project: cgat
Source File: runZinba.py
View license
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.
    """

    if not argv:
        argv = sys.argv

    # setup command line parser
    parser = E.OptionParser(version="%prog version: $Id$",
                            usage=globals()["__doc__"])

    parser.add_option("-f", "--input-format", dest="input_format",
                      type="choice",
                      choices=("bed", "bam"),
                      help="input file format [default=%default].")

    parser.add_option("-s", "--fragment-size", dest="fragment_size",
                      type="int",
                      help="fragment size, used for the extension parameter "
                      "in Zinba [default=%default].")

    parser.add_option("-m", "--zinba-mappability-dir", dest="mappability_dir",
                      type="string",
                      help="mappability_dir [default=%default].")

    parser.add_option("-b", "--bit-file", dest="bit_filename",
                      type="string",
                      help="2bit genome filename [default=%default].")

    parser.add_option("-c", "--control-filename", dest="control_filename",
                      type="string",
                      help="filename of input/control data in bed format "
                      "[default=%default].")

    parser.add_option("-i", "--zinba-index-dir", dest="index_dir", type="string",
                      help="index directory [default=%default].")

    parser.add_option("-t", "--threads", dest="threads", type="int",
                      help="number of threads to use [default=%default].")

    parser.add_option("-q", "--fdr-threshold", dest="fdr_threshold",
                      type="float",
                      help="fdr threshold [default=%default].")

    parser.add_option("-a", "--zinba-alignability-threshold",
                      dest="alignability_threshold", type="int",
                      help="alignability threshold [default=%default].")

    parser.add_option("-p", "--aggregate-by-contig", dest="per_contig",
                      action="store_true",
                      help="run analysis per chromosome [default=%default]")

    parser.add_option("-w", "--temp-dir", dest="tempdir", type="string",
                      help="use existing directory as temporary directory "
                      "[default=%default].")

    parser.add_option("--keep-temp", dest="keep_temp", action="store_true",
                      help="keep temporary directory [default=%default]")

    parser.add_option("--action", dest="action", type="choice",
                      choices=("full", "count", "predict", "model"),
                      help="action to perform [default=%default]")

    parser.add_option("--zinba-improvement", dest="improvement", type="float",
                      help="relative improvement of likelihood until "
                      "convergence [default=%default]")

    parser.add_option("--min-insert-size", dest="min_insert_size", type="int",
                      help="minimum insert size for paired end data "
                      "[default=%default]")

    parser.add_option("--max-insert-size", dest="max_insert_size", type="int",
                      help="maximum insert size for paired end data "
                      "[default=%default]")

    parser.set_defaults(
        input_format="bed",
        fragment_size=200,
        mappability_dir=None,
        threads=1,
        alignability_threshold=1,
        bit_filename=None,
        fdr_threshold=0.05,
        tempdir=None,
        winsize=250,
        offset=125,
        cnvWinSize=1e+05,
        cnvOffset=2500,
        per_contig=False,
        keep_temp=False,
        min_insert_size=0,
        max_insert_size=1000,
        filelist="files.list",
        selectchr="chr19",
        action="full",
        improvement=0.00001,
    )

    # add common options (-h/--help, ...) and parse command line
    (options, args) = E.Start(parser, argv=argv)

    if len(args) != 2:
        raise ValueError(
            "please specify a filename with sample data and an output file")

    filename_sample, filename_output = args[0], args[1]

    filename_sample = os.path.abspath(filename_sample)
    filename_output = os.path.abspath(filename_output)

    if options.control_filename:
        filename_control = os.path.abspath(options.control_filename)
    else:
        filename_control = None

    # load Zinba
    R.library('zinba')

    if not options.tempdir:
        tmpdir = tempfile.mkdtemp()
    else:
        tmpdir = options.tempdir

    E.info("changing to temporary directory %s" % tmpdir)
    os.chdir(tmpdir)

    if options.input_format == "bam":
        E.info("converting bam files to bed")
        if not os.path.exists(os.path.join(tmpdir, "sample.bed")):
            filename_sample = bamToBed(
                filename_sample,
                os.path.join(tmpdir, "sample.bed"))
        else:
            E.info("using existing file %(tmpdir)s/sample.bed" %
                   locals())
            filename_sample = os.path.join(
                tmpdir, "sample.bed")
        if filename_control:
            if not os.path.exists(os.path.join(tmpdir, "control.bed")):
                filename_control = bamToBed(
                    filename_control,
                    os.path.join(tmpdir, "control.bed"))
            else:
                E.info("using existing file %(tmpdir)s/control.bed" %
                       locals())
                filename_control = os.path.join(
                    tmpdir, "control.bed")

    fragment_size = options.fragment_size
    threads = options.threads
    bit_filename = options.bit_filename
    mappability_dir = options.mappability_dir
    fdr_threshold = options.fdr_threshold
    tol = options.improvement

    contigs = E.run(
        "twoBitInfo %(bit_filename)s %(tmpdir)s/contig_sizes" % locals())
    contig2size = dict(
        [x.split() for x in IOTools.openFile(
            os.path.join(tmpdir, "contig_sizes"))])

    outdir = filename_output + "_files"
    E.info('saving intermediate results in %s' % outdir)
    if not os.path.exists(outdir):
        os.mkdir(outdir)

    filelist = os.path.join(outdir, filename_output + ".list")
    modelfile = os.path.join(outdir, filename_output + ".model")
    winfile = os.path.join(outdir, filename_output + ".wins")
    winSize = 250
    offset = 125
    cnvWinSize = 100000
    cnvOffset = 0
    winGap = 0
    peakconfidence = 1.0 - fdr_threshold
    selectchr = options.selectchr

    if not os.path.exists(os.path.join(tmpdir, "basecount")):
        E.info("computing counts")

        R('''basealigncount(inputfile='%(filename_sample)s',
        outputfile='%(tmpdir)s/basecount',
        extension=%(fragment_size)i,
        filetype='bed',
        twoBitFile='%(bit_filename)s' )
        ''' % locals())
    else:
        E.info("using existing counts")

    # tried incremental updates
    # for contig, size in contig2size.iteritems():
    #     for size in
    #     fn = os.path.join( tmpdir, "sample_%(contig)s_win%(size)ibp_offset(offset)ibp.txt" % locals() )
    if options.action == "count":

        E.info("computing window counts only - saving results in %s" % outdir)
        R('''buildwindowdata(
                     seq='%(filename_sample)s',
                     align='%(mappability_dir)s',
                     input='%(filename_control)s',
                     twoBit='%(bit_filename)s',
                     winSize=%(winSize)i,
                     offset=%(offset)i,
                     cnvWinSize=%(cnvWinSize)i,
                     cnvOffset=%(cnvOffset)i,
                     filelist='%(filelist)s',
                     filetype='bed',
                     extension=%(fragment_size)s,
                     outdir='%(outdir)s/') ''' % locals())

    elif options.action == "model":

        # The important option is buildwin = 0
        # parameterized for broad == FALSE and input present
        # see zinba.R
        # model selection only on chr19.
        R('''run.zinba( 
                filelist='%(filelist)s',
                formula=NULL,formulaE=NULL,formulaZ=NULL,
                outfile='%(filename_output)s',
                seq='%(filename_sample)s',
                input='%(filename_control)s',
                filetype='bed',  
                align='%(mappability_dir)s',
                twoBit='%(bit_filename)s',
                extension=%(fragment_size)s,
                winSize=%(winSize)i,
                offset=%(offset)i,
                cnvWinSize=%(cnvWinSize)i,
                cnvOffset=%(cnvOffset)i,
                basecountfile='%(tmpdir)s/basecount',
                buildwin=0,
                threshold=%(fdr_threshold)f,
                pquant=1,
                peakconfidence=%(peakconfidence)f,
                winGap=%(winGap)i,
                tol=%(tol)f,
                initmethod="count",
                method="mixture",
                numProc=%(threads)i,
                printFullOut=1,
                interaction=FALSE,
                selectmodel=TRUE,
                selectchr='%(selectchr)s',
                selectcovs=c("input_count"),
                selecttype="complete",
                FDR=TRUE)''' % locals())

    elif options.action == "predict":

        # The important option is buildwin = 0 and selectmodel = FALSE
        # parameterized for broad == FALSE and input present
        # see zinba.R
        # model selection only on chr19.
        if not os.path.exists(modelfile):
            raise OSError("model file %s does not exist" % modelfile)

        E.info("reading model from %s" % modelfile)

        R('''
        final=read.table('%(modelfile)s', header=T, sep="\t")
        final=final[final$fail==0,]
        bestBIC=which.min(final$BIC)
        formula=as.formula(paste("exp_count~",final$formula[bestBIC]))
        formulaE=as.formula(paste("exp_count~",final$formulaE[bestBIC]))
        formulaZ=as.formula(paste("exp_count~",final$formulaZ[bestBIC]))
        cat("Background formula is:\n\t")
        print(formula)
        cat("Enrichment formula is:\n\t")
        print(formulaE)
        cat("Zero-inflated formula is:\n\t")
        print(formulaZ)
        ''' % locals())

        E.info("predicting peaks")

        R('''run.zinba(
                filelist='%(filelist)s',
                outfile='%(filename_output)s',
                seq='%(filename_sample)s',
                input='%(filename_control)s',
                filetype='bed',
                align='%(mappability_dir)s',
                twoBit='%(bit_filename)s',
                extension=%(fragment_size)s,
                winSize=%(winSize)i,
                offset=%(offset)i,
                cnvWinSize=%(cnvWinSize)i,
                cnvOffset=%(cnvOffset)i,
                basecountfile='%(tmpdir)s/basecount',
                buildwin=0,
                threshold=%(fdr_threshold)f,
                pquant=1,
                winGap=%(winGap)i,
                initmethod="count",
                selectchr='%(selectchr)s',
                tol=%(tol)f,
                method="mixture",
                numProc=%(threads)i,
                printFullOut=1,
                interaction=FALSE,
                selectmodel=FALSE,
                formula=formula,
                formulaE=formulaE,
                formulaZ=formulaZ,
                peakconfidence=%(peakconfidence)f,
                FDR=TRUE)''' % locals())

    elif options.action == "per_contig":

        E.info("processing per chromosome")
        for contig, size in contig2size.items():
            if contig not in ("chr16",):
                continue

            E.info("processing contig %s" % contig)
            filename_sample_contig = filename_sample + "_%s" % contig
            filename_control_contig = filename_control + "_%s" % contig
            if not os.path.exists(filename_output + "_files"):
                os.mkdir(filename_output + "_files")
            filename_output_contig = os.path.join(
                filename_output + "_files", contig)
            filename_basecounts_contig = os.path.join(
                tmpdir, "basecount_%s" % contig)

            E.run(
                "grep %(contig)s < %(filename_sample)s > %(filename_sample_contig)s" % locals())
            E.run(
                "grep %(contig)s < %(filename_control)s > %(filename_control_contig)s" % locals())

            if not os.path.exists(filename_basecounts_contig):
                E.info("computing counts")

                R('''basealigncount( inputfile='%(filename_sample_contig)s',
                                  outputfile='%(filename_basecounts_contig)s',
                                  extension=%(fragment_size)i,
                                  filetype='bed',
                                  twoBitFile='%(bit_filename)s' )
                                  ''' % locals())
            else:
                E.info("using existing counts")

            # run zinba, do not build window data
            R('''zinba( refinepeaks=1,
            seq='%(filename_sample_contig)s',
            input='%(filename_control_contig)s',
            filetype='bed',
            align='%(mappability_dir)s',
            twoBit='%(bit_filename)s',
            outfile='%(filename_output_contig)s',
            extension=%(fragment_size)s,
            basecountfile='%(filename_basecounts_contig)s',
            numProc=%(threads)i,
            threshold=%(fdr_threshold)f,
            broad=FALSE,
            printFullOut=0,
            interaction=FALSE,
            mode='peaks',
            FDR=TRUE) ''' % locals())
    elif options.action == "full":

        # run zinba, build window data and refine peaks

        # Note that zinba() uses 'chr22' to select model
        # which is not present in mouse. So call run.zinba
        # directly.
        R('''run.zinba(
        refinepeaks=1,
        buildwin=1,
        seq='%(filename_sample)s',
        input='%(filename_control)s',
        filetype='bed',
        align='%(mappability_dir)s',
        twoBit='%(bit_filename)s',
        outfile='%(filename_output)s',
        extension=%(fragment_size)s,
        winSize=%(winSize)i,
        offset=%(offset)i,
        basecountfile='%(tmpdir)s/basecount',
        numProc=%(threads)i,
        threshold=%(fdr_threshold)f,
        pquant=1,
        winGap=%(winGap)i,
        selectchr='%(selectchr)s',
        interaction=FALSE,
        method="mixture",
        cnvWinSize=%(cnvWinSize)i,
        cnvOffset=%(cnvOffset)i,
        selectmodel=TRUE,
        selectcovs=c("input_count"),
        selecttype="complete",
        initmethod="count",
        printFullOut=1,
        diff=0,
        pWinSize=200,
        peakconfidence=%(peakconfidence)f,
        FDR=TRUE) ''' % locals())

    if not (options.tempdir or options.keep_temp):
        shutil.rmtree(tmpdir)

    # write footer and output benchmark information.
    E.Stop()
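
The structural point of this example is the ordering: the sample, output, and control filenames are passed through os.path.abspath before os.chdir(tmpdir), so the R calls can keep referring to them from inside the scratch directory. A stripped-down sketch of that contract (work_in_tempdir is a hypothetical wrapper, not cgat API):

import os
import shutil
import tempfile

def work_in_tempdir(input_filename, output_filename):
    # Absolutize caller paths *before* changing directory; a relative
    # path would otherwise resolve against tmpdir instead.
    input_filename = os.path.abspath(input_filename)
    output_filename = os.path.abspath(output_filename)
    tmpdir = tempfile.mkdtemp()
    cwd = os.getcwd()
    try:
        os.chdir(tmpdir)
        shutil.copy(input_filename, 'scratch.dat')
        # ... tools would transform scratch.dat here ...
        shutil.copy('scratch.dat', output_filename)
    finally:
        os.chdir(cwd)
        shutil.rmtree(tmpdir)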

Example 18

Project: openmoltools
Source File: amber.py
View license
def build_mixture_prmtop(mol2_filenames, frcmod_filenames, box_filename, prmtop_filename, inpcrd_filename, water_model = 'TIP3P'):
    """Create a prmtop and inpcrd from a collection of mol2 and frcmod files
    as well as a single box PDB.  We have used this for setting up
    simulations of neat liquids or binary mixtures.

    Parameters
    ----------
    mol2_filenames : list(str)
        Filenames of GAFF flavored mol2 files.  Each must contain exactly
        ONE ligand.
    frcmod_filenames : str
        Filename of input GAFF frcmod filenames.
    box_filename : str
        Filename of PDB containing an arbitrary box of the mol2 molecules.
    prmtop_filename : str
        output prmtop filename.  Should have suffix .prmtop
    inpcrd_filename : str
        output inpcrd filename.  Should have suffix .inpcrd
    water_model : str, optional. Default: "TIP3P"
        String specifying water model to be used IF water is present as a component of the mixture. Valid options are currently "TIP3P", "SPC", or None. If None is specified, flexible GAFF-water will be used as for any other solute (old behavior).

    Returns
    -------
    tleap_commands : str
        The string of commands piped to tleap for building the prmtop
        and inpcrd files.  This will *already* have been run, but the
        output can be useful for debugging or archival purposes. However,
        this will reflect temporary file names for both input and output
        file as these are used to avoid tleap filename restrictions.

    Notes
    -----
    This can be easily broken if there are missing, duplicated, or
    inconsistent ligand residue names in your box, mol2, and frcmod files.
    You can use mdtraj to edit the residue names with something like
    this: trj.top.residue(0).name = "L1"
    """

    # Check for one residue name per mol2 file and uniqueness between all mol2 files
    all_names = set()
    for filename in mol2_filenames:
        t = md.load(filename)
        names = set([r.name for r in t.top.residues])

        if len(names) != 1:
            raise(ValueError("Must have a SINGLE residue name in each mol2 file."))

        all_names = all_names.union(list(names))

    if len(all_names) != len(mol2_filenames):
        raise(ValueError("Must have UNIQUE residue names in each mol2 file."))
    if len(mol2_filenames) != len(frcmod_filenames):
        raise(ValueError("Must provide an equal number of frcmod and mol2 file names."))

    #Get number of files
    nfiles = len(mol2_filenames)

    #Check validity of water model options
    valid_water = ['TIP3P', 'SPC', None]
    if not water_model in valid_water:
        raise(ValueError("Must provide a valid water model."))

    #If we are requesting a different water model, check if there is water present
    if not water_model==None:
        parmed = import_("parmed")
        solventIsWater = []
        waterPresent = False
        for i in range(nfiles):
            mol = parmed.load_file( mol2_filenames[i] )
            #Check if it is water by checking GAFF atom names
            types = [ atom.type for atom in mol.atoms ]
            if 'oh' in types and types.count('ho')==2 and len(types)==3:
                solventIsWater.append(True)
                waterPresent = True
            else:
                solventIsWater.append(False)

        #In this case, if we have any water, we will now work on fewer .mol2 and .frcmod files and instead use the force field files for those. So, reduce nfiles and remove the files we don't need from the .mol2 and .frcmod filename lists
        #After doing so, go on to interpret the specified water model and compose the water model string needed for tleap
        if waterPresent:
            new_mol2_filenames = []
            new_frcmod_filenames = []
            water_mol2_filenames = []
            for i in range( nfiles ):
                if not solventIsWater[i]:
                    new_mol2_filenames.append( mol2_filenames[i] )
                    new_frcmod_filenames.append( frcmod_filenames[i] )
                else:
                    water_mol2_filenames.append( mol2_filenames[i] )
            nfiles = len(new_mol2_filenames)
            mol2_filenames = new_mol2_filenames
            frcmod_filenames = new_frcmod_filenames

            #Now interpret the specified water model and translate into AMBER nomenclature
            if water_model=='TIP3P':
                water_model = 'TP3'
            elif water_model =='SPC':
                water_model = 'SPC'
            else:
                raise(ValueError("Cannot translate specified water model into one of the available models."))


            #Compose string for loading specified water molecule
            water_string = '\n'
            water_names = [md.load(filename).top.residue(0).name for filename in water_mol2_filenames]
            for name in water_names:
                water_string += '%s = %s\n' % (name, water_model )
                #Also if not TIP3P, update to source correct frcmod file
                if water_model == 'SPC':
                    water_string += 'loadamberparams frcmod.spce\n'
                elif water_model =='TP3':
                    continue
                else:
                    raise(ValueError("Cannot identify water frcmod file to be loaded."))

            #Rename water atoms in box file to match what is expected by AMBER
            packmol = import_("openmoltools.packmol")
            packmol.rename_water_atoms(box_filename)
    else:
        waterPresent = False

    #Make temporary, hardcoded filenames for mol2 and frcmod input to avoid tleap filename restrictions
    tmp_mol2_filenames = [ 'in%d.mol2' % n for n in range(nfiles) ]
    tmp_frcmod_filenames = [ 'in%d.frcmod' % n for n in range(nfiles) ]

    #Make temporary, hardcoded filenames for output files to avoid tleap filename restrictions
    tmp_prmtop_filename = 'out.prmtop'
    tmp_inpcrd_filename = 'out.inpcrd'
    tmp_box_filename = 'tbox.pdb'

    #Build absolute paths of input files so we can use context and temporary directory
    infiles = mol2_filenames + frcmod_filenames + [box_filename]
    infiles = [ os.path.abspath(filenm) for filenm in infiles ]

    #Build absolute paths of output files so we can copy them back
    prmtop_filename = os.path.abspath( prmtop_filename )
    inpcrd_filename = os.path.abspath( inpcrd_filename )

    #Use temporary directory and do the setup
    with mdtraj.utils.enter_temp_directory():

        #Copy input files to temporary file names in target directory
        for (infile, outfile) in zip( infiles, tmp_mol2_filenames+tmp_frcmod_filenames+[tmp_box_filename] ):
            shutil.copy( infile, outfile)
            logger.debug('Copying input file %s to %s...\n' % (infile, outfile))


        all_names = [md.load(filename).top.residue(0).name for filename in tmp_mol2_filenames]

        mol2_section = "\n".join("%s = loadmol2 %s" % (all_names[k], filename) for k, filename in enumerate(tmp_mol2_filenames))
        #If non-GAFF water is present, load desired parameters for that water as well.
        if waterPresent:
            mol2_section += water_string
        amberparams_section = "\n".join("loadamberparams %s" % (filename) for k, filename in enumerate(tmp_frcmod_filenames))

        tleap_commands = TLEAP_TEMPLATE % dict(mol2_section=mol2_section, amberparams_section=amberparams_section, box_filename=tmp_box_filename, prmtop_filename=tmp_prmtop_filename, inpcrd_filename=tmp_inpcrd_filename)
        print(tleap_commands)

        file_handle = open('tleap_commands', 'w')
        file_handle.writelines(tleap_commands)
        file_handle.close()

        logger.debug('Running tleap in temporary directory.')
        cmd = "tleap -f %s " % file_handle.name
        logger.debug(cmd)

        output = getoutput(cmd)
        logger.debug(output)
        check_for_errors( output, other_errors = ['Improper number of arguments'], ignore_errors = ['unperturbed charge of the unit', 'ignoring the error'] )

        #Copy stuff back to right filenames
        for (tfile, finalfile) in zip( [tmp_prmtop_filename, tmp_inpcrd_filename], [prmtop_filename, inpcrd_filename] ):
            shutil.copy( tfile, finalfile)

    return tleap_commands
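
The same pattern appears here in context-manager form: mdtraj.utils.enter_temp_directory() runs the tleap step inside a scratch directory, which is why both input and output filenames are absolutized first and the results copied back at the end. A rough stand-in sketch (the enter_temp_directory below is a simplified reimplementation under that assumption, not the mdtraj original; run_external_tool is hypothetical):

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def enter_temp_directory():
    cwd = os.getcwd()
    tmpdir = tempfile.mkdtemp()
    os.chdir(tmpdir)
    try:
        yield tmpdir
    finally:
        os.chdir(cwd)
        shutil.rmtree(tmpdir)

def run_external_tool(infiles, outfile):
    # Absolute paths survive the chdir into the scratch directory;
    # short copies sidestep tool filename restrictions.
    infiles = [os.path.abspath(f) for f in infiles]
    outfile = os.path.abspath(outfile)
    with enter_temp_directory():
        for i, f in enumerate(infiles):
            shutil.copy(f, 'in%d.dat' % i)
        # ... invoke the tool on the copies here ...
        open('out.dat', 'w').close()  # placeholder for tool output
        shutil.copy('out.dat', outfile)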

Example 19

Project: openmoltools
Source File: amber.py
View license
def build_mixture_prmtop(mol2_filenames, frcmod_filenames, box_filename, prmtop_filename, inpcrd_filename, water_model = 'TIP3P'):
    """Create a prmtop and inpcrd from a collection of mol2 and frcmod files
    as well as a single box PDB.  We have used this for setting up
    simulations of neat liquids or binary mixtures.

    Parameters
    ----------
    mol2_filenames : list(str)
        Filenames of GAFF flavored mol2 files.  Each must contain exactly
        ONE ligand.
    frcmod_filenames : str
        Filename of input GAFF frcmod filenames.
    box_filename : str
        Filename of PDB containing an arbitrary box of the mol2 molecules.
    prmtop_filename : str
        output prmtop filename.  Should have suffix .prmtop
    inpcrd_filename : str
        output inpcrd filename.  Should have suffix .inpcrd
    water_model : str, optional. Default: "TIP3P"
        String specifying water model to be used IF water is present as a component of the mixture. Valid options are currently "TIP3P", "SPC", or None. If None is specified, flexible GAFF-water will be used as for any other solute (old behavior).

    Returns
    -------
    tleap_commands : str
        The string of commands piped to tleap for building the prmtop
        and inpcrd files.  This will *already* have been run, but the
        output can be useful for debugging or archival purposes. However,
        this will reflect temporary file names for both input and output
        file as these are used to avoid tleap filename restrictions.

    Notes
    -----
    This can be easily broken if there are missing, duplicated, or
    inconsistent ligand residue names in your box, mol2, and frcmod files.
    You can use mdtraj to edit the residue names with something like
    this: trj.top.residue(0).name = "L1"
    """

    # Check for one residue name per mol2 file and uniqueness between all mol2 files
    all_names = set()
    for filename in mol2_filenames:
        t = md.load(filename)
        names = set([r.name for r in t.top.residues])

        if len(names) != 1:
            raise(ValueError("Must have a SINGLE residue name in each mol2 file."))

        all_names = all_names.union(list(names))

    if len(all_names) != len(mol2_filenames):
        raise(ValueError("Must have UNIQUE residue names in each mol2 file."))
    if len(mol2_filenames) != len(frcmod_filenames):
        raise(ValueError("Must provide an equal number of frcmod and mol2 file names."))

    #Get number of files
    nfiles = len(mol2_filenames)

    #Check validity of water model options
    valid_water = ['TIP3P', 'SPC', None]
    if not water_model in valid_water:
        raise(ValueError("Must provide a valid water model."))

    #If we are requesting a different water model, check if there is water present
    if not water_model==None:
        parmed = import_("parmed")
        solventIsWater = []
        waterPresent = False
        for i in range(nfiles):
            mol = parmed.load_file( mol2_filenames[i] )
            #Check if it is water by checking GAFF atom names
            types = [ atom.type for atom in mol.atoms ]
            if 'oh' in types and types.count('ho')==2 and len(types)==3:
                solventIsWater.append(True)
                waterPresent = True
            else:
                solventIsWater.append(False)

        #In this case, if we have any water, we will now work on fewer .mol2 and .frcmod files and instead use the force field files for those. So, reduce nfiles and remove the files we don't need from the .mol2 and .frcmod filename lists
        #After doing so, go on to interpret the specified water model and compose the water model string needed for tleap
        if waterPresent:
            new_mol2_filenames = []
            new_frcmod_filenames = []
            water_mol2_filenames = []
            for i in range( nfiles ):
                if not solventIsWater[i]:
                    new_mol2_filenames.append( mol2_filenames[i] )
                    new_frcmod_filenames.append( frcmod_filenames[i] )
                else:
                    water_mol2_filenames.append( mol2_filenames[i] )
            nfiles = len(new_mol2_filenames)
            mol2_filenames = new_mol2_filenames
            frcmod_filenames = new_frcmod_filenames

            #Now interpret the specified water model and translate into AMBER nomenclature
            if water_model=='TIP3P':
                water_model = 'TP3'
            elif water_model =='SPC':
                water_model = 'SPC'
            else:
                raise(ValueError("Cannot translate specified water model into one of the available models."))


            #Compose string for loading specified water molecule
            water_string = '\n'
            water_names = [md.load(filename).top.residue(0).name for filename in water_mol2_filenames]
            for name in water_names:
                water_string += '%s = %s\n' % (name, water_model )
                #Also if not TIP3P, update to source correct frcmod file
                if water_model == 'SPC':
                    water_string += 'loadamberparams frcmod.spce\n'
                elif water_model =='TP3':
                    continue
                else:
                    raise(ValueError("Cannot identify water frcmod file to be loaded."))

            #Rename water atoms in box file to match what is expected by AMBER
            packmol = import_("openmoltools.packmol")
            packmol.rename_water_atoms(box_filename)
    else:
        waterPresent = False

    #Make temporary, hardcoded filenames for mol2 and frcmod input to avoid tleap filename restrictions
    tmp_mol2_filenames = [ 'in%d.mol2' % n for n in range(nfiles) ]
    tmp_frcmod_filenames = [ 'in%d.frcmod' % n for n in range(nfiles) ]

    #Make temporary, hardcoded filenames for output files to avoid tleap filename restrictions
    tmp_prmtop_filename = 'out.prmtop'
    tmp_inpcrd_filename = 'out.inpcrd'
    tmp_box_filename = 'tbox.pdb'

    #Build absolute paths of input files so we can use context and temporary directory
    infiles = mol2_filenames + frcmod_filenames + [box_filename]
    infiles = [ os.path.abspath(filenm) for filenm in infiles ]

    #Build absolute paths of output files so we can copy them back
    prmtop_filename = os.path.abspath( prmtop_filename )
    inpcrd_filename = os.path.abspath( inpcrd_filename )

    #Use temporary directory and do the setup
    with mdtraj.utils.enter_temp_directory():

        #Copy input files to temporary file names in target directory
        for (infile, outfile) in zip( infiles, tmp_mol2_filenames+tmp_frcmod_filenames+[tmp_box_filename] ):
            shutil.copy( infile, outfile)
            logger.debug('Copying input file %s to %s...\n' % (infile, outfile))


        all_names = [md.load(filename).top.residue(0).name for filename in tmp_mol2_filenames]

        mol2_section = "\n".join("%s = loadmol2 %s" % (all_names[k], filename) for k, filename in enumerate(tmp_mol2_filenames))
        #If non-GAFF water is present, load desired parameters for that water as well.
        if waterPresent:
            mol2_section += water_string
        amberparams_section = "\n".join("loadamberparams %s" % filename for filename in tmp_frcmod_filenames)

        tleap_commands = TLEAP_TEMPLATE % dict(mol2_section=mol2_section, amberparams_section=amberparams_section, box_filename=tmp_box_filename, prmtop_filename=tmp_prmtop_filename, inpcrd_filename=tmp_inpcrd_filename)
        print(tleap_commands)

        with open('tleap_commands', 'w') as file_handle:
            file_handle.write(tleap_commands)

        logger.debug('Running tleap in temporary directory.')
        cmd = "tleap -f %s " % file_handle.name
        logger.debug(cmd)

        output = getoutput(cmd)
        logger.debug(output)
        check_for_errors( output, other_errors = ['Improper number of arguments'], ignore_errors = ['unperturbed charge of the unit', 'ignoring the error'] )

        #Copy stuff back to right filenames
        for (tfile, finalfile) in zip( [tmp_prmtop_filename, tmp_inpcrd_filename], [prmtop_filename, inpcrd_filename] ):
            shutil.copy( tfile, finalfile)

    return tleap_commands
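
The detail worth copying from this example is that every input and output path is made absolute with os.path.abspath before the code enters a temporary directory; otherwise the relative names would resolve against the scratch directory after the chdir. A minimal sketch of that idiom (the context manager below is a stand-in for mdtraj.utils.enter_temp_directory, and the filename is hypothetical):

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def enter_temp_directory():
    # Stand-in for mdtraj.utils.enter_temp_directory: chdir into a scratch
    # directory, then restore the old cwd and delete the scratch on exit.
    cwd = os.getcwd()
    scratch = tempfile.mkdtemp()
    os.chdir(scratch)
    try:
        yield scratch
    finally:
        os.chdir(cwd)
        shutil.rmtree(scratch)

# Resolve paths *before* changing directory; a relative name such as
# 'out.prmtop' would point somewhere else once the cwd changes.
prmtop_filename = os.path.abspath('out.prmtop')  # hypothetical filename
with enter_temp_directory():
    print(os.getcwd())        # the scratch directory
    print(prmtop_filename)    # still the original location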

Example 20

Project: Sick-Beard
Source File: SickBeard.py
View license
def main():
    """
    TV for me
    """

    # do some preliminary stuff
    sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
    sickbeard.MY_NAME = os.path.basename(sickbeard.MY_FULLNAME)
    sickbeard.PROG_DIR = os.path.dirname(sickbeard.MY_FULLNAME)
    sickbeard.DATA_DIR = sickbeard.PROG_DIR
    sickbeard.MY_ARGS = sys.argv[1:]
    sickbeard.CREATEPID = False
    sickbeard.DAEMON = False

    sickbeard.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        sickbeard.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        sickbeard.SYS_ENCODING = 'UTF-8'

    if not hasattr(sys, "setdefaultencoding"):
        reload(sys)

    try:
        # pylint: disable=E1101
        # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
        sys.setdefaultencoding(sickbeard.SYS_ENCODING)
    except:
        print 'Sorry, you MUST add the Sick Beard folder to the PYTHONPATH environment variable'
        print 'or find another way to force Python to use ' + sickbeard.SYS_ENCODING + ' for string encoding.'
        sys.exit(1)

    # Need console logging for SickBeard.py and SickBeard-console.exe
    consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)

    # Rename the main thread
    threading.currentThread().name = "MAIN"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "qfdp::", ['quiet', 'forceupdate', 'daemon', 'port=', 'pidfile=', 'nolaunch', 'config=', 'datadir='])  # @UnusedVariable
    except getopt.GetoptError:
        print "Available Options: --quiet, --forceupdate, --port, --daemon, --pidfile, --config, --datadir"
        sys.exit()

    forceUpdate = False
    forcedPort = None
    noLaunch = False

    for o, a in opts:
        # For now we'll just silence the logging
        if o in ('-q', '--quiet'):
            consoleLogging = False

        # Should we update (from tvdb) all shows in the DB right away?
        if o in ('-f', '--forceupdate'):
            forceUpdate = True

        # Suppress launching web browser
        # Needed for OSes without default browser assigned
        # Prevent duplicate browser window when restarting in the app
        if o in ('--nolaunch',):
            noLaunch = True

        # Override default/configured port
        if o in ('-p', '--port'):
            forcedPort = int(a)

        # Run as a daemon
        if o in ('-d', '--daemon'):
            if sys.platform == 'win32':
                print "Daemonize not supported under Windows, starting normally"
            else:
                consoleLogging = False
                sickbeard.DAEMON = True

        # Specify folder to load the config file from
        if o in ('--config',):
            sickbeard.CONFIG_FILE = os.path.abspath(a)

        # Specify folder to use as the data dir
        if o in ('--datadir',):
            sickbeard.DATA_DIR = os.path.abspath(a)

        # Write a pidfile if requested
        if o in ('--pidfile',):
            sickbeard.PIDFILE = str(a)

            # If the pidfile already exists, sickbeard may still be running, so exit
            if os.path.exists(sickbeard.PIDFILE):
                sys.exit("PID file '" + sickbeard.PIDFILE + "' already exists. Exiting.")

            # The pidfile is only useful in daemon mode, make sure we can write the file properly
            if sickbeard.DAEMON:
                sickbeard.CREATEPID = True
                try:
                    file(sickbeard.PIDFILE, 'w').write("pid\n")
                except IOError, e:
                    raise SystemExit("Unable to write PID file: %s [%d]" % (e.strerror, e.errno))
            else:
                logger.log(u"Not running in daemon mode. PID file creation disabled.")

    # If they don't specify a config file then put it in the data dir
    if not sickbeard.CONFIG_FILE:
        sickbeard.CONFIG_FILE = os.path.join(sickbeard.DATA_DIR, "config.ini")

    # Make sure that we can create the data dir
    if not os.access(sickbeard.DATA_DIR, os.F_OK):
        try:
            os.makedirs(sickbeard.DATA_DIR, 0744)
        except os.error, e:
            raise SystemExit("Unable to create datadir '" + sickbeard.DATA_DIR + "'")

    # Make sure we can write to the data dir
    if not os.access(sickbeard.DATA_DIR, os.W_OK):
        raise SystemExit("Datadir must be writeable '" + sickbeard.DATA_DIR + "'")

    # Make sure we can write to the config file
    if not os.access(sickbeard.CONFIG_FILE, os.W_OK):
        if os.path.isfile(sickbeard.CONFIG_FILE):
            raise SystemExit("Config file '" + sickbeard.CONFIG_FILE + "' must be writeable.")
        elif not os.access(os.path.dirname(sickbeard.CONFIG_FILE), os.W_OK):
            raise SystemExit("Config file root dir '" + os.path.dirname(sickbeard.CONFIG_FILE) + "' must be writeable.")

    os.chdir(sickbeard.DATA_DIR)

    if consoleLogging:
        print "Starting up Sick Beard " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE

    # Load the config and publish it to the sickbeard package
    if not os.path.isfile(sickbeard.CONFIG_FILE):
        logger.log(u"Unable to find '" + sickbeard.CONFIG_FILE + "' , all settings will be default!", logger.ERROR)

    sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)

    CUR_DB_VERSION = db.DBConnection().checkDBVersion()
    if CUR_DB_VERSION > 0:
        if CUR_DB_VERSION < MIN_DB_VERSION:
            raise SystemExit("Your database version (" + str(CUR_DB_VERSION) + ") is too old to migrate from with this version of Sick Beard (" + str(MIN_DB_VERSION) + ").\n" + \
                             "Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
        if CUR_DB_VERSION > MAX_DB_VERSION:
            raise SystemExit("Your database version (" + str(CUR_DB_VERSION) + ") has been incremented past what this version of Sick Beard supports (" + str(MAX_DB_VERSION) + ").\n" + \
                             "If you have used other forks of SB, your database may be unusable due to their modifications.")

    # Initialize the config and our threads
    sickbeard.initialize(consoleLogging=consoleLogging)

    sickbeard.showList = []

    if sickbeard.DAEMON:
        daemonize()

    # Use this PID for everything
    sickbeard.PID = os.getpid()

    if forcedPort:
        logger.log(u"Forcing web server to port " + str(forcedPort))
        startPort = forcedPort
    else:
        startPort = sickbeard.WEB_PORT

    if sickbeard.WEB_LOG:
        log_dir = sickbeard.LOG_DIR
    else:
        log_dir = None

    # sickbeard.WEB_HOST is available as a configuration value in various
    # places but is not configurable. It is supported here for historic reasons.
    if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
        webhost = sickbeard.WEB_HOST
    else:
        if sickbeard.WEB_IPV6:
            webhost = '::'
        else:
            webhost = '0.0.0.0'

    try:
        initWebServer({
                      'port': startPort,
                      'host': webhost,
                      'data_root': os.path.join(sickbeard.PROG_DIR, 'gui/'+sickbeard.GUI_NAME),
                      'web_root': sickbeard.WEB_ROOT,
                      'log_dir': log_dir,
                      'username': sickbeard.WEB_USERNAME,
                      'password': sickbeard.WEB_PASSWORD,
                      'enable_https': sickbeard.ENABLE_HTTPS,
                      'https_cert': sickbeard.HTTPS_CERT,
                      'https_key': sickbeard.HTTPS_KEY,
                      })
    except IOError:
        logger.log(u"Unable to start web server, is something else running on port %d?" % startPort, logger.ERROR)
        if sickbeard.LAUNCH_BROWSER and not sickbeard.DAEMON:
            logger.log(u"Launching browser and exiting", logger.ERROR)
            sickbeard.launchBrowser(startPort)
        sys.exit()

    # Build from the DB to start with
    logger.log(u"Loading initial show list")
    loadShowsFromDB()

    # Fire up all our threads
    sickbeard.start()

    # Launch browser if we're supposed to
    if sickbeard.LAUNCH_BROWSER and not noLaunch and not sickbeard.DAEMON:
        sickbeard.launchBrowser(startPort)

    # Start an update if we're supposed to
    if forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
        sickbeard.showUpdateScheduler.action.run(force=True) # @UndefinedVariable

    # Stay alive while my threads do the work
    while True:

        if sickbeard.invoked_command:
            sickbeard.invoked_command()
            sickbeard.invoked_command = None

        time.sleep(1)

    return
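
Two abspath idioms are at work here: os.path.normpath(os.path.abspath(__file__)) pins down the program's own location, and user-supplied --config/--datadir arguments are made absolute before the later os.chdir(sickbeard.DATA_DIR) can change what relative names resolve to. A minimal sketch of the same idea, with illustrative names:

import os
import sys

MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))  # this script's full path
PROG_DIR = os.path.dirname(MY_FULLNAME)                    # its directory

# Normalise a user-supplied path immediately; after any chdir, a relative
# argument would silently resolve against the new working directory.
DATA_DIR = os.path.abspath(sys.argv[1]) if len(sys.argv) > 1 else PROG_DIR
print(MY_FULLNAME)
print(DATA_DIR)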

Example 21

Project: nginx-python-buildpack
Source File: install.py
View license
    def run(self, options, args):

        if (
            options.no_install or
            options.no_download or
            (options.build_dir != build_prefix) or
            options.no_clean
        ):
            logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
                              'and --no-clean are deprecated.  See https://github.com/pypa/pip/issues/906.')

        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
                raise CommandError("Target path exists but is not a directory, will not continue.")
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                        "--use-mirrors has been deprecated and will be removed"
                        " in the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                        "--mirrors has been deprecated and will be removed in "
                        " the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site,
            target_dir=temp_target_dir,
            session=session,
            pycompile=options.compile,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options, session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        try:
            if not options.no_download:
                requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
            else:
                requirement_set.locate_files()

            if not options.no_install and not self.bundle:
                requirement_set.install(install_options, global_options, root=options.root_path)
                installed = ' '.join([req.name for req in
                                      requirement_set.successfully_installed])
                if installed:
                    logger.notify('Successfully installed %s' % installed)
            elif not self.bundle:
                downloaded = ' '.join([req.name for req in
                                       requirement_set.successfully_downloaded])
                if downloaded:
                    logger.notify('Successfully downloaded %s' % downloaded)
            elif self.bundle:
                requirement_set.create_bundle(self.bundle_filename)
                logger.notify('Created bundle in %s' % self.bundle_filename)
        except PreviousBuildDirError:
            options.no_clean = True
            raise
        finally:
            # Clean up
            if (not options.no_clean) and ((not options.no_install) or options.download_dir):
                requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(
                    os.path.join(lib_dir, item),
                    os.path.join(options.target_dir, item)
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set
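
The command normalises its directory options once, up front, with os.path.abspath, so everything downstream (the finder, the RequirementSet, the cleanup) works with absolute paths. A minimal sketch of that option-normalising step, with hypothetical option names:

import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument('--build', dest='build_dir', default='build')
parser.add_argument('--src', dest='src_dir', default='src')
options = parser.parse_args([])  # empty argv here, so the defaults are used

# Normalise once, up front; later code never has to care about the cwd.
options.build_dir = os.path.abspath(options.build_dir)
options.src_dir = os.path.abspath(options.src_dir)
print(options.build_dir)
print(options.src_dir)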

Example 22

Project: Chips-2.0
Source File: tokens.py
View license
    def scan(self,
             filename,
             input_file=None,
             parameters={},
             external_preprocessor=True):
        """Convert the test file into tokens"""

        self.filename = filename

        if external_preprocessor:

            directory = os.path.abspath(__file__)
            directory = os.path.dirname(directory)
            directory = os.path.join(directory, "include")

            cpp_commands = [
                "cpp",
                "-nostdinc",
                "-isystem",
                directory,
                filename]
            pipe = subprocess.Popen(cpp_commands, stdout=subprocess.PIPE)
            input_file = pipe.stdout
        else:
            if input_file is None:
                try:
                    input_file = open(self.filename)
                except IOError:
                    raise C2CHIPError("Cannot open file: " + self.filename)

        token = []
        tokens = []
        previous_char = ""  # defined up front so the string-literal check below never sees an unbound name
        self.lineno = 1
        jump = False
        for line in input_file:

            # include files
            line = line + " "
            if jump:
                if line.strip().startswith("#endif"):
                    jump = False
                if line.strip().startswith("#else"):
                    jump = False
                self.lineno += 1
                continue

            elif external_preprocessor and line.strip().startswith("#"):
                l = line.strip()
                l = l.lstrip("#")
                l = l.split('"')
                lineno = int(l[0].strip())
                self.lineno = lineno
                filename = l[1].strip().strip('"')
                self.filename = filename
                continue

            elif line.strip().startswith("#include"):
                filename = self.filename
                lineno = self.lineno
                self.tokens.extend(tokens)
                if line.strip().endswith(">"):
                    directory = os.path.abspath(__file__)
                    directory = os.path.dirname(directory)
                    directory = os.path.join(directory, "include")
                else:
                    directory = os.path.abspath(self.filename)
                    directory = os.path.dirname(directory)
                self.filename = line.strip().replace(
                    "#include", "").strip(' ><"')
                self.filename = os.path.join(directory, self.filename)
                self.scan(self.filename)
                self.lineno = lineno
                self.filename = filename
                tokens = []
                self.lineno += 1
                continue

            elif line.strip().startswith("#define"):
                definition = line.strip().split(" ")[1]
                self.definitions.append(definition)
                self.lineno += 1
                continue

            elif line.strip().startswith("#undef"):
                definition = line.strip().split(" ")[1]
                self.definitions.remove(definition)
                self.lineno += 1
                continue

            elif line.strip().startswith("#ifdef"):
                definition = line.strip().split(" ")[1]
                if definition not in self.definitions:
                    jump = True
                self.lineno += 1
                continue

            elif line.strip().startswith("#ifndef"):
                definition = line.strip().split(" ")[1]
                if definition in self.definitions:
                    jump = True
                self.lineno += 1
                continue

            elif line.strip().startswith("#else"):
                jump = True
                self.lineno += 1
                continue

            elif line.strip().startswith("#endif"):
                self.lineno += 1
                continue

            newline = True
            for char in line:

                if not token:
                    token = char

                # c style comment
                elif (token + char).startswith("/*"):
                    if (token + char).endswith("*/"):
                        token = ""
                    else:
                        token += char

                # c++ style comment
                elif token.startswith("//"):
                    if newline:
                        token = char
                    else:
                        token += char

                # identifier
                elif token[0].isalpha():
                    if char.isalnum() or char == "_":
                        token += char
                    else:
                        tokens.append((self.filename, self.lineno, token))
                        token = char

                # number
                elif token[0].isdigit():
                    if char.upper() in "0123456789ABCDEFXUL.":
                        token += char
                    elif token.upper().endswith("E") and char in ["+", "-"]:
                        token += char
                    else:
                        tokens.append((self.filename, self.lineno, token))
                        token = char

                # string literal
                elif token.startswith('"'):
                    if char == '"' and previous_char != "\\":
                        token += char
                        tokens.append((self.filename, self.lineno, token))
                        token = ""
                    else:
                        # remove dummy space from the end of a line
                        if newline:
                            token = token[:-1]
                        previous_char = char
                        token += char

                # character literal
                elif token.startswith("'"):
                    if char == "'":
                        token += char
                        tokens.append((self.filename, self.lineno, token))
                        token = ""
                    else:
                        token += char

                # operator
                elif token in operators:
                    if token + char in operators:
                        token += char
                    else:
                        tokens.append((self.filename, self.lineno, token))
                        token = char

                else:
                    token = char

                newline = False
            self.lineno += 1

        self.tokens.extend(tokens)
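
Both abspath uses in this scanner resolve include directories: os.path.dirname(os.path.abspath(__file__)) locates the bundled "include" directory for <...> headers, while os.path.abspath(self.filename) anchors "..." includes to the including file. A minimal sketch of that lookup, with hypothetical arguments:

import os

def include_directory(including_file, system_header):
    # <...> headers live in an "include" directory shipped next to this
    # module; "..." headers are resolved relative to the including file.
    if system_header:
        here = os.path.dirname(os.path.abspath(__file__))
        return os.path.join(here, "include")
    return os.path.dirname(os.path.abspath(including_file))

print(include_directory("src/main.c", system_header=True))
print(include_directory("src/main.c", system_header=False))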

Example 23

Project: onionbalance
Source File: settings.py
View license
def generate_config():
    """
    Entry point for interactive config file generation.
    """

    # Parse initial command line options
    args = parse_cmd_args().parse_args()

    # Simplify the logging output for the command line tool
    logger = log.get_config_generator_logger()

    logger.info("Beginning OnionBalance config generation.")

    # If CLI options have been provided, don't enter interactive mode
    # Crude check to see if any options besides --verbosity are set.
    verbose = '-v' in sys.argv

    if ((len(sys.argv) > 1 and not verbose) or len(sys.argv) > 3 or
            args.no_interactive):
        interactive = False
        logger.info("Entering non-interactive mode.")
    else:
        interactive = True
        logger.info("No command line arguments found, entering interactive "
                    "mode.")

    logger.setLevel(logging.__dict__[args.verbosity.upper()])

    # Check if the output directory exists; if not, try to create it
    output_path = None
    if interactive:
        output_path = input("Enter path to store generated config "
                            "[{}]: ".format(os.path.abspath(args.output)))
    output_path = output_path or args.output
    try:
        util.try_make_dir(output_path)
    except OSError:
        logger.exception("Problem encountered when trying to create the "
                         "output directory %s.", os.path.abspath(output_path))
    else:
        logger.debug("Created the output directory '%s'.",
                     os.path.abspath(output_path))

    # The output directory should be empty to avoid having conflict keys
    # or config files.
    if not util.is_directory_empty(output_path):
        logger.error("The specified output directory is not empty. Please "
                     "delete any files and folders or specify another output "
                     "directory.")
        sys.exit(1)

    # Load master key if specified
    key_path = None
    if interactive:
        # Read key path from user
        key_path = input("Enter path to master service private key "
                         "(Leave empty to generate a key): ")
    key_path = args.key or key_path
    if key_path:
        if not os.path.isfile(key_path):
            logger.error("The specified master service private key '%s' "
                         "could not be found. Please confirm the path and "
                         "file permissions are correct.", key_path)
            sys.exit(1)
        else:
            # Try to load the specified private key file
            master_key = util.key_decrypt_prompt(key_path)
            if not master_key:
                logger.error("The specified master private key %s could not "
                             "be loaded.", os.path.abspath(master_key))
                sys.exit(1)
            else:
                master_onion_address = util.calc_onion_address(master_key)
                logger.info("Successfully loaded a master key for service "
                            "%s.onion.", master_onion_address)

    else:
        # No key specified, begin generating a new one.
        master_key = Crypto.PublicKey.RSA.generate(1024)
        master_onion_address = util.calc_onion_address(master_key)
        logger.debug("Created a new master key for service %s.onion.",
                     master_onion_address)

    # Finished loading/generating the master key; now try to generate keys for
    # each service instance
    num_instances = None
    if interactive:
        num_instances = input("Number of instance services to create "
                              "[{}]: ".format(args.num_instances))
        # Cast to int if a number was specified
        try:
            num_instances = int(num_instances)
        except ValueError:
            num_instances = None
    num_instances = num_instances or args.num_instances
    logger.debug("Creating %d service instances.", num_instances)

    tag = None
    if interactive:
        tag = input("Provide a tag name to group these instances "
                    "[{}]: ".format(args.tag))
    tag = tag or args.tag

    # Create HiddenServicePort line for instance torrc file
    service_virtual_port = None
    if interactive:
        service_virtual_port = input("Specify the service virtual port (for "
                                     "client connections) [{}]: ".format(
                                         args.service_virtual_port))
    service_virtual_port = service_virtual_port or args.service_virtual_port

    service_target = None
    if interactive:
        # In interactive mode, change default target to match the specified
        # virtual port
        default_service_target = u'127.0.0.1:{}'.format(service_virtual_port)
        service_target = input("Specify the service target IP and port (where "
                               "your service is listening) [{}]: ".format(
                                   default_service_target))
        service_target = service_target or default_service_target
    service_target = service_target or args.service_target
    torrc_port_line = u'HiddenServicePort {} {}'.format(service_virtual_port,
                                                        service_target)

    instances = []
    for i in range(0, num_instances):
        instance_key = Crypto.PublicKey.RSA.generate(1024)
        instance_address = util.calc_onion_address(instance_key)
        logger.debug("Created a key for instance %s.onion.",
                     instance_address)
        instances.append((instance_address, instance_key))

    # Write master service key to directory
    master_passphrase = None
    if interactive:
        master_passphrase = getpass.getpass(
            "Provide an optional password to encrypt the master private "
            "key (Not encrypted if no password is specified): ")
    master_passphrase = master_passphrase or args.password

    # Finished reading input, starting to write config files.
    master_dir = os.path.join(output_path, 'master')
    util.try_make_dir(master_dir)
    master_key_file = os.path.join(master_dir,
                                   '{}.key'.format(master_onion_address))
    with open(master_key_file, "wb") as key_file:
        os.chmod(master_key_file, 384)  # chmod 0600 in decimal
        key_file.write(master_key.exportKey(passphrase=master_passphrase))
        logger.debug("Successfully wrote master key to file %s.",
                     os.path.abspath(master_key_file))

    # Create YAML OnionBalance settings file for these instances
    service_data = {'key': '{}.key'.format(master_onion_address)}
    service_data['instances'] = [{'address': address,
                                  'name': '{}{}'.format(tag, i+1)} for
                                 i, (address, _) in enumerate(instances)]
    settings_data = {'services': [service_data]}
    config_yaml = yaml.dump(settings_data, default_flow_style=False)

    config_file_path = os.path.join(master_dir, 'config.yaml')
    with open(config_file_path, "w") as config_file:
        config_file.write(u"# OnionBalance Config File\n")
        config_file.write(config_yaml)
        logger.info("Wrote master service config file '%s'.",
                    os.path.abspath(config_file_path))

    # Write master service torrc
    master_torrc_path = os.path.join(master_dir, 'torrc-server')
    master_torrc_template = pkg_resources.resource_string(__name__,
                                                          'data/torrc-server')
    with open(master_torrc_path, "w") as master_torrc_file:
        master_torrc_file.write(master_torrc_template.decode('utf-8'))

    # Try to generate config files for each service instance
    for i, (instance_address, instance_key) in enumerate(instances):
        # Create a numbered directory for instance
        instance_dir = os.path.join(output_path, '{}{}'.format(tag, i+1))
        instance_key_dir = os.path.join(instance_dir, instance_address)
        util.try_make_dir(instance_key_dir)
        os.chmod(instance_key_dir, 1472)  # chmod 2700 in decimal

        instance_key_file = os.path.join(instance_key_dir, 'private_key')
        with open(instance_key_file, "wb") as key_file:
            os.chmod(instance_key_file, 384)  # chmod 0600 in decimal
            key_file.write(instance_key.exportKey())
            logger.debug("Successfully wrote key for instance %s.onion to "
                         "file.", instance_address)

        # Write torrc file for each instance
        instance_torrc = os.path.join(instance_dir, 'instance_torrc')
        instance_torrc_template = pkg_resources.resource_string(
            __name__, 'data/torrc-instance')
        with open(instance_torrc, "w") as torrc_file:
            torrc_file.write(instance_torrc_template.decode('utf-8'))
            # The ./ relative path prevents Tor from raising relative
            # path warnings. The relative path may need to be edited manually
            # to work on Windows systems.
            torrc_file.write(u"HiddenServiceDir {}\n".format(
                instance_address))
            torrc_file.write(u"{}\n".format(torrc_port_line))

    # Output final status message
    logger.info("Done! Successfully generated an OnionBalance config and %d "
                "instance keys for service %s.onion.",
                num_instances, master_onion_address)

    sys.exit(0)
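
Here os.path.abspath is used almost entirely for presentation: prompts and log lines echo the fully resolved output path back to the user, while the code keeps working with the short form the user typed. A minimal sketch of that reporting pattern, with a hypothetical path:

import logging
import os

logging.basicConfig(level=logging.DEBUG)
output_path = 'config'  # possibly relative, exactly as the user typed it

# Work with the short form internally, but log the resolved location so
# the user knows exactly where files were written.
logging.debug("Created the output directory '%s'.", os.path.abspath(output_path))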

Example 24

Project: HealthStarter
Source File: install.py
View license
    def run(self, options, args):
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.download_dir:
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.download_cache:
            warnings.warn(
                "--download-cache has been deprecated and will be removed in "
                "the future. Pip now automatically uses and configures its "
                "cache.",
                RemovedInPip8Warning,
            )

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, index_urls, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                if hasattr(req, 'installed_version'):
                                    if req.installed_version:
                                        item += '-' + req.installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']

            for item in os.listdir(lib_dir):
                target_item_dir = os.path.join(options.target_dir, item)
                if os.path.exists(target_item_dir):
                    if not options.upgrade:
                        logger.warning(
                            'Target directory %s already exists. Specify '
                            '--upgrade to force replacement.',
                            target_item_dir
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            'Target directory %s already exists and is '
                            'a link. Pip will not automatically replace '
                            'links, please remove if replacement is '
                            'desired.',
                            target_item_dir
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(
                    os.path.join(lib_dir, item),
                    target_item_dir
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
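
Note the guard before the abspath call in this version: options.build_dir is only normalised when it is actually set, since os.path.abspath(None) raises a TypeError. A minimal sketch of that guard, with a hypothetical variable:

import os

build_dir = None  # simulating an option the user did not pass

# Only normalise when a value was supplied; os.path.abspath(None) raises.
if build_dir:
    build_dir = os.path.abspath(build_dir)
print(build_dir)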

Example 25

Project: pip-update-requirements
Source File: install.py
View license
    def run(self, options, args):
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                if hasattr(req, 'installed_version'):
                                    if req.installed_version:
                                        item += '-' + req.installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']

            for item in os.listdir(lib_dir):
                target_item_dir = os.path.join(options.target_dir, item)
                if os.path.exists(target_item_dir):
                    if not options.upgrade:
                        logger.warning(
                            'Target directory %s already exists. Specify '
                            '--upgrade to force replacement.',
                            target_item_dir
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            'Target directory %s already exists and is '
                            'a link. Pip will not automatically replace '
                            'links, please remove if replacement is '
                            'desired.',
                            target_item_dir
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(
                    os.path.join(lib_dir, item),
                    target_item_dir
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
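
The target-directory handling shows abspath paired with validation: the path is normalised first, then checked so that an existing non-directory at that location is rejected early. A minimal sketch of that normalise-then-validate step, with a hypothetical target:

import os

target_dir = os.path.abspath('dist-target')  # hypothetical --target value

# Normalise first, then validate: an existing file (not a directory) at the
# target path is an error, mirroring the CommandError raised above.
if os.path.exists(target_dir) and not os.path.isdir(target_dir):
    raise SystemExit("Target path exists but is not a directory.")
print(target_dir)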

Example 26

Project: GrepBugs
Source File: grepbugs.py
View license
def repo_scan(repo, account, force, no_reports):
	"""
	Check code out from a remote repo and scan it
	"""
	try:
		db  = lite.connect(dbfile)
		cur = db.cursor()

	except lite.Error as e:
		print 'Error connecting to db file'
		logging.debug('Error connecting to db file' + str(e))
		sys.exit(1)

	params = [repo]
	cur.execute("SELECT command, checkout_url, api_url FROM repo_sites WHERE site=? LIMIT 1;", params)
	rows = cur.fetchall()

	for row in rows:
		api_url = row[2].replace('ACCOUNT', account)

		if 'github' == repo:
			page = 1
			
			# call api_url
			# if request fails, try 3 times
			count     = 0
			max_tries = 3
			logging.info('Calling github api for ' + api_url)
			while count < max_tries:
				try:
					r    = requests.get(api_url + '?page=' + str(page) + '&per_page=100')
					
					if 200 != r.status_code:
						raise ValueError('Request failed!', r.status_code)

					data = r.json()

					# no exceptions so break out of while loop
					break
				
				except ValueError as e:
					count = count + 1
					logging.debug(str(e.args))
					time.sleep(5)

				except requests.ConnectionError as e:
					count = count + 1
					if count <= max_tries:
						logging.warning('Error retrieving repo data: ConnectionError (attempt ' + str(count) + ' of ' + str(max_tries) + '): ' + str(e))
						time.sleep(3) # take a break, throttle a bit
				
				except requests.HTTPError as e:
					count = count + 1
					if count <= max_tries:
						logging.warning('Error retrieving repo data: HTTPError (attempt ' + str(count) + ' of ' + str(max_tries) + '): ' + str(e))
						time.sleep(3) # take a break, throttle a bit
			
				except requests.Timeout as e:
					count = count + 1
					if count <= max_tries:
						logging.warning('Error retrieving repo data: Timeout (attempt ' + str(count) + ' of ' + str(max_tries) + '): ' + str(e))
						time.sleep(3) # take a break, throttle a bit

				except Exception as e:
					print 'CRITICAL: Unhandled exception occurred! Quitters gonna quit! See log file for details.'
					logging.critical('Unhandled exception: ' + str(e))
					sys.exit(1)

			if count == max_tries:
				# repo data was never retrieved; give up rather than scan with nothing.
				logging.critical('Error retrieving data from github api (no more tries left): ' + str(e))
				sys.exit(1)

			while len(data):
				print 'Get page: ' + str(page)
				for i in range(0, len(data)):
					do_scan        = True
					project_name   = data[i]["name"]
					default_branch = data[i]["default_branch"]
					last_scanned   = last_scan(repo, account, project_name)
					last_changed   = datetime.datetime.strptime(data[i]['pushed_at'], "%Y-%m-%dT%H:%M:%SZ")
					checkout_url   = 'https://github.com/' + account + '/' + project_name + '.git'
					cmd            = 'git'

					print project_name + ' last changed on ' + str(last_changed) + ' and last scanned on ' + str(last_scanned)

					if None != last_scanned:
						if last_changed < last_scanned:
							do_scan = False
							time.sleep(1) # throttle requests; github could be temperamental

					if True == force:
						do_scan = True

					if True == do_scan:
						checkout_code(cmd, checkout_url, account, project_name)
						# scan local files
						local_scan(os.path.dirname(os.path.abspath(__file__)) + '/remotesrc/' + account + '/' + project_name, repo, account, project_name, default_branch, no_reports)
						# clean up because of big projects and stuff
						call(['rm', '-rf', os.path.dirname(os.path.abspath(__file__)) + '/remotesrc/' + account + '/' + project_name])
						
				# get next page of projects
				page += 1
				r    = requests.get(api_url + '?page=' + str(page) + '&per_page=100')
				data = r.json()

		elif 'bitbucket' == repo:
			# call api_url
			r    = requests.get(api_url)
			data = r.json()
			
			for j in range(0, len(data["values"])):
				value =  data["values"][j]

				if 'git' == value['scm']:
					do_scan      = True
					project_name = str(value['full_name']).split('/')[1]
					last_scanned = last_scan(repo, account, project_name)
					date_split   = str(value['updated_on']).split('.')[0]
					last_changed = datetime.datetime.strptime(date_split, "%Y-%m-%dT%H:%M:%S")
					checkout_url = 'https://bitbucket.org/' + value['full_name']
					cmd          = 'git'

					print project_name + ' last changed on ' + str(last_changed) + ' and last scanned on ' + str(last_scanned)

					if None != last_scanned:
						if last_changed < last_scanned:
							do_scan = False

					if True == do_scan:
						checkout_code(cmd, checkout_url, account, project_name)
						# scan local files
						local_scan(os.path.dirname(os.path.abspath(__file__)) + '/remotesrc/' + account + '/' + project_name, repo, account, project_name, 'none', no_reports)

		elif 'sourceforge' == repo:
			message = 'Support for sourceforge removed because of http://seclists.org/nmap-dev/2015/q2/194. You should move your project to another hosting site, such as GitHub or BitBucket.'
			logging.debug(message)
			print message
			"""
			# call api_url
			r    = requests.get(api_url)
			data = r.json()
			
			for i in data['projects']:
				do_scan      = True
				project_name = i["url"].replace('/p/', '').replace('/', '')
				cmd          = None 
				r            = requests.get('https://sourceforge.net/rest' + i['url'])
				project_json = r.json()
				for j in project_json:
					for t in project_json['tools']:
						if 'code' == t['mount_point']:
							if 'git' == t['name']:
								cmd          = 'git'
								checkout_url = 'git://git.code.sf.net/p/' + str(project_name).lower() + '/code'
							elif 'svn' == t['name']:
								cmd          = 'svn'
								checkout_url = 'svn://svn.code.sf.net/p/' + str(project_name).lower() + '/code'

				last_scanned = last_scan(repo, account, project_name)
				date_split   = i['last_updated'].split('.')[0]
				last_changed = datetime.datetime.strptime(date_split, "%Y-%m-%d %H:%M:%S")

				print project_name + ' last changed on ' + str(last_changed) + ' and last scanned on ' + str(last_scanned)

				if None != last_scanned:
					if last_changed < last_scanned:
						do_scan = False

				if True == do_scan:
					if None != cmd:
						checkout_code(cmd, checkout_url, account, project_name)
						# scan local files
						local_scan(os.path.dirname(os.path.abspath(__file__)) + '/remotesrc/' + account + '/' + project_name, repo, account, project_name)
					else:
						print 'No sourceforge repo for ' + account + ' ' + project_name
			"""

		db.close()
		# clean up
		try:
			shutil.rmtree(os.path.dirname(os.path.abspath(__file__)) + '/remotesrc/' + account)  # anchor to the script's directory, not its file path
		except Exception as e:
			logging.debug('Error removing directory: ' + str(e))
		
		print 'SCAN COMPLETE!'
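
The scan and cleanup paths are all anchored to the script's own directory via os.path.dirname(os.path.abspath(__file__)); note that os.path.abspath(__file__) alone still ends in the filename, which is why the cleanup call above needs the dirname wrapper. A minimal sketch of the anchoring, with a hypothetical account name:

import os

# dirname(abspath(__file__)) is the script's directory; abspath(__file__)
# alone would still include the script's own filename.
base_dir = os.path.dirname(os.path.abspath(__file__))
remote_src = os.path.join(base_dir, 'remotesrc', 'someaccount')
print(remote_src)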

Example 27

Project: datafari
Source File: install.py
View license
    def run(self, options, args):
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.download_dir:
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.download_cache:
            warnings.warn(
                "--download-cache has been deprecated and will be removed in "
                "the future. Pip now automatically uses and configures its "
                "cache.",
                RemovedInPip8Warning,
            )

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, index_urls, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                if hasattr(req, 'installed_version'):
                                    if req.installed_version:
                                        item += '-' + req.installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']

            for item in os.listdir(lib_dir):
                target_item_dir = os.path.join(options.target_dir, item)
                if os.path.exists(target_item_dir):
                    if not options.upgrade:
                        logger.warning(
                            'Target directory %s already exists. Specify '
                            '--upgrade to force replacement.',
                            target_item_dir
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            'Target directory %s already exists and is '
                            'a link. Pip will not automatically replace '
                            'links, please remove if replacement is '
                            'desired.',
                            target_item_dir
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(
                    os.path.join(lib_dir, item),
                    target_item_dir
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
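
The pattern worth extracting from this example is that pip converts every user-supplied directory option (--build, --src, --target) with os.path.abspath as soon as the options are read, so every later stage can assume absolute paths. A minimal sketch of the same normalization step using plain argparse (option names are illustrative, not pip's parser):

    import os
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--build-dir')
    parser.add_argument('--src-dir', default='src')
    options = parser.parse_args([])

    # Normalize once, up front; downstream code never sees a relative path.
    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)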

Example 28

Project: Flask
Source File: install.py
View license
    def run(self, options, args):

        if (
            options.no_install or
            options.no_download or
            (options.build_dir != build_prefix) or
            options.no_clean
        ):
            logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
                              'and --no-clean are deprecated.  See https://github.com/pypa/pip/issues/906.')

        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
                raise CommandError("Target path exists but is not a directory, will not continue.")
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                        "--use-mirrors has been deprecated and will be removed"
                        " in the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                        "--mirrors has been deprecated and will be removed in "
                        " the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site,
            target_dir=temp_target_dir,
            session=session,
            pycompile=options.compile,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options, session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        try:
            if not options.no_download:
                requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
            else:
                requirement_set.locate_files()

            if not options.no_install and not self.bundle:
                requirement_set.install(install_options, global_options, root=options.root_path)
                installed = ' '.join([req.name for req in
                                      requirement_set.successfully_installed])
                if installed:
                    logger.notify('Successfully installed %s' % installed)
            elif not self.bundle:
                downloaded = ' '.join([req.name for req in
                                       requirement_set.successfully_downloaded])
                if downloaded:
                    logger.notify('Successfully downloaded %s' % downloaded)
            elif self.bundle:
                requirement_set.create_bundle(self.bundle_filename)
                logger.notify('Created bundle in %s' % self.bundle_filename)
        except PreviousBuildDirError:
            options.no_clean = True
            raise
        finally:
            # Clean up
            if (not options.no_clean) and ((not options.no_install) or options.download_dir):
                requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(
                    os.path.join(lib_dir, item),
                    os.path.join(options.target_dir, item)
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set
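
The --target-dir branch above shows a second reason to absolutize early: installation actually happens into a temporary staging directory, and files are only moved into the real target afterwards, by which point a relative target could be ambiguous. A condensed sketch of that stage-then-move flow (directory names are illustrative):

    import os
    import shutil
    import tempfile

    target_dir = os.path.abspath('vendored')  # resolve before any staging work
    staging = tempfile.mkdtemp()
    try:
        # ... install packages into `staging` here ...
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        for item in os.listdir(staging):
            shutil.move(os.path.join(staging, item), os.path.join(target_dir, item))
    finally:
        shutil.rmtree(staging, ignore_errors=True)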

Example 29

Project: matlab2cpp
Source File: __init__.py
View license
def main(args):
    """
Initiate the interpretation and conversion process.

Args:
    args (ArgumentParser): arguments parsed through m2cpp
    """

    builder = tree.builder.Builder(disp=args.disp, comments=args.comments,
                                   original=args.original, enable_omp=args.enable_omp, enable_tbb=args.enable_tbb)

    paths_from_file = []
    #read setpath.m file and return string list of paths
    if args.paths_file:
        import setpaths
        paths_from_file = setpaths.multiple_folder_paths(args.paths_file)

    pathOne = os.path.dirname(os.path.abspath(args.filename))

    if os.path.isfile(args.filename):
        paths = [os.path.abspath(os.path.dirname(args.filename))] + paths_from_file

        if args.disp:
            print "building tree..."

        filenames = [os.path.abspath(args.filename)]

        stack = []
        while filenames:

            filename = filenames.pop(0)
            assert os.path.isfile(filename)

            if filename in stack:
                continue

            if args.disp:
                print "loading", filename

            stack.append(filename)

            f = open(filename, "rU")
            code = f.read()
            f.close()

            #code = re.sub('%#', '##', code)

            #Here you have to change filename to current folder for .py files
            #local_name = pathOne + sep + os.path.basename(filename)
            local_name = os.getcwd() + sep + os.path.basename(filename)
            
            if os.path.isfile(local_name + ".py") and not args.reset:

                try:
                    cfg = imp.load_source("cfg", local_name + ".py")

                except:
                    raise ImportError("""Supplement file:
    %s.py
    is formatted incorrectly. Change the format or convert with '-r' option to create
    a new file.""" % local_name)

                if "verbatims" in cfg.__dict__ and cfg.verbatims:
                    verbatims = cfg.verbatims
                    code = supplement.verbatim.set(verbatims, code)

                builder.load(filename, code)
                program = builder[-1]

                if "functions" in cfg.__dict__:

                    funcs = program.ftypes

                    for name in funcs.keys():
                        if name in cfg.functions:
                            for key in cfg.functions[name].keys():
                                funcs[name][key] = cfg.functions[name][key]

                    program.ftypes = funcs

                if "structs" in cfg.__dict__:

                    structs = program.stypes

                    for name in structs.keys():
                        if name in cfg.structs:
                            for key in cfg.structs[name].keys():
                                structs[name][key] = cfg.structs[name][key]

                    program.stypes = structs

                if "includes" in cfg.__dict__:

                    includes = program.itypes

                    for key in cfg.includes:
                        if key not in includes:
                            includes.append(key)

                    program.itypes = includes

            else:
                builder.load(filename, code)
                program = builder[-1]

            # add unknown variables to the stack if they exist as files
            unknowns = builder.get_unknowns(filename)

            for i in xrange(len(unknowns)-1, -1, -1):
                #print i
                for path in paths:
                    #print path
                    if os.path.isfile(path + sep + unknowns[i] + ".m"):
                        unknowns[i] = unknowns[i] + ".m"
                    if os.path.isfile(path + sep + unknowns[i]):
                        program.include(path + sep + unknowns[i])
                        #filenames.append(path + sep + unknowns.pop(i))
                        filenames.append(path + sep + unknowns[i])


    else:
        builder.load("unnamed", args.filename)
        program = builder[-1]

    #--- work in progress ---
    #Run this mlabwrap code
    #Have this in a try-except block
    #import mwrapmat
    #wrapmat = mwrapmat.Wrapmat()
    #wrapmat.eval_code(builder)
    #------------------------

    #--- work in progress ---
    #Get data types from matlab
    if args.matlab_suggest:
        import matlab_types
        builder = matlab_types.mtypes(builder, args)
    #------------------------

    if args.disp:
        print "configure tree"

    builder.configure(suggest=(2*args.suggest or args.matlab_suggest))

    #--- work in progress ---
    #Modify the Abstract Syntax Tree (AST)
    import modify
    builder.project = modify.transform_AST(builder.project, args.nargin)
    #------------------------
    
    if args.disp:
        print builder.project.summary()
        print "generate translation"

    builder.project.translate(args)

    t = time.time()
    stamp = date.fromtimestamp(t).strftime('%Y-%m-%d %H:%M:%S')

    for program in builder.project:

        #name = program.name
        #if os.path.isfile(args.filename):
        #    name = pathOne + sep + os.path.basename(name)
            #print name
        name = os.getcwd() + sep + os.path.basename(program.name)
        #print name

        cpp = qfunctions.qcpp(program)
        hpp = qfunctions.qhpp(program)
        py = qfunctions.qpy(program, prefix=True)
        log = qfunctions.qlog(program)

        if args.disp:
            print "Writing files..."

        if args.reset:
            for ext in [".cpp", ".hpp", ".log", ".py"]:
                if os.path.isfile(name+ext):
                    os.remove(name+ext)

        if cpp:
            cpp = """// Automatically translated using Matlab2cpp %g on %s

%s""" % (__version__, stamp, cpp)
            f = open(name+".cpp", "w")
            f.write(cpp)
            f.close()

        if hpp:
            hpp = """// Automatically translated using Matlab2cpp %g on %s
            
%s""" % (__version__, stamp, hpp)
            f = open(name+".hpp", "w")
            f.write(hpp)
            f.close()

        if log:
            log = "Automatically translated using Matlab2cpp %g on %s\n\n%s"\
                    % (__version__, stamp, log)
            f = open(name+".log", "w")
            f.write(log)
            f.close()

        if py:
            py = """# Automatically translated using Matlab2cpp %g on %s
#
%s""" % (__version__, stamp, py)
            f = open(name+".py", "w")
            f.write(py)
            f.close()

        if os.path.isfile(name+".pyc"):
            os.remove(name+".pyc")


    program = builder[0]

    if args.tree_full:
        print program.summary(args)

    elif args.tree:
        if program[1][0].cls == "Main":
            print program[1][0][3].summary(args)
        else:
            print program[1].summary(args)

    elif args.line:
        nodes = program[1].flatten(False, False, False)
        for node_ in nodes:
            if node_.line == args.line and node_.cls != "Block":
                print node_.str.replace("__percent__", "%")
                break
    else:
        print program[1].str.replace("__percent__", "%")
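
Both spellings used above, os.path.dirname(os.path.abspath(args.filename)) and os.path.abspath(os.path.dirname(args.filename)), resolve to the same directory when the argument names a file, because os.path.abspath(p) is essentially os.path.normpath(os.path.join(os.getcwd(), p)). A small sketch of the equivalence (the path is illustrative; note that neither form resolves symlinks, for which os.path.realpath would be needed):

    import os

    p = 'scripts/demo.m'  # hypothetical relative path to an input file
    assert os.path.dirname(os.path.abspath(p)) == os.path.abspath(os.path.dirname(p))
    # Both yield <cwd>/scripts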

Example 31

Project: kbengine
Source File: install.py
View license
    def run(self, options, args):

        if (
            options.no_install or
            options.no_download or
            (options.build_dir != build_prefix) or
            options.no_clean
        ):
            logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
                              'and --no-clean are deprecated.  See https://github.com/pypa/pip/issues/906.')

        if options.download_dir:
            options.no_install = True
            options.ignore_installed = True
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if virtualenv_no_global():
                raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
            install_options.append('--user')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
                raise CommandError("Target path exists but is not a directory, will not continue.")
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                        "--use-mirrors has been deprecated and will be removed"
                        " in the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                        "--mirrors has been deprecated and will be removed in "
                        " the future. Explicit uses of --index-url and/or "
                        "--extra-index-url is suggested.")
            index_urls += options.mirrors

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)

        requirement_set = RequirementSet(
            build_dir=options.build_dir,
            src_dir=options.src_dir,
            download_dir=options.download_dir,
            download_cache=options.download_cache,
            upgrade=options.upgrade,
            as_egg=options.as_egg,
            ignore_installed=options.ignore_installed,
            ignore_dependencies=options.ignore_dependencies,
            force_reinstall=options.force_reinstall,
            use_user_site=options.use_user_site,
            target_dir=temp_target_dir,
            session=session,
            pycompile=options.compile,
        )
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for name in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder, options=options, session=session):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            if options.find_links:
                msg = ('You must give at least one requirement to %(name)s '
                       '(maybe you meant "pip %(name)s %(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warn(msg)
            return

        try:
            if not options.no_download:
                requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
            else:
                requirement_set.locate_files()

            if not options.no_install and not self.bundle:
                requirement_set.install(install_options, global_options, root=options.root_path)
                installed = ' '.join([req.name for req in
                                      requirement_set.successfully_installed])
                if installed:
                    logger.notify('Successfully installed %s' % installed)
            elif not self.bundle:
                downloaded = ' '.join([req.name for req in
                                       requirement_set.successfully_downloaded])
                if downloaded:
                    logger.notify('Successfully downloaded %s' % downloaded)
            elif self.bundle:
                requirement_set.create_bundle(self.bundle_filename)
                logger.notify('Created bundle in %s' % self.bundle_filename)
        except PreviousBuildDirError:
            options.no_clean = True
            raise
        finally:
            # Clean up
            if (not options.no_clean) and ((not options.no_install) or options.download_dir):
                requirement_set.cleanup_files(bundle=self.bundle)

        if options.target_dir:
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                shutil.move(
                    os.path.join(lib_dir, item),
                    os.path.join(options.target_dir, item)
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set

Example 32

Project: lago
Source File: cmd.py
View license
@lago.plugins.cli.cli_plugin(
    help='Initialize a directory for framework deployment'
)
@lago.plugins.cli.cli_plugin_add_argument(
    'virt_config',
    help=(
        'Configuration of resources to deploy, json and yaml file formats '
        'are supported; takes precedence over workdir. Will use '
        '$PWD/LagoInitFile by default. You can use any env vars in that file, '
        'including the extra ones LAGO_PREFIX_PATH, LAGO_WORKDIR_PATH and '
        'LAGO_INITFILE_PATH'
    ),
    metavar='VIRT_CONFIG',
    type=os.path.abspath,
    nargs='?',
    default=None,
)
@lago.plugins.cli.cli_plugin_add_argument(
    'workdir',
    help=(
        'Workdir directory of the deployment; if none is passed, it will use '
        '$PWD/.lago'
    ),
    metavar='WORKDIR',
    type=os.path.abspath,
    nargs='?',
    default=None,
)
@lago.plugins.cli.cli_plugin_add_argument(
    '--template-repo-path',
    help='Repo file describing the templates',
    default='http://templates.ovirt.org/repo/repo.metadata',
)
@lago.plugins.cli.cli_plugin_add_argument(
    '--template-repo-name',
    help='Name of the repo from the template repos dir',
)
@lago.plugins.cli.cli_plugin_add_argument(
    '--template-store',
    help='Location to store templates at',
    default='/var/lib/lago/store',
    type=os.path.abspath,
)
@lago.plugins.cli.cli_plugin_add_argument(
    '--template-repos',
    help='Location to store repos',
    default='/var/lib/lago/repos',
    type=os.path.abspath,
)
@lago.plugins.cli.cli_plugin_add_argument(
    '--set-current',
    action='store_true',
    help='If passed, it will set the newly created prefix as the current one',
)
@lago.plugins.cli.cli_plugin_add_argument(
    '--skip-bootstrap',
    action='store_true',
    help=(
        'If passed, will skip bootstrapping the images, useful if you are '
        'using templates and you already know they will have the correct '
        'root pass for example'
    ),
)
@log_utils.log_task('Initialize and populate prefix', LOGGER)
def do_init(
    workdir,
    virt_config,
    prefix_name='default',
    template_repo_path=None,
    template_repo_name=None,
    template_store=None,
    template_repos=None,
    set_current=False,
    skip_bootstrap=False,
    **kwargs
):

    if virt_config is None and workdir is not None:
        virt_config = workdir
        workdir = None

    if workdir is None:
        workdir = os.path.abspath('.lago')

    if virt_config is None:
        virt_config = os.path.abspath('LagoInitFile')

    os.environ['LAGO_INITFILE_PATH'] = os.path.dirname(
        os.path.abspath(virt_config)
    )

    if prefix_name == 'current':
        prefix_name = 'default'

    LOGGER.debug('Using workdir %s', workdir)
    workdir = lago_workdir.Workdir(workdir)
    if not os.path.exists(workdir.path):
        LOGGER.debug(
            'Initializing workdir %s with prefix %s',
            workdir.path,
            prefix_name,
        )
        prefix = workdir.initialize(prefix_name)
    else:
        LOGGER.debug(
            'Adding prefix %s to workdir %s',
            prefix_name,
            workdir.path,
        )
        prefix = workdir.add_prefix(prefix_name)

    log_utils.setup_prefix_logging(prefix.paths.logs())

    try:
        if template_repo_path:
            repo = lago.templates.TemplateRepository.from_url(
                template_repo_path
            )
        else:
            if template_repo_name:
                repo = lago.templates.find_repo_by_name(
                    name=template_repo_name
                )

            else:
                raise RuntimeError(
                    'No template repo was configured or specified'
                )

        store = lago.templates.TemplateStore(template_store)

        with open(virt_config, 'r') as virt_fd:
            prefix.virt_conf_from_stream(
                virt_fd,
                repo,
                store,
                do_bootstrap=not skip_bootstrap,
            )

        if set_current:
            workdir.set_current(new_current=prefix_name)

    except:
        shutil.rmtree(prefix.paths.prefixed(''), ignore_errors=True)
        raise
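
The distinctive trick in this example is passing os.path.abspath itself as the type= converter, so path arguments are normalized at parse time instead of being converted piecemeal later. The same works with plain argparse (the option name is borrowed from the example above; this is a sketch, not lago's CLI machinery):

    import os
    import argparse

    parser = argparse.ArgumentParser()
    # Whatever the user types, the parsed value is already absolute.
    parser.add_argument('--template-store', type=os.path.abspath,
                        default='/var/lib/lago/store')

    args = parser.parse_args(['--template-store', 'relative/store'])
    assert os.path.isabs(args.template_store)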

Example 33

Project: grab-site
Source File: main.py
View license
@click.command()

@click.option('--concurrency', default=2, metavar='NUM',
	help='Use this many connections to fetch in parallel (default: 2).')

@click.option('--concurrent', default=-1, metavar='NUM',
	help='Alias for --concurrency.')

@click.option('--delay', default="0", metavar='DELAY',
	help=
		'Time to wait between requests, in milliseconds (default: 0).  '
		'Can be "NUM", or "MIN-MAX" to use a random delay between MIN and MAX '
		'for each request.  Delay applies to each concurrent fetcher, not globally.')

@click.option('--recursive/--1', default=True,
	help=
		'--recursive (default: true) to crawl under last /path/ component '
		'recursively, or --1 to get just START_URL.')

@click.option('--offsite-links/--no-offsite-links', default=True,
	help=
		'--offsite-links (default: true) to grab all links to a depth of 1 '
		'on other domains, or --no-offsite-links to disable.')

@click.option('--igsets', default="", metavar='LIST',
	help='Comma-separated list of ignore sets to use in addition to "global".')

@click.option('--ignore-sets', default="", metavar='LIST',
	help='Alias for --igsets.')

@click.option('--igon/--igoff', default=False,
	help=
		'--igon (default: false) to print all URLs being ignored to the terminal '
		'and dashboard.')

@click.option('--video/--no-video', default=True,
	help=
		'--no-video (default: false) to skip the download of videos by both '
		'mime type and file extension.  Skipped videos are logged to '
		'DIR/skipped_videos')

@click.option('-i', '--input-file', default=None, type=str,
	help=
		'Load list of URLs-to-grab from a local file or from a URL; like wget -i. '
		'File must be a newline-delimited list of URLs. '
		'Combine with --1 to avoid a recursive crawl on each URL.')

@click.option('--max-content-length', default=-1, metavar='N',
	help=
		"Skip the download of any response that claims a Content-Length "
		"larger than N (default: -1, don't skip anything).")

@click.option('--level', default="inf", metavar='NUM',
	help='Recurse this many levels (default: inf).')

@click.option('--page-requisites-level', default="5", metavar='NUM',
	help='Recurse this many levels for page requisites (default: 5).')

@click.option('--warc-max-size', default=5368709120, metavar='BYTES',
	help=
		'Try to limit each WARC file to around BYTES bytes before rolling over '
		'to a new WARC file (default: 5368709120, which is 5GiB).')

@click.option('--ua', default="Mozilla/5.0 (Windows NT 6.3; WOW64; rv:49.0) Gecko/20100101 Firefox/49.0",
	metavar='STRING', help='Send User-Agent: STRING instead of pretending to be Firefox on Windows.')

@click.option('--wpull-args', default="",
	metavar='ARGS', help=
		r'String containing additional arguments to pass to wpull; '
		r'see ~/.local/bin/wpull --help.  ARGS is split with shlex.split '
		r'and individual arguments can contain spaces if quoted, e.g. '
		r'--wpull-args="--youtube-dl \"--youtube-dl-exe=/My Documents/youtube-dl\""')

@click.option('--sitemaps/--no-sitemaps', default=True,
	help=
		'--sitemaps (default: true) to queue URLs from sitemap.xml '
		'at the root of the site, or --no-sitemaps to disable.')

@click.option('--dupespotter/--no-dupespotter', default=True,
	help=
		'--dupespotter (default: true) to skip the extraction of links '
		'from pages that look like duplicates of earlier pages, or '
		'--no-dupespotter to disable.  Disable this for sites that are '
		'directory listings.')

@click.option('--id', default=None, type=str, metavar='ID',
	help=
		'Use id ID for the crawl instead of a random 128-bit id. '
		'This must be unique for every crawl.')

@click.option('--dir', default=None, type=str, metavar='DIR', help=
	'Put control files, temporary files, and unfinished WARCs in DIR '
	'(default: a directory name based on the URL, date, and first 8 '
	'characters of the id).')

@click.option('--finished-warc-dir', default=None, type=str, metavar='FINISHED_WARC_DIR',
	help='Move finished .warc.gz and .cdx files to this directory.')

@click.option('--custom-hooks', default=None, type=str, metavar='PY_SCRIPT',
	help=
		'Copy PY_SCRIPT to DIR/custom_hooks.py, then exec DIR/custom_hooks.py '
		'on startup and every time it changes.  The script gets a `wpull_hook` '
		'global that can be used to change crawl behavior.  '
		'See libgrabsite/wpull_hooks.py and extra_docs/custom_hooks_sample.py.')

@click.option('--which-wpull-args-partial', is_flag=True,
	help=
		'Print a partial list of wpull arguments that would be used and exit.  '
		'Excludes grab-site-specific features, and removes DIR/ from paths.  '
		'Useful for reporting bugs on wpull without grab-site involvement.')

@click.option('--which-wpull-command', is_flag=True,
	help=
		"Populate DIR/ but don't start wpull; instead print the command that would "
		"have been used to start wpull with all of the grab-site functionality.")

@click.option('--version', is_flag=True, callback=print_version,
	expose_value=False, is_eager=True, help='Print version and exit.')

@click.argument('start_url', nargs=-1, required=False)

def main(concurrency, concurrent, delay, recursive, offsite_links, igsets,
ignore_sets, igon, video, level, page_requisites_level, max_content_length,
sitemaps, dupespotter, warc_max_size, ua, input_file, wpull_args, start_url,
id, dir, finished_warc_dir, custom_hooks, which_wpull_args_partial,
which_wpull_command):
	if not (input_file or start_url):
		print("Neither a START_URL or --input-file= was specified; see --help", file=sys.stderr)
		sys.exit(1)
	elif input_file and start_url:
		print("Can't specify both START_URL and --input-file=; see --help", file=sys.stderr)
		sys.exit(1)

	span_hosts_allow = "page-requisites,linked-pages"
	if not offsite_links:
		span_hosts_allow = "page-requisites"

	if concurrent != -1:
		concurrency = concurrent

	if ignore_sets != "":
		igsets = ignore_sets

	if start_url:
		claim_start_url = start_url[0]
	else:
		input_file_is_remote = bool(re.match("^(ftp|https?)://", input_file))
		if input_file_is_remote:
			claim_start_url = input_file
		else:
			claim_start_url = 'file://' + os.path.abspath(input_file)

	if not id:
		id = binascii.hexlify(os.urandom(16)).decode('utf-8')
	ymd = datetime.datetime.utcnow().isoformat()[:10]
	no_proto_no_trailing = claim_start_url.split('://', 1)[1].rstrip('/')[:100]
	warc_name = "{}-{}-{}".format(re.sub('[^-_a-zA-Z0-9%\.,;@+=]', '-', no_proto_no_trailing).lstrip('-'), ymd, id[:8])

	def get_base_wpull_args():
		return ["-U", ua,
			"--header=Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
			"--header=Accept-Language: en-US,en;q=0.5",
			"--no-check-certificate",
			"--no-robots",
			"--inet4-only",
			"--dns-timeout", "20",
			"--connect-timeout", "20",
			"--read-timeout", "900",
			"--session-timeout", str(86400 * 2),
			"--tries", "3",
			"--waitretry", "5",
			"--max-redirect", "8",
			"--quiet"
		]

	# make absolute because wpull will start in temp/
	if not dir:
		working_dir = os.path.abspath(warc_name)
	else:
		working_dir = os.path.abspath(dir)

	LIBGRABSITE = os.path.dirname(libgrabsite.__file__)
	args = get_base_wpull_args() + [
		"--output-file", "{}/wpull.log".format(working_dir),
		"--database", "{}/wpull.db".format(working_dir),
		"--plugin-script", "{}/plugin.py".format(LIBGRABSITE),
		"--python-script", "{}/wpull_hooks.py".format(LIBGRABSITE),
		"--save-cookies", "{}/cookies.txt".format(working_dir),
		"--delete-after",
		"--page-requisites",
		"--no-parent",
		"--concurrent", str(concurrency),
		"--warc-file", "{}/{}".format(working_dir, warc_name),
		"--warc-max-size", str(warc_max_size),
		"--warc-cdx",
		"--strip-session-id",
		"--escaped-fragment",
		"--level", level,
		"--page-requisites-level", page_requisites_level,
		"--span-hosts-allow", span_hosts_allow,
		"--load-cookies", "{}/default_cookies.txt".format(LIBGRABSITE)
	]

	# psutil is not available on Windows and therefore wpull's --monitor-*
	# options are also not available.
	if os.name != "nt" and sys.platform != "cygwin":
		# psutil may also just be not installed
		try:
			import psutil
		except ImportError:
			psutil = None
		if psutil is not None:
			args += [
				"--monitor-disk", "400m",
				"--monitor-memory", "10k",
			]
		args += [
			"--debug-manhole"
		]

	if finished_warc_dir is not None:
		args += ["--warc-move", finished_warc_dir]

	if sitemaps:
		args += ["--sitemaps"]

	if recursive:
		args += ["--recursive"]

	if wpull_args:
		args += shlex.split(wpull_args)

	DIR_input_file = os.path.join(working_dir, "input_file")
	if start_url:
		args.extend(start_url)
	else:
		args += ["--input-file", DIR_input_file]

	if which_wpull_args_partial:
		replace_2arg(args, "--output-file", ["--output-file", "wpull.log"])
		replace_2arg(args, "--database", ["--database", "wpull.db"])
		replace_2arg(args, "--plugin-script", [])
		replace_2arg(args, "--python-script", [])
		replace_2arg(args, "--save-cookies", ["--save-cookies", "cookies.txt"])
		replace_2arg(args, "--load-cookies", [])
		replace_2arg(args, "--warc-file", ["--warc-file", warc_name])
		try:
			args.remove("--quiet")
		except ValueError:
			pass
		print(" ".join(shlex.quote(a) for a in args))
		return

	# Create DIR and DIR files only after which_wpull_args_* checks

	os.makedirs(working_dir)
	temp_dir = os.path.join(working_dir, "temp")
	os.makedirs(temp_dir)

	DIR_custom_hooks = os.path.join(working_dir, "custom_hooks.py")
	if custom_hooks:
		shutil.copyfile(custom_hooks, DIR_custom_hooks)
	else:
		with open(DIR_custom_hooks, "wb") as _:
			pass

	if input_file is not None:
		# wpull -i doesn't support URLs, so download the input file ourselves if necessary
		if input_file_is_remote:
			# TODO: use wpull with correct user agent instead of urllib.request
			# wpull -O fails: https://github.com/chfoo/wpull/issues/275
			u = urllib.request.urlopen(input_file)
			with open(DIR_input_file, "wb") as f:
				while True:
					s = u.read(1024*1024)
					if not s:
						break
					f.write(s)
		else:
			shutil.copyfile(input_file, DIR_input_file)

	with open("{}/id".format(working_dir), "w") as f:
		f.write(id)

	with open("{}/start_url".format(working_dir), "w") as f:
		f.write(claim_start_url)

	with open("{}/all_start_urls".format(working_dir), "w") as f:
		for u in start_url:
			f.write(u + "\n")

	with open("{}/concurrency".format(working_dir), "w") as f:
		f.write(str(concurrency))

	with open("{}/max_content_length".format(working_dir), "w") as f:
		f.write(str(max_content_length))

	with open("{}/igsets".format(working_dir), "w") as f:
		f.write("global,{}".format(igsets))

	if video:
		with open("{}/video".format(working_dir), "w") as f:
			pass

	if not igon:
		with open("{}/igoff".format(working_dir), "w") as f:
			pass

	with open("{}/ignores".format(working_dir), "w") as f:
		pass

	with open("{}/delay".format(working_dir), "w") as f:
		f.write(delay)

	# We don't actually need to write control files for this mode to work, but the
	# only reason to use this is if you're starting wpull manually with modified
	# arguments, and wpull_hooks.py requires the control files.
	if which_wpull_command:
		bin = sys.argv[0].replace("/grab-site", "/wpull") # TODO
		print("GRAB_SITE_WORKING_DIR={} DUPESPOTTER_ENABLED={} {} {}".format(
			working_dir, int(dupespotter), bin, " ".join(shlex.quote(a) for a in args)))
		return

	# Mutate argv, environ, cwd before we turn into wpull
	sys.argv[1:] = args
	os.environ["GRAB_SITE_WORKING_DIR"] = working_dir
	os.environ["DUPESPOTTER_ENABLED"] = "1" if dupespotter else "0"
	# We can use --warc-tempdir= to put WARC-related temporary files in a temp
	# directory, but wpull also creates non-WARC-related "resp_cb" temporary
	# files in the cwd, so we must start wpull in temp/ anyway.
	os.chdir(temp_dir)

	from wpull.app import Application
	def noop_setup_signal_handlers(self):
		pass

	# Don't let wpull install a handler for SIGINT or SIGTERM,
	# because we install our own in wpull_hooks.py.
	Application.setup_signal_handlers = noop_setup_signal_handlers

	import wpull.__main__
	wpull.__main__.main()
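
The comment "make absolute because wpull will start in temp/" names the hazard this example defends against: a relative path silently changes meaning after os.chdir. A minimal sketch of the safeguard (directory names are illustrative):

    import os

    working_dir = os.path.abspath('my-crawl')  # resolve while cwd is still the launch directory
    temp_dir = os.path.join(working_dir, 'temp')
    os.makedirs(temp_dir, exist_ok=True)

    os.chdir(temp_dir)
    # 'my-crawl' would now resolve under temp/, but working_dir still points
    # at the original location.
    log_path = os.path.join(working_dir, 'wpull.log')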

Example 34

Project: bloodhound
Source File: standalone.py
View license
def main():
    from optparse import OptionParser, OptionValueError
    parser = OptionParser(usage='usage: %prog [options] [projenv] ...',
                          version='%%prog %s' % VERSION)

    auths = {}
    def _auth_callback(option, opt_str, value, parser, cls):
        info = value.split(',', 3)
        if len(info) != 3:
            raise OptionValueError("Incorrect number of parameters for %s"
                                   % option)

        env_name, filename, realm = info
        if env_name in auths:
            print >> sys.stderr, 'Ignoring duplicate authentication option ' \
                                 'for project: %s' % env_name
        else:
            auths[env_name] = cls(os.path.abspath(filename), realm)

    def _validate_callback(option, opt_str, value, parser, valid_values):
        if value not in valid_values:
            raise OptionValueError('%s must be one of: %s, not %s'
                                   % (opt_str, '|'.join(valid_values), value))
        setattr(parser.values, option.dest, value)

    def _octal(option, opt_str, value, parser):
        try:
            setattr(parser.values, option.dest, int(value, 8))
        except ValueError:
            raise OptionValueError('Invalid octal umask value: %r' % value)

    parser.add_option('-a', '--auth', action='callback', type='string',
                      metavar='DIGESTAUTH', callback=_auth_callback,
                      callback_args=(DigestAuthentication,),
                      help='[projectdir],[htdigest_file],[realm]')
    parser.add_option('--basic-auth', action='callback', type='string',
                      metavar='BASICAUTH', callback=_auth_callback,
                      callback_args=(BasicAuthentication,),
                      help='[projectdir],[htpasswd_file],[realm]')

    parser.add_option('-p', '--port', action='store', type='int', dest='port',
                      help='the port number to bind to')
    parser.add_option('-b', '--hostname', action='store', dest='hostname',
                      help='the host name or IP address to bind to')
    parser.add_option('--protocol', action='callback', type="string",
                      dest='protocol', callback=_validate_callback,
                      callback_args=(('http', 'scgi', 'ajp', 'fcgi'),),
                      help='http|scgi|ajp|fcgi')
    parser.add_option('-q', '--unquote', action='store_true',
                      dest='unquote',
                      help='unquote PATH_INFO (may be needed when using ajp)')
    parser.add_option('--http10', action='store_false', dest='http11',
                      help='use HTTP/1.0 protocol version instead of HTTP/1.1')
    parser.add_option('--http11', action='store_true', dest='http11',
                      help='use HTTP/1.1 protocol version (default)')
    parser.add_option('-e', '--env-parent-dir', action='store',
                      dest='env_parent_dir', metavar='PARENTDIR',
                      help='parent directory of the project environments')
    parser.add_option('--base-path', action='store', type='string', # XXX call this url_base_path?
                      dest='base_path',
                      help='the initial portion of the request URL\'s "path"')

    parser.add_option('-r', '--auto-reload', action='store_true',
                      dest='autoreload',
                      help='restart automatically when sources are modified')

    parser.add_option('-s', '--single-env', action='store_true',
                      dest='single_env', help='only serve a single '
                      'project without the project list', default=False)

    if os.name == 'posix':
        parser.add_option('-d', '--daemonize', action='store_true',
                          dest='daemonize',
                          help='run in the background as a daemon')
        parser.add_option('--pidfile', action='store',
                          dest='pidfile',
                          help='when daemonizing, file to which to write pid')
        parser.add_option('--umask', action='callback', type='string',
                          dest='umask', metavar='MASK', callback=_octal,
                          help='when daemonizing, file mode creation mask '
                          'to use, in octal notation (default 022)')

        try:
            import grp, pwd
            
            def _group(option, opt_str, value, parser):
                try:
                    value = int(value)
                except ValueError:
                    try:
                        value = grp.getgrnam(value)[2]
                    except KeyError:
                        raise OptionValueError('group not found: %r' % value)
                setattr(parser.values, option.dest, value)

            def _user(option, opt_str, value, parser):
                try:
                    value = int(value)
                except ValueError:
                    try:
                        value = pwd.getpwnam(value)[2]
                    except KeyError:
                        raise OptionValueError('user not found: %r' % value)
                setattr(parser.values, option.dest, value)
            
            parser.add_option('--group', action='callback', type='string',
                              dest='group', metavar='GROUP', callback=_group,
                              help='the group to run as')
            parser.add_option('--user', action='callback', type='string',
                              dest='user', metavar='USER', callback=_user,
                              help='the user to run as')
        except ImportError:
            pass

    parser.set_defaults(port=None, hostname='', base_path='', daemonize=False,
                        protocol='http', http11=True, umask=022, user=None,
                        group=None)
    options, args = parser.parse_args()

    if not args and not options.env_parent_dir:
        parser.error('either the --env-parent-dir option or at least one '
                     'environment must be specified')
    if options.single_env:
        if options.env_parent_dir:
            parser.error('the --single-env option cannot be used with '
                         '--env-parent-dir')
        elif len(args) > 1:
            parser.error('the --single-env option cannot be used with '
                         'more than one environment')
    if options.daemonize and options.autoreload:
        parser.error('the --auto-reload option cannot be used with '
                     '--daemonize')

    if options.port is None:
        options.port = {
            'http': 80,
            'scgi': 4000,
            'ajp': 8009,
            'fcgi': 8000,
        }[options.protocol]
    server_address = (options.hostname, options.port)

    # relative paths don't work when daemonized
    args = [os.path.abspath(a) for a in args]
    if options.env_parent_dir:
        options.env_parent_dir = os.path.abspath(options.env_parent_dir)
    if parser.has_option('pidfile') and options.pidfile:
        options.pidfile = os.path.abspath(options.pidfile)

    wsgi_app = TracEnvironMiddleware(dispatch_request,
                                     options.env_parent_dir, args,
                                     options.single_env)
    if auths:
        if options.single_env:
            project_name = os.path.basename(args[0])
            wsgi_app = AuthenticationMiddleware(wsgi_app, auths, project_name)
        else:
            wsgi_app = AuthenticationMiddleware(wsgi_app, auths)
    base_path = options.base_path.strip('/')
    if base_path:
        wsgi_app = BasePathMiddleware(wsgi_app, base_path)

    if options.protocol == 'http':
        def serve():
            addr, port = server_address
            if not addr or addr == '0.0.0.0':
                loc = '0.0.0.0:%s view at http://127.0.0.1:%s/%s' \
                       % (port, port, base_path)
            else:
                loc = 'http://%s:%s/%s' % (addr, port, base_path)

            try:
                httpd = TracHTTPServer(server_address, wsgi_app,
                                       options.env_parent_dir, args,
                                       use_http_11=options.http11)
            except socket.error, e:
                print 'Error starting Trac server on %s' % loc
                print e.strerror
                sys.exit(1)

            print 'Server starting in PID %i.' % os.getpid()
            print 'Serving on %s' % loc
            if options.http11:
                print 'Using HTTP/1.1 protocol version'
            httpd.serve_forever()
    elif options.protocol in ('scgi', 'ajp', 'fcgi'):
        def serve():
            server_cls = __import__('flup.server.%s' % options.protocol,
                                    None, None, ['']).WSGIServer
            flup_app = wsgi_app
            if options.unquote:
                from trac.web.fcgi_frontend import FlupMiddleware
                flup_app = FlupMiddleware(flup_app)
            ret = server_cls(flup_app, bindAddress=server_address).run()
            sys.exit(42 if ret else 0) # if SIGHUP exit with status 42

    try:
        if options.daemonize:
            daemon.daemonize(pidfile=options.pidfile, progname='tracd',
                             umask=options.umask)
        if options.group is not None:
            os.setgid(options.group)
        if options.user is not None:
            os.setuid(options.user)

        if options.autoreload:
            def modification_callback(file):
                print >> sys.stderr, 'Detected modification of %s, ' \
                                     'restarting.' % file
            autoreload.main(serve, modification_callback)
        else:
            serve()

    except OSError, e:
        print >> sys.stderr, '%s: %s' % (e.__class__.__name__, e)
        sys.exit(1)
    except KeyboardInterrupt:
        pass

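The comment "relative paths don't work when daemonized" in the example above is the whole reason for the os.path.abspath() calls: a conventional Unix daemon changes its working directory to "/" after the double fork, so any path still relative to the launch directory would dangle. A minimal sketch of that interaction, assuming a standard double-fork daemonize() rather than Trac's own daemon module:

import os
import sys

def daemonize():
    # Standard double-fork: detach from the session, then chdir to "/"
    # so the daemon does not pin the directory it was launched from.
    if os.fork() > 0:
        sys.exit(0)
    os.setsid()
    if os.fork() > 0:
        sys.exit(0)
    os.chdir('/')  # from here on, relative paths resolve against "/"

pidfile = os.path.abspath('tracd.pid')  # resolve *before* daemonizing
daemonize()
with open(pidfile, 'w') as f:
    f.write(str(os.getpid()))
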
Example 35

Project: bloodhound
Source File: standalone.py
View license
def main():
    from optparse import OptionParser, OptionValueError
    parser = OptionParser(usage='usage: %prog [options] [projenv] ...',
                          version='%%prog %s' % VERSION)

    auths = {}
    def _auth_callback(option, opt_str, value, parser, cls):
        info = value.split(',', 3)
        if len(info) != 3:
            raise OptionValueError("Incorrect number of parameters for %s"
                                   % option)

        env_name, filename, realm = info
        if env_name in auths:
            print >> sys.stderr, 'Ignoring duplicate authentication option ' \
                                 'for project: %s' % env_name
        else:
            auths[env_name] = cls(os.path.abspath(filename), realm)

    def _validate_callback(option, opt_str, value, parser, valid_values):
        if value not in valid_values:
            raise OptionValueError('%s must be one of: %s, not %s'
                                   % (opt_str, '|'.join(valid_values), value))
        setattr(parser.values, option.dest, value)

    def _octal(option, opt_str, value, parser):
        try:
            setattr(parser.values, option.dest, int(value, 8))
        except ValueError:
            raise OptionValueError('Invalid octal umask value: %r' % value)

    parser.add_option('-a', '--auth', action='callback', type='string',
                      metavar='DIGESTAUTH', callback=_auth_callback,
                      callback_args=(DigestAuthentication,),
                      help='[projectdir],[htdigest_file],[realm]')
    parser.add_option('--basic-auth', action='callback', type='string',
                      metavar='BASICAUTH', callback=_auth_callback,
                      callback_args=(BasicAuthentication,),
                      help='[projectdir],[htpasswd_file],[realm]')

    parser.add_option('-p', '--port', action='store', type='int', dest='port',
                      help='the port number to bind to')
    parser.add_option('-b', '--hostname', action='store', dest='hostname',
                      help='the host name or IP address to bind to')
    parser.add_option('--protocol', action='callback', type="string",
                      dest='protocol', callback=_validate_callback,
                      callback_args=(('http', 'scgi', 'ajp', 'fcgi'),),
                      help='http|scgi|ajp|fcgi')
    parser.add_option('-q', '--unquote', action='store_true',
                      dest='unquote',
                      help='unquote PATH_INFO (may be needed when using ajp)')
    parser.add_option('--http10', action='store_false', dest='http11',
                      help='use HTTP/1.0 protocol version instead of HTTP/1.1')
    parser.add_option('--http11', action='store_true', dest='http11',
                      help='use HTTP/1.1 protocol version (default)')
    parser.add_option('-e', '--env-parent-dir', action='store',
                      dest='env_parent_dir', metavar='PARENTDIR',
                      help='parent directory of the project environments')
    parser.add_option('--base-path', action='store', type='string', # XXX call this url_base_path?
                      dest='base_path',
                      help='the initial portion of the request URL\'s "path"')

    parser.add_option('-r', '--auto-reload', action='store_true',
                      dest='autoreload',
                      help='restart automatically when sources are modified')

    parser.add_option('-s', '--single-env', action='store_true',
                      dest='single_env', help='only serve a single '
                      'project without the project list', default=False)

    if os.name == 'posix':
        parser.add_option('-d', '--daemonize', action='store_true',
                          dest='daemonize',
                          help='run in the background as a daemon')
        parser.add_option('--pidfile', action='store',
                          dest='pidfile',
                          help='when daemonizing, file to which to write pid')
        parser.add_option('--umask', action='callback', type='string',
                          dest='umask', metavar='MASK', callback=_octal,
                          help='when daemonizing, file mode creation mask '
                          'to use, in octal notation (default 022)')

        try:
            import grp, pwd
            
            def _group(option, opt_str, value, parser):
                try:
                    value = int(value)
                except ValueError:
                    try:
                        value = grp.getgrnam(value)[2]
                    except KeyError:
                        raise OptionValueError('group not found: %r' % value)
                setattr(parser.values, option.dest, value)

            def _user(option, opt_str, value, parser):
                try:
                    value = int(value)
                except ValueError:
                    try:
                        value = pwd.getpwnam(value)[2]
                    except KeyError:
                        raise OptionValueError('user not found: %r' % value)
                setattr(parser.values, option.dest, value)
            
            parser.add_option('--group', action='callback', type='string',
                              dest='group', metavar='GROUP', callback=_group,
                              help='the group to run as')
            parser.add_option('--user', action='callback', type='string',
                              dest='user', metavar='USER', callback=_user,
                              help='the user to run as')
        except ImportError:
            pass

    parser.set_defaults(port=None, hostname='', base_path='', daemonize=False,
                        protocol='http', http11=True, umask=022, user=None,
                        group=None)
    options, args = parser.parse_args()

    if not args and not options.env_parent_dir:
        parser.error('either the --env-parent-dir option or at least one '
                     'environment must be specified')
    if options.single_env:
        if options.env_parent_dir:
            parser.error('the --single-env option cannot be used with '
                         '--env-parent-dir')
        elif len(args) > 1:
            parser.error('the --single-env option cannot be used with '
                         'more than one environment')
    if options.daemonize and options.autoreload:
        parser.error('the --auto-reload option cannot be used with '
                     '--daemonize')

    if options.port is None:
        options.port = {
            'http': 80,
            'scgi': 4000,
            'ajp': 8009,
            'fcgi': 8000,
        }[options.protocol]
    server_address = (options.hostname, options.port)

    # relative paths don't work when daemonized
    args = [os.path.abspath(a) for a in args]
    if options.env_parent_dir:
        options.env_parent_dir = os.path.abspath(options.env_parent_dir)
    if parser.has_option('--pidfile') and options.pidfile:
        options.pidfile = os.path.abspath(options.pidfile)

    wsgi_app = TracEnvironMiddleware(dispatch_request,
                                     options.env_parent_dir, args,
                                     options.single_env)
    if auths:
        if options.single_env:
            project_name = os.path.basename(args[0])
            wsgi_app = AuthenticationMiddleware(wsgi_app, auths, project_name)
        else:
            wsgi_app = AuthenticationMiddleware(wsgi_app, auths)
    base_path = options.base_path.strip('/')
    if base_path:
        wsgi_app = BasePathMiddleware(wsgi_app, base_path)

    if options.protocol == 'http':
        def serve():
            addr, port = server_address
            if not addr or addr == '0.0.0.0':
                loc = '0.0.0.0:%s view at http://127.0.0.1:%s/%s' \
                       % (port, port, base_path)
            else:
                loc = 'http://%s:%s/%s' % (addr, port, base_path)

            try:
                httpd = TracHTTPServer(server_address, wsgi_app,
                                       options.env_parent_dir, args,
                                       use_http_11=options.http11)
            except socket.error, e:
                print 'Error starting Trac server on %s' % loc
                print e.strerror
                sys.exit(1)

            print 'Server starting in PID %i.' % os.getpid()
            print 'Serving on %s' % loc
            if options.http11:
                print 'Using HTTP/1.1 protocol version'
            httpd.serve_forever()
    elif options.protocol in ('scgi', 'ajp', 'fcgi'):
        def serve():
            server_cls = __import__('flup.server.%s' % options.protocol,
                                    None, None, ['']).WSGIServer
            flup_app = wsgi_app
            if options.unquote:
                from trac.web.fcgi_frontend import FlupMiddleware
                flup_app = FlupMiddleware(flup_app)
            ret = server_cls(flup_app, bindAddress=server_address).run()
            sys.exit(42 if ret else 0) # if SIGHUP exit with status 42

    try:
        if options.daemonize:
            daemon.daemonize(pidfile=options.pidfile, progname='tracd',
                             umask=options.umask)
        if options.group is not None:
            os.setgid(options.group)
        if options.user is not None:
            os.setuid(options.user)

        if options.autoreload:
            def modification_callback(file):
                print >> sys.stderr, 'Detected modification of %s, ' \
                                     'restarting.' % file
            autoreload.main(serve, modification_callback)
        else:
            serve()

    except OSError, e:
        print >> sys.stderr, '%s: %s' % (e.__class__.__name__, e)
        sys.exit(1)
    except KeyboardInterrupt:
        pass

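The -a/--auth callback in Example 35 packs three comma-separated fields into one option value and normalizes the htdigest path with os.path.abspath() at parse time, again so a later chdir cannot break it. A sketch of that parsing step in isolation (the helper name and the sample arguments are illustrative, not from the project):

import os
from optparse import OptionValueError

def parse_auth(value):
    # "projectdir,htdigest_file,realm" -> (env name, absolute file, realm)
    info = value.split(',', 3)
    if len(info) != 3:
        raise OptionValueError('Incorrect number of parameters: %r' % value)
    env_name, filename, realm = info
    return env_name, os.path.abspath(filename), realm

print(parse_auth('myproject,conf/htdigest,TracRealm'))
# ('myproject', '<cwd>/conf/htdigest', 'TracRealm') -- depends on the cwd
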
Example 36

Project: AST-text-analysis
Source File: main.py
View license
def main():
    args = sys.argv[1:]
    opts, args = getopt.getopt(args, "s:a:w:v:l:f:c:r:p:dy")
    opts = dict(opts)

    # Default values for non-boolean options
    # Language of the text collection / keyphrases ("english" / "german" / "french" /...)
    opts.setdefault("-l", consts.Language.ENGLISH)

    # Relevance measures
    # Similarity measure to use ("ast" / "cosine")
    opts.setdefault("-s", consts.RelevanceMeasure.AST)
    # Algorithm to use for computing ASTs ("easa" / "ast_linear" / "ast_naive")
    opts.setdefault("-a", consts.ASTAlgorithm.EASA)
    # Term weighting scheme used for computing the cosine similarity ("tf-idf" / "tf")
    opts.setdefault("-w", consts.TermWeighting.TF_IDF)
    # Elements of the vector space for the cosine similarity ("stems" / "lemmata" / "words")
    opts.setdefault("-v", consts.VectorSpace.STEMS)

    # Graph construction
    opts.setdefault("-c", "0.6")    # Referral confidence for graph construction
    opts.setdefault("-r", "0.25")   # Relevance threshold of the matching score
    opts.setdefault("-p", "1")      # Support threshold for graph nodes
    # NOTE(mikhaildubov): Default value of '-f' (output format) depends on the subcommand.

    if len(args) < 2:
        print("Invalid syntax: EAST should be called as:\n\n"
              "    east [options] <command> <subcommand> args\n\n"
              "Commands available: keyphrases.\n"
              "Subcommands available: table/graph.")
        return 1

    command = args[0]
    subcommand = args[1]

    if command == "keyphrases":

        if len(args) < 4:
            print('Invalid syntax. For keyphrases analysis, EAST should be called as:\n\n'
                  '    east [options] keyphrases <subcommand> "path/to/keyphrases.txt" '
                  '"path/to/texts/dir"')
            return 1

        # Keywords
        keyphrases_file = os.path.abspath(args[2])
        with open(keyphrases_file) as f:
            # NOTE(mikhaildubov): utils.prepare_text() should not be called in clients like this
            #                     one; it is already called in the applications module. Note that
            #                     the double-calling of this method results in errors.
            keyphrases = f.read().splitlines()

        # Text collection (either a directory or a single file)
        text_collection_path = os.path.abspath(args[3])

        if os.path.isdir(text_collection_path):
            text_files = [os.path.abspath(text_collection_path) + "/" + filename
                          for filename in os.listdir(text_collection_path)
                          if filename.endswith(".txt")]
        else:
            # TODO(mikhaildubov): Check that this single file ends with ".txt".
            text_files = [os.path.abspath(text_collection_path)]

        texts = {}
        # NOTE(mikhaildubov): If we have only one text file, we should split the lines.
        if len(text_files) == 1:
            with open(text_files[0]) as f:
                lines = f.read().splitlines()
                for i in xrange(len(lines)):
                    texts[str(i)] = lines[i]
        # NOTE(mikhaildubov): If there are multiple text files, read them one-by-one.
        else:
            for filename in text_files:
                with open(filename) as f:
                    text_name = os.path.basename(filename).decode("utf-8")[:-4]
                    texts[text_name] = f.read()

        language = opts["-l"]

        # Similarity measure
        similarity_measure = opts["-s"]
        if similarity_measure == "ast":
            ast_algorithm = opts["-a"]
            normalized_scores = "-d" not in opts
            similarity_measure = relevance.ASTRelevanceMeasure(ast_algorithm, normalized_scores)
        elif similarity_measure == "cosine":
            vector_space = opts["-v"]
            term_weighting = opts["-w"]
            similarity_measure = relevance.CosineRelevanceMeasure(vector_space, term_weighting)

        # Synonymizer
        use_synonyms = "-y" in opts
        synonimizer = synonyms.SynonymExtractor(text_collection_path) if use_synonyms else None

        if subcommand == "table":

            keyphrases_table = applications.keyphrases_table(
                                    keyphrases, texts, similarity_measure,
                                    synonimizer, language)

            opts.setdefault("-f", "xml")  # Table output format ("csv" is the other option)
            table_format = opts["-f"].lower()

            try:
                res = formatting.format_table(keyphrases_table, table_format)
                print res
            except Exception as e:
                print e
                return 1

        elif subcommand == "graph":

            # Graph construction parameters: Referral confidence, relevance and support thresholds            
            referral_confidence = float(opts["-c"])
            relevance_threshold = float(opts["-r"])
            support_threshold = float(opts["-p"])

            graph = applications.keyphrases_graph(keyphrases, texts, referral_confidence,
                                                  relevance_threshold, support_threshold,
                                                  similarity_measure, synonimizer, language)

            opts.setdefault("-f", "edges")  # Graph output format (also "gml" possible)
            graph_format = opts["-f"].lower()

            try:
                res = formatting.format_graph(graph, graph_format)
                print res
            except Exception as e:
                print e
                return 1

        else:
            print "Invalid subcommand: '%s'. Please use one of: 'table', 'graph'." % subcommand
            return 1

    else:
        print "Invalid command: '%s'. Please use one of: 'keyphrases'." % command
        return 1

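Example 36 builds its list of text files by concatenating the absolute directory, a literal "/", and the bare names returned by os.listdir(), which works on POSIX but is not portable. A sketch of the same step using os.path.join (the helper name is illustrative):

import os

def txt_files(path):
    # os.listdir() yields bare names, so join them back onto the
    # absolute directory before opening them.
    path = os.path.abspath(path)
    if os.path.isdir(path):
        return [os.path.join(path, name)
                for name in os.listdir(path)
                if name.endswith('.txt')]
    return [path]  # a single .txt file was given
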
Example 37

View license
def ParseArguments(argv):
  """Parses command-line arguments.

  Args:
    argv: Command-line arguments, including the executable name, used to
      execute this application.

  Returns:
    Tuple (args, option_dict) where:
      args: List of command-line arguments following the executable name.
      option_dict: Dictionary of parsed flags that maps keys from DEFAULT_ARGS
        to their values, which are either pulled from the defaults, or from
        command-line flags.
  """
  option_dict = DEFAULT_ARGS.copy()

  try:
    opts, args = getopt.gnu_getopt(
      argv[1:],
      'a:cdhp:',
      [ 'address=',
        'admin_console_server=',
        'admin_console_host=',
        'allow_skipped_files',
        'auth_domain=',
        'clear_datastore',
        'blobstore_path=',
        'datastore_path=',
        'debug',
        'debug_imports',
        'enable_sendmail',
        'disable_static_caching',
        'show_mail_body',
        'help',
        'history_path=',
        'port=',
        'require_indexes',
        'smtp_host=',
        'smtp_password=',
        'smtp_port=',
        'smtp_user=',
        'template_dir=',
        'trusted',
      ])
  except getopt.GetoptError, e:
    print >>sys.stderr, 'Error: %s' % e
    PrintUsageExit(1)

  for option, value in opts:
    if option in ('-h', '--help'):
      PrintUsageExit(0)

    if option in ('-d', '--debug'):
      option_dict[ARG_LOG_LEVEL] = logging.DEBUG

    if option in ('-p', '--port'):
      try:
        option_dict[ARG_PORT] = int(value)
        if not (65535 > option_dict[ARG_PORT] > 0):
          raise ValueError
      except ValueError:
        print >>sys.stderr, 'Invalid value supplied for port'
        PrintUsageExit(1)

    if option in ('-a', '--address'):
      option_dict[ARG_ADDRESS] = value

    if option == '--blobstore_path':
      option_dict[ARG_BLOBSTORE_PATH] = os.path.abspath(value)

    if option == '--datastore_path':
      option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)

    if option == '--history_path':
      option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)

    if option in ('-c', '--clear_datastore'):
      option_dict[ARG_CLEAR_DATASTORE] = True

    if option == '--require_indexes':
      option_dict[ARG_REQUIRE_INDEXES] = True

    if option == '--smtp_host':
      option_dict[ARG_SMTP_HOST] = value

    if option == '--smtp_port':
      try:
        option_dict[ARG_SMTP_PORT] = int(value)
        if not (65535 > option_dict[ARG_SMTP_PORT] > 0):
          raise ValueError
      except ValueError:
        print >>sys.stderr, 'Invalid value supplied for SMTP port'
        PrintUsageExit(1)

    if option == '--smtp_user':
      option_dict[ARG_SMTP_USER] = value

    if option == '--smtp_password':
      option_dict[ARG_SMTP_PASSWORD] = value

    if option == '--enable_sendmail':
      option_dict[ARG_ENABLE_SENDMAIL] = True

    if option == '--show_mail_body':
      option_dict[ARG_SHOW_MAIL_BODY] = True

    if option == '--auth_domain':
      option_dict['_DEFAULT_ENV_AUTH_DOMAIN'] = value

    if option == '--debug_imports':
      option_dict['_ENABLE_LOGGING'] = True

    if option == '--template_dir':
      option_dict[ARG_TEMPLATE_DIR] = value

    if option == '--admin_console_server':
      option_dict[ARG_ADMIN_CONSOLE_SERVER] = value.strip()

    if option == '--admin_console_host':
      option_dict[ARG_ADMIN_CONSOLE_HOST] = value

    if option == '--allow_skipped_files':
      option_dict[ARG_ALLOW_SKIPPED_FILES] = True

    if option == '--disable_static_caching':
      option_dict[ARG_STATIC_CACHING] = False

    if option == '--trusted':
      option_dict[ARG_TRUSTED] = True

  return args, option_dict

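Example 37 resolves --blobstore_path, --datastore_path and --history_path with os.path.abspath() while the flags are being parsed, so the dev server can change its working directory later without losing them. A trimmed-down sketch of that getopt pattern (the flag subset and the calling code are illustrative):

import getopt
import os

def parse(argv):
    opts, args = getopt.gnu_getopt(argv[1:], 'p:',
                                   ['datastore_path=', 'port='])
    parsed = {}
    for option, value in opts:
        if option == '--datastore_path':
            # resolve against the invocation directory, exactly once
            parsed['datastore_path'] = os.path.abspath(value)
        if option in ('-p', '--port'):
            parsed['port'] = int(value)
    return args, parsed

print(parse(['dev_appserver.py', '--datastore_path', 'data/ds.sqlite',
             '-p', '8080']))
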
Example 39

Project: mysql-utilities
Source File: serverclone.py
View license
def clone_server(conn_val, options):
    """Clone an existing server

    This method creates a new instance of a running server using a datadir
    set to the new_data parametr, with a port set to new_port, server_id
    set to new_id and a root password of root_pass. You can also specify
    additional parameters for the mysqld command line as well as turn on
    verbosity mode to display more diagnostic information during the clone
    process.

    The method will build a new base database installation from the .sql
    files used to construct a new installation. Once the database is
    created, the server will be started.

    conn_val[in]        a dictionary containing connection information
                        including:
                        (user, password, host, port, socket)
    options[in]         dictionary of options:
      new_data[in]        An existing path to create the new database and use
                          as datadir for new instance
                          (default = None)
      new_port[in]        Port number for new instance
                          (default = 3307)
      new_id[in]          Server_id for new instance
                          (default = 2)
      root_pass[in]       Password for root user on new instance (optional)
      mysqld_options[in]  Additional command line options for mysqld
      verbosity[in]       Print additional information during operation
                          (default is 0)
      quiet[in]           If True, do not print messages.
                          (default is False)
      cmd_file[in]        file name to write startup command
      start_timeout[in]   Number of seconds to wait for server to start
    """
    new_data = os.path.abspath(options.get('new_data', None))
    new_port = options.get('new_port', '3307')
    root_pass = options.get('root_pass', None)
    verbosity = options.get('verbosity', 0)
    user = options.get('user', 'root')
    quiet = options.get('quiet', False)
    cmd_file = options.get('cmd_file', None)
    start_timeout = int(options.get('start_timeout', 10))
    mysqld_options = options.get('mysqld_options', '')
    force = options.get('force', False)
    quote_char = "'" if os.name == "posix" else '"'

    if not check_port_in_use('localhost', int(new_port)):
        raise UtilError("Port {0} in use. Please choose an "
                        "available port.".format(new_port))

    # Check if path to database files is greater than MAX_DIR_SIZE char,
    if len(new_data) > MAX_DATADIR_SIZE and not force:
        raise UtilError("The --new-data path '{0}' is too long "
                        "(> {1} characters). Please use a smaller one. "
                        "You can use the --force option to skip this "
                        "check".format(new_data, MAX_DATADIR_SIZE))

    # Clone running server
    if conn_val is not None:
        # Try to connect to the MySQL database server.
        server1_options = {
            'conn_info': conn_val,
            'role': "source",
        }
        server1 = Server(server1_options)
        server1.connect()
        if not quiet:
            print "# Cloning the MySQL server running on %s." % \
                conn_val["host"]

        # Get basedir
        rows = server1.exec_query("SHOW VARIABLES LIKE 'basedir'")
        if not rows:
            raise UtilError("Unable to determine basedir of running server.")
        basedir = os.path.normpath(rows[0][1])

    # Cloning downed or offline server
    else:
        basedir = os.path.abspath(options.get("basedir", None))
        if not quiet:
            print "# Cloning the MySQL server located at %s." % basedir

    new_data_deleted = False
    # If datadir exists, has data, and user said it was Ok, delete it
    if os.path.exists(new_data) and options.get("delete", False) and \
       os.listdir(new_data):
        new_data_deleted = True
        shutil.rmtree(new_data, True)

    # Create new data directory if it does not exist
    if not os.path.exists(new_data):
        if not quiet:
            print "# Creating new data directory..."
        try:
            os.mkdir(new_data)
        except OSError as err:
            raise UtilError("Unable to create directory '{0}', reason: {1}"
                            "".format(new_data, err.strerror))

    # After creating the new data directory, check for free space; errors
    # about an invalid or inaccessible path have already been ruled out.
    # Unless force is specified, verify and stop if there is not enough
    # free space.
    if not force and os.path.exists(new_data) and \
       estimate_free_space(new_data) < REQ_FREE_SPACE:
        # Don't leave empty folders; delete new_data if it was previously deleted
        if os.path.exists(new_data) and new_data_deleted:
            shutil.rmtree(new_data, True)
        raise UtilError(LOW_SPACE_ERRR_MSG.format(directory=new_data,
                                                  megabytes=REQ_FREE_SPACE))

    # Check for warning of using --skip-innodb
    mysqld_path = get_tool_path(basedir, "mysqld")
    version_str = get_mysqld_version(mysqld_path)
    # convert version_str from str tuple to integer tuple if possible
    if version_str is not None:
        version = tuple([int(digit) for digit in version_str])
    else:
        version = None
    if mysqld_options is not None and ("--skip-innodb" in mysqld_options or
       "--innodb" in mysqld_options) and version is not None and \
       version >= (5, 7, 5):
        print("# WARNING: {0}".format(WARN_OPT_SKIP_INNODB))

    if not quiet:
        print "# Configuring new instance..."
        print "# Locating mysql tools..."

    mysqladmin_path = get_tool_path(basedir, "mysqladmin")

    mysql_basedir = basedir
    if os.path.exists(os.path.join(basedir, "local/mysql/share/")):
        mysql_basedir = os.path.join(mysql_basedir, "local/mysql/")
    # for source trees
    elif os.path.exists(os.path.join(basedir, "/sql/share/english/")):
        mysql_basedir = os.path.join(mysql_basedir, "/sql/")

    locations = [
        ("mysqld", mysqld_path),
        ("mysqladmin", mysqladmin_path),
    ]

    # From 5.7.6 version onwards, bootstrap is done via mysqld with the
    # --initialize-insecure option, so no need to get information about the
    # sql system tables that need to be loaded.
    if version < (5, 7, 6):
        system_tables = get_tool_path(basedir, "mysql_system_tables.sql",
                                      False)
        system_tables_data = get_tool_path(basedir,
                                           "mysql_system_tables_data.sql",
                                           False)
        test_data_timezone = get_tool_path(basedir,
                                           "mysql_test_data_timezone.sql",
                                           False)
        help_data = get_tool_path(basedir, "fill_help_tables.sql", False)
        locations.extend([("mysql_system_tables.sql", system_tables),
                          ("mysql_system_tables_data.sql", system_tables_data),
                          ("mysql_test_data_timezone.sql", test_data_timezone),
                          ("fill_help_tables.sql", help_data),
                          ])

    if verbosity >= 3 and not quiet:
        print "# Location of files:"
        if cmd_file is not None:
            locations.append(("write startup command to", cmd_file))

        for location in locations:
            print "# % 28s: %s" % location

    # Create the new mysql data with mysql_import_db-like process
    if not quiet:
        print "# Setting up empty database and mysql tables..."

    fnull = open(os.devnull, 'w')

    # For MySQL versions before 5.7.6, use regular bootstrap procedure.
    if version < (5, 7, 6):
        # Get bootstrap SQL statements
        sql = list()
        sql.append("CREATE DATABASE mysql;")
        sql.append("USE mysql;")
        innodb_disabled = False
        if mysqld_options:
            innodb_disabled = '--innodb=OFF' in mysqld_options
        for sqlfile in [system_tables, system_tables_data, test_data_timezone,
                        help_data]:
            lines = open(sqlfile, 'r').readlines()
            # On MySQL 5.7.5, the root@localhost account creation was
            # moved from the system_tables_data sql file into the
            # mysql_install_db binary. Since we don't use mysql_install_db
            # directly we need to create the root user account ourselves.
            if (version is not None and version == (5, 7, 5) and
                    sqlfile == system_tables_data):
                lines.extend(_CREATE_ROOT_USER)
            for line in lines:
                line = line.strip()
                # Don't fail when InnoDB is turned off (Bug#16369955)
                # (Ugly hack)
                if (sqlfile == system_tables and
                   "SET @sql_mode_orig=@@SES" in line and innodb_disabled):
                    for line in lines:
                        if 'SET SESSION sql_mode=@@sql' in line:
                            break
                sql.append(line)

        # Bootstrap to set up the mysql tables
        cmd = [
            mysqld_path,
            "--no-defaults",
            "--bootstrap",
            "--datadir={0}".format(new_data),
            "--basedir={0}".format(os.path.abspath(mysql_basedir)),
        ]

        if verbosity >= 1 and not quiet:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE)
        else:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE,
                                    stdout=fnull, stderr=fnull)
        proc.communicate('\n'.join(sql))

    # From 5.7.6 onwards, mysql_install_db has been replaced by mysqld and
    # the --initialize option
    else:
        cmd = [
            mysqld_path,
            "--no-defaults",
            "--initialize-insecure=on",
            "--datadir={0}".format(new_data),
            "--basedir={0}".format(os.path.abspath(mysql_basedir))
        ]
        if verbosity >= 1 and not quiet:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE)
        else:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE,
                                    stdout=fnull, stderr=fnull)
    # Wait for subprocess to finish
    res = proc.wait()
    # Kill subprocess just in case it didn't finish - Ok if proc doesn't exist
    if int(res) != 0:
        if os.name == "posix":
            try:
                os.kill(proc.pid, subprocess.signal.SIGTERM)
            except OSError as error:
                if not error.strerror.startswith("No such process"):
                    raise UtilError("Failed to kill process with pid '{0}'"
                                    "".format(proc.pid))
        else:
            ret_code = subprocess.call("taskkill /F /T /PID "
                                       "{0}".format(proc.pid), shell=True)

            # return code 0 means it was successful and 128 means it tried
            # to kill a process that doesn't exist
            if ret_code not in (0, 128):
                raise UtilError("Failed to kill process with pid '{0}'. "
                                "Return code {1}".format(proc.pid,
                                                         ret_code))

    # Drop the bootstrap file
    if os.path.isfile("bootstrap.sql"):
        os.unlink("bootstrap.sql")

    # Start the instance
    if not quiet:
        print "# Starting new instance of the server..."

    # If the user is not the same as the user running the script...
    # and this is a Posix system... and we are running as root
    if user_change_as_root(options):
        subprocess.call(['chown', '-R', user, new_data])
        subprocess.call(['chgrp', '-R', user, new_data])

    socket_path = os.path.join(new_data, 'mysql.sock')
    # If socket path is too long, use mkdtemp to create a tmp dir and
    # use it instead to store the socket
    if os.name == 'posix' and len(socket_path) > MAX_SOCKET_PATH_SIZE:
        socket_path = os.path.join(tempfile.mkdtemp(), 'mysql.sock')
        if not quiet:
            print("# WARNING: The socket file path '{0}' is too long (>{1}), "
                  "using '{2}' instead".format(
                      os.path.join(new_data, 'mysql.sock'),
                      MAX_SOCKET_PATH_SIZE, socket_path))

    cmd = {
        'datadir': '--datadir={0}'.format(new_data),
        'tmpdir': '--tmpdir={0}'.format(new_data),
        'pid-file': '--pid-file={0}'.format(
            os.path.join(new_data, "clone.pid")),
        'port': '--port={0}'.format(new_port),
        'server': '--server-id={0}'.format(options.get('new_id', 2)),
        'basedir': '--basedir={0}'.format(mysql_basedir),
        'socket': '--socket={0}'.format(socket_path),
    }
    if user:
        cmd.update({'user': '--user={0}'.format(user)})
    if mysqld_options:
        if isinstance(mysqld_options, (list, tuple)):
            cmd.update(dict(zip(mysqld_options, mysqld_options)))
        else:
            new_opts = mysqld_options.strip(" ")
            # Drop the --mysqld=
            if new_opts.startswith("--mysqld="):
                new_opts = new_opts[9:]
            if new_opts.startswith('"') and new_opts.endswith('"'):
                list_ = shlex.split(new_opts.strip('"'))
                cmd.update(dict(zip(list_, list_)))
            elif new_opts.startswith("'") and new_opts.endswith("'"):
                list_ = shlex.split(new_opts.strip("'"))
                cmd.update(dict(zip(list_, list_)))
            # Special case where there is only 1 option
            elif len(new_opts.split("--")) == 1:
                cmd.update({mysqld_options: mysqld_options})
            else:
                list_ = shlex.split(new_opts)
                cmd.update(dict(zip(list_, list_)))

    # set of options that must be surrounded with quotes
    options_to_quote = set(["datadir", "tmpdir", "basedir", "socket",
                            "pid-file"])

    # Strip spaces from each option
    for key in cmd:
        cmd[key] = cmd[key].strip(' ')

    # Write startup command if specified
    if cmd_file is not None:
        if verbosity >= 0 and not quiet:
            print "# Writing startup command to file."
        cfile = open(cmd_file, 'w')
        comment = " Startup command generated by mysqlserverclone.\n"
        if os.name == 'posix' and cmd_file.endswith('.sh'):
            cfile.write("#!/bin/sh\n")
            cfile.write("#{0}".format(comment))
        elif os.name == 'nt' and cmd_file.endswith('.bat'):
            cfile.write("REM{0}".format(comment))
        else:
            cfile.write("#{0}".format(comment))

        start_cmd_lst = ["{0}{1}{0} --no-defaults".format(quote_char,
                                                          mysqld_path)]

        # build start command
        for key, val in cmd.iteritems():
            if key in options_to_quote:
                val = "{0}{1}{0}".format(quote_char, val)
            start_cmd_lst.append(val)
        cfile.write("{0}\n".format(" ".join(start_cmd_lst)))
        cfile.close()

    if os.name == "nt" and verbosity >= 1:
        cmd.update({"console": "--console"})

    start_cmd_lst = [mysqld_path, "--no-defaults"]
    sorted_keys = sorted(cmd.keys())
    start_cmd_lst.extend([cmd[val] for val in sorted_keys])
    if verbosity >= 1 and not quiet:
        if verbosity >= 2:
            print("# Startup command for new server:\n"
                  "{0}".format(" ".join(start_cmd_lst)))
        proc = subprocess.Popen(start_cmd_lst, shell=False)
    else:
        proc = subprocess.Popen(start_cmd_lst, shell=False, stdout=fnull,
                                stderr=fnull)

    # Try to connect to the new MySQL instance
    if not quiet:
        print "# Testing connection to new instance..."
    new_sock = None

    if os.name == "posix":
        new_sock = socket_path
    port_int = int(new_port)

    conn = {
        "user": "root",
        "passwd": "",
        "host": conn_val["host"] if conn_val is not None else "localhost",
        "port": port_int,
        "unix_socket": new_sock
    }

    server2_options = {
        'conn_info': conn,
        'role': "clone",
    }
    server2 = Server(server2_options)

    i = 0
    while i < start_timeout:
        i += 1
        time.sleep(1)
        try:
            server2.connect()
            i = start_timeout + 1
        except:
            pass
        finally:
            if verbosity >= 1 and not quiet:
                print "# trying again..."

    if i == start_timeout:
        raise UtilError("Unable to communicate with new instance. "
                        "Process id = {0}.".format(proc.pid))
    elif not quiet:
        print "# Success!"

    # Set the root password
    if root_pass:
        if not quiet:
            print "# Setting the root password..."
        cmd = [mysqladmin_path, '--no-defaults', '-v', '-uroot']
        if os.name == "posix":
            cmd.append("--socket={0}".format(new_sock))
        else:
            cmd.append("--port={0}".format(int(new_port)))
        cmd.extend(["password", root_pass])
        if verbosity > 0 and not quiet:
            proc = subprocess.Popen(cmd, shell=False)
        else:
            proc = subprocess.Popen(cmd, shell=False,
                                    stdout=fnull, stderr=fnull)

        # Wait for subprocess to finish
        res = proc.wait()

    if not quiet:
        conn_str = "# Connection Information:\n"
        conn_str += "#  -uroot"
        if root_pass:
            conn_str += " -p%s" % root_pass
        if os.name == "posix":
            conn_str += " --socket=%s" % new_sock
        else:
            conn_str += " --port=%s" % new_port
        print conn_str
        print "#...done."

    fnull.close()

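One wrinkle in Example 39: new_data = os.path.abspath(options.get('new_data', None)) assumes the caller always supplies new_data. abspath(None) raises an error (TypeError on Python 3, AttributeError on Python 2), and abspath('') silently returns the current directory. A defensive variant, using a hypothetical resolve_required_path() helper:

import os

def resolve_required_path(options, key):
    # abspath(None) raises and abspath('') silently returns the current
    # directory, so validate before normalizing.
    value = options.get(key)
    if not value:
        raise ValueError('%s is required' % key)
    return os.path.abspath(value)

print(resolve_required_path({'new_data': 'clone_data'}, 'new_data'))
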
Example 40

Project: mysql-utilities
Source File: serverclone.py
View license
def clone_server(conn_val, options):
    """Clone an existing server

    This method creates a new instance of a running server using a datadir
    set to the new_data parameter, with a port set to new_port, server_id
    set to new_id and a root password of root_pass. You can also specify
    additional parameters for the mysqld command line as well as turn on
    verbosity mode to display more diagnostic information during the clone
    process.

    The method will build a new base database installation from the .sql
    files used to construct a new installation. Once the database is
    created, the server will be started.

    conn_val[in]        a dictionary containing connection information
                        including:
                        (user, password, host, port, socket)
    options[in]         dictionary of options:
      new_data[in]        An existing path to create the new database and use
                          as datadir for new instance
                          (default = None)
      new_port[in]        Port number for new instance
                          (default = 3307)
      new_id[in]          Server_id for new instance
                          (default = 2)
      root_pass[in]       Password for root user on new instance (optional)
      mysqld_options[in]  Additional command line options for mysqld
      verbosity[in]       Print additional information during operation
                          (default is 0)
      quiet[in]           If True, do not print messages.
                          (default is False)
      cmd_file[in]        file name to write startup command
      start_timeout[in]   Number of seconds to wait for server to start
    """
    new_data = os.path.abspath(options.get('new_data', None))
    new_port = options.get('new_port', '3307')
    root_pass = options.get('root_pass', None)
    verbosity = options.get('verbosity', 0)
    user = options.get('user', 'root')
    quiet = options.get('quiet', False)
    cmd_file = options.get('cmd_file', None)
    start_timeout = int(options.get('start_timeout', 10))
    mysqld_options = options.get('mysqld_options', '')
    force = options.get('force', False)
    quote_char = "'" if os.name == "posix" else '"'

    if not check_port_in_use('localhost', int(new_port)):
        raise UtilError("Port {0} in use. Please choose an "
                        "available port.".format(new_port))

    # Check if the path to the database files is longer than MAX_DATADIR_SIZE
    if len(new_data) > MAX_DATADIR_SIZE and not force:
        raise UtilError("The --new-data path '{0}' is too long "
                        "(> {1} characters). Please use a smaller one. "
                        "You can use the --force option to skip this "
                        "check".format(new_data, MAX_DATADIR_SIZE))

    # Clone running server
    if conn_val is not None:
        # Try to connect to the MySQL database server.
        server1_options = {
            'conn_info': conn_val,
            'role': "source",
        }
        server1 = Server(server1_options)
        server1.connect()
        if not quiet:
            print "# Cloning the MySQL server running on %s." % \
                conn_val["host"]

        # Get basedir
        rows = server1.exec_query("SHOW VARIABLES LIKE 'basedir'")
        if not rows:
            raise UtilError("Unable to determine basedir of running server.")
        basedir = os.path.normpath(rows[0][1])

    # Cloning downed or offline server
    else:
        basedir = os.path.abspath(options.get("basedir", None))
        if not quiet:
            print "# Cloning the MySQL server located at %s." % basedir

    new_data_deleted = False
    # If datadir exists, has data, and user said it was Ok, delete it
    if os.path.exists(new_data) and options.get("delete", False) and \
       os.listdir(new_data):
        new_data_deleted = True
        shutil.rmtree(new_data, True)

    # Create new data directory if it does not exist
    if not os.path.exists(new_data):
        if not quiet:
            print "# Creating new data directory..."
        try:
            os.mkdir(new_data)
        except OSError as err:
            raise UtilError("Unable to create directory '{0}', reason: {1}"
                            "".format(new_data, err.strerror))

    # After creating the new data directory, check for free space; errors
    # about an invalid or inaccessible path have already been ruled out.
    # Unless force is specified, verify and stop if there is not enough
    # free space.
    if not force and os.path.exists(new_data) and \
       estimate_free_space(new_data) < REQ_FREE_SPACE:
        # Don't leave empty folders; delete new_data if it was previously deleted
        if os.path.exists(new_data) and new_data_deleted:
            shutil.rmtree(new_data, True)
        raise UtilError(LOW_SPACE_ERRR_MSG.format(directory=new_data,
                                                  megabytes=REQ_FREE_SPACE))

    # Check for warning of using --skip-innodb
    mysqld_path = get_tool_path(basedir, "mysqld")
    version_str = get_mysqld_version(mysqld_path)
    # convert version_str from str tuple to integer tuple if possible
    if version_str is not None:
        version = tuple([int(digit) for digit in version_str])
    else:
        version = None
    if mysqld_options is not None and ("--skip-innodb" in mysqld_options or
       "--innodb" in mysqld_options) and version is not None and \
       version >= (5, 7, 5):
        print("# WARNING: {0}".format(WARN_OPT_SKIP_INNODB))

    if not quiet:
        print "# Configuring new instance..."
        print "# Locating mysql tools..."

    mysqladmin_path = get_tool_path(basedir, "mysqladmin")

    mysql_basedir = basedir
    if os.path.exists(os.path.join(basedir, "local/mysql/share/")):
        mysql_basedir = os.path.join(mysql_basedir, "local/mysql/")
    # for source trees
    elif os.path.exists(os.path.join(basedir, "/sql/share/english/")):
        mysql_basedir = os.path.join(mysql_basedir, "/sql/")

    locations = [
        ("mysqld", mysqld_path),
        ("mysqladmin", mysqladmin_path),
    ]

    # From 5.7.6 version onwards, bootstrap is done via mysqld with the
    # --initialize-insecure option, so no need to get information about the
    # sql system tables that need to be loaded.
    if version < (5, 7, 6):
        system_tables = get_tool_path(basedir, "mysql_system_tables.sql",
                                      False)
        system_tables_data = get_tool_path(basedir,
                                           "mysql_system_tables_data.sql",
                                           False)
        test_data_timezone = get_tool_path(basedir,
                                           "mysql_test_data_timezone.sql",
                                           False)
        help_data = get_tool_path(basedir, "fill_help_tables.sql", False)
        locations.extend([("mysql_system_tables.sql", system_tables),
                          ("mysql_system_tables_data.sql", system_tables_data),
                          ("mysql_test_data_timezone.sql", test_data_timezone),
                          ("fill_help_tables.sql", help_data),
                          ])

    if verbosity >= 3 and not quiet:
        print "# Location of files:"
        if cmd_file is not None:
            locations.append(("write startup command to", cmd_file))

        for location in locations:
            print "# % 28s: %s" % location

    # Create the new mysql data with mysql_import_db-like process
    if not quiet:
        print "# Setting up empty database and mysql tables..."

    fnull = open(os.devnull, 'w')

    # For MySQL versions before 5.7.6, use regular bootstrap procedure.
    if version < (5, 7, 6):
        # Get bootstrap SQL statements
        sql = list()
        sql.append("CREATE DATABASE mysql;")
        sql.append("USE mysql;")
        innodb_disabled = False
        if mysqld_options:
            innodb_disabled = '--innodb=OFF' in mysqld_options
        for sqlfile in [system_tables, system_tables_data, test_data_timezone,
                        help_data]:
            lines = open(sqlfile, 'r').readlines()
            # On MySQL 5.7.5, the root@localhost account creation was
            # moved from the system_tables_data sql file into the
            # mysql_install_db binary. Since we don't use mysql_install_db
            # directly we need to create the root user account ourselves.
            if (version is not None and version == (5, 7, 5) and
                    sqlfile == system_tables_data):
                lines.extend(_CREATE_ROOT_USER)
            for line in lines:
                line = line.strip()
                # Don't fail when InnoDB is turned off (Bug#16369955)
                # (Ugly hack)
                if (sqlfile == system_tables and
                   "SET @sql_mode_orig=@@SES" in line and innodb_disabled):
                    for line in lines:
                        if 'SET SESSION sql_mode=@@sql' in line:
                            break
                sql.append(line)

        # Bootstrap to set up the mysql tables
        cmd = [
            mysqld_path,
            "--no-defaults",
            "--bootstrap",
            "--datadir={0}".format(new_data),
            "--basedir={0}".format(os.path.abspath(mysql_basedir)),
        ]

        if verbosity >= 1 and not quiet:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE)
        else:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE,
                                    stdout=fnull, stderr=fnull)
        proc.communicate('\n'.join(sql))

    # From 5.7.6 onwards, mysql_install_db has been replaced by mysqld and
    # the --initialize option
    else:
        cmd = [
            mysqld_path,
            "--no-defaults",
            "--initialize-insecure=on",
            "--datadir={0}".format(new_data),
            "--basedir={0}".format(os.path.abspath(mysql_basedir))
        ]
        if verbosity >= 1 and not quiet:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE)
        else:
            proc = subprocess.Popen(cmd, shell=False, stdin=subprocess.PIPE,
                                    stdout=fnull, stderr=fnull)
    # Wait for subprocess to finish
    res = proc.wait()
    # Kill subprocess just in case it didn't finish - Ok if proc doesn't exist
    if int(res) != 0:
        if os.name == "posix":
            try:
                os.kill(proc.pid, subprocess.signal.SIGTERM)
            except OSError as error:
                if not error.strerror.startswith("No such process"):
                    raise UtilError("Failed to kill process with pid '{0}'"
                                    "".format(proc.pid))
        else:
            ret_code = subprocess.call("taskkill /F /T /PID "
                                       "{0}".format(proc.pid), shell=True)

            # return code 0 means it was successful and 128 means it tried
            # to kill a process that doesn't exist
            if ret_code not in (0, 128):
                raise UtilError("Failed to kill process with pid '{0}'. "
                                "Return code {1}".format(proc.pid,
                                                         ret_code))

    # Drop the bootstrap file
    if os.path.isfile("bootstrap.sql"):
        os.unlink("bootstrap.sql")

    # Start the instance
    if not quiet:
        print "# Starting new instance of the server..."

    # If the user is not the same as the user running the script...
    # and this is a Posix system... and we are running as root
    if user_change_as_root(options):
        subprocess.call(['chown', '-R', user, new_data])
        subprocess.call(['chgrp', '-R', user, new_data])

    socket_path = os.path.join(new_data, 'mysql.sock')
    # If socket path is too long, use mkdtemp to create a tmp dir and
    # use it instead to store the socket
    if os.name == 'posix' and len(socket_path) > MAX_SOCKET_PATH_SIZE:
        socket_path = os.path.join(tempfile.mkdtemp(), 'mysql.sock')
        if not quiet:
            print("# WARNING: The socket file path '{0}' is too long (>{1}), "
                  "using '{2}' instead".format(
                      os.path.join(new_data, 'mysql.sock'),
                      MAX_SOCKET_PATH_SIZE, socket_path))

    cmd = {
        'datadir': '--datadir={0}'.format(new_data),
        'tmpdir': '--tmpdir={0}'.format(new_data),
        'pid-file': '--pid-file={0}'.format(
            os.path.join(new_data, "clone.pid")),
        'port': '--port={0}'.format(new_port),
        'server': '--server-id={0}'.format(options.get('new_id', 2)),
        'basedir': '--basedir={0}'.format(mysql_basedir),
        'socket': '--socket={0}'.format(socket_path),
    }
    if user:
        cmd.update({'user': '--user={0}'.format(user)})
    if mysqld_options:
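        # Map each extra mysqld option to itself so the dict(zip(...)) merge
        # below adds it to cmd without clobbering the named entries above.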
        if isinstance(mysqld_options, (list, tuple)):
            cmd.update(dict(zip(mysqld_options, mysqld_options)))
        else:
            new_opts = mysqld_options.strip(" ")
            # Drop the --mysqld=
            if new_opts.startswith("--mysqld="):
                new_opts = new_opts[9:]
            if new_opts.startswith('"') and new_opts.endswith('"'):
                list_ = shlex.split(new_opts.strip('"'))
                cmd.update(dict(zip(list_, list_)))
            elif new_opts.startswith("'") and new_opts.endswith("'"):
                list_ = shlex.split(new_opts.strip("'"))
                cmd.update(dict(zip(list_, list_)))
            # Special case where there is only 1 option
            elif len(new_opts.split("--")) == 1:
                cmd.update({mysqld_options: mysqld_options})
            else:
                list_ = shlex.split(new_opts)
                cmd.update(dict(zip(list_, list_)))

    # set of options that must be surrounded with quotes
    options_to_quote = set(["datadir", "tmpdir", "basedir", "socket",
                            "pid-file"])

    # Strip spaces from each option
    for key in cmd:
        cmd[key] = cmd[key].strip(' ')

    # Write startup command if specified
    if cmd_file is not None:
        if verbosity >= 0 and not quiet:
            print "# Writing startup command to file."
        cfile = open(cmd_file, 'w')
        comment = " Startup command generated by mysqlserverclone.\n"
        if os.name == 'posix' and cmd_file.endswith('.sh'):
            cfile.write("#!/bin/sh\n")
            cfile.write("#{0}".format(comment))
        elif os.name == 'nt' and cmd_file.endswith('.bat'):
            cfile.write("REM{0}".format(comment))
        else:
            cfile.write("#{0}".format(comment))

        start_cmd_lst = ["{0}{1}{0} --no-defaults".format(quote_char,
                                                          mysqld_path)]

        # build start command
        for key, val in cmd.iteritems():
            if key in options_to_quote:
                val = "{0}{1}{0}".format(quote_char, val)
            start_cmd_lst.append(val)
        cfile.write("{0}\n".format(" ".join(start_cmd_lst)))
        cfile.close()

    if os.name == "nt" and verbosity >= 1:
        cmd.update({"console": "--console"})

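    # Assemble the final startup command; sorting the keys keeps the option
    # order deterministic from run to run.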
    start_cmd_lst = [mysqld_path, "--no-defaults"]
    sorted_keys = sorted(cmd.keys())
    start_cmd_lst.extend([cmd[val] for val in sorted_keys])
    if verbosity >= 1 and not quiet:
        if verbosity >= 2:
            print("# Startup command for new server:\n"
                  "{0}".format(" ".join(start_cmd_lst)))
        proc = subprocess.Popen(start_cmd_lst, shell=False)
    else:
        proc = subprocess.Popen(start_cmd_lst, shell=False, stdout=fnull,
                                stderr=fnull)

    # Try to connect to the new MySQL instance
    if not quiet:
        print "# Testing connection to new instance..."
    new_sock = None

    if os.name == "posix":
        new_sock = socket_path
    port_int = int(new_port)

    conn = {
        "user": "root",
        "passwd": "",
        "host": conn_val["host"] if conn_val is not None else "localhost",
        "port": port_int,
        "unix_socket": new_sock
    }

    server2_options = {
        'conn_info': conn,
        'role': "clone",
    }
    server2 = Server(server2_options)

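    # Poll the new instance once per second until a connection succeeds or
    # start_timeout seconds have elapsed.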
    i = 0
    while i < start_timeout:
        i += 1
        time.sleep(1)
        try:
            server2.connect()
            i = start_timeout + 1
        except Exception:
            # The server is not up yet; only report the retry on failure.
            if verbosity >= 1 and not quiet:
                print "# trying again..."

    if i == start_timeout:
        raise UtilError("Unable to communicate with new instance. "
                        "Process id = {0}.".format(proc.pid))
    elif not quiet:
        print "# Success!"

    # Set the root password
    if root_pass:
        if not quiet:
            print "# Setting the root password..."
        cmd = [mysqladmin_path, '--no-defaults', '-v', '-uroot']
        if os.name == "posix":
            cmd.append("--socket={0}".format(new_sock))
        else:
            cmd.append("--port={0}".format(int(new_port)))
        cmd.extend(["password", root_pass])
        if verbosity > 0 and not quiet:
            proc = subprocess.Popen(cmd, shell=False)
        else:
            proc = subprocess.Popen(cmd, shell=False,
                                    stdout=fnull, stderr=fnull)

        # Wait for subprocess to finish
        res = proc.wait()

    if not quiet:
        conn_str = "# Connection Information:\n"
        conn_str += "#  -uroot"
        if root_pass:
            conn_str += " -p%s" % root_pass
        if os.name == "posix":
            conn_str += " --socket=%s" % new_sock
        else:
            conn_str += " --port=%s" % new_port
        print conn_str
        print "#...done."

    fnull.close()
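
A detail worth calling out in this example: --datadir and --basedir are
resolved with os.path.abspath() before being baked into the mysqld command
line, so the spawned server keeps working no matter which directory the
script happens to be in when the process starts. A minimal sketch of the same
idea, with placeholder paths and a placeholder binary name:

import os

# Relative inputs, as a user might pass them on the command line.
datadir = "data/clone"
basedir = "../mysql-5.6"

# Resolve once, up front, while the CWD is still the one the user meant;
# the resulting command is then safe to launch from any directory.
cmd = [
    "mysqld",
    "--no-defaults",
    "--datadir={0}".format(os.path.abspath(datadir)),
    "--basedir={0}".format(os.path.abspath(basedir)),
]
print(" ".join(cmd))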

Example 41

View license
def main(segmentation_file=None, label_file=None, output_file_name=None, parameter = "binary_centerline", remove_temp_files = 1, verbose = 0 ):

    # Process for a binary file as output:
    if parameter == "binary_centerline":

        # Binary_centerline: Process for only a segmentation file:
        if "-i" in arguments and "-l" not in arguments:
            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data, file_data, ext_data = sct.extract_fname(segmentation_file)

            # create temporary folder
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data
            set_orientation(file_data+ext_data, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data+ext_data)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file = nibabel.load(fname_segmentation_orient)
            data = file.get_data()
            hdr = file.get_header()

            # Extract min and max index in Z direction
            X, Y, Z = (data>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)

            # serves no purpose (clears the original segmentation voxels)
            for k in range(len(X)):
                data[X[k],Y[k],Z[k]] = 0

            print len(x_centerline)
            # Fit the centerline points with splines and return the new fitted
            # coordinates (done with NURBS for now)
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)
            # Create an image with the centerline
            for iz in range(min_z_index, max_z_index+1):
                data[round(x_centerline_fit[iz-min_z_index]), round(y_centerline_fit[iz-min_z_index]), iz] = 1    #with nurbs fitting
                #data[round(x_centerline[iz-min_z_index]), round(y_centerline[iz-min_z_index]), iz] = 1             #without nurbs fitting


            # Write the centerline image in RPI orientation
            hdr.set_data_dtype('uint8') # set image type to uint8
            print '\nWrite NIFTI volumes...'
            img = nibabel.Nifti1Image(data, None, hdr)
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+ext_data
            nibabel.save(img,'tmp.centerline.nii')
            sct.generate_output_file('tmp.centerline.nii',file_name)

            del data

            # come back to parent folder
            os.chdir('..')

            # Change orientation of the output centerline into input orientation
            print '\nOrient centerline image to input orientation: ' + orientation
            set_orientation(path_tmp+'/'+file_name, orientation, file_name)

            # Remove temporary files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)

            return file_name


        # Binary_centerline: Process for only a label file:
        if "-l" in arguments and "-i" not in arguments:
            file = os.path.abspath(label_file)
            path_data, file_data, ext_data = sct.extract_fname(file)

            file = nibabel.load(label_file)
            data = file.get_data()
            hdr = file.get_header()

            X,Y,Z = (data>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

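            # Fit smoothing splines X(Z) and Y(Z) through the label points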
            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose==1 :
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data =data*0

            for i in xrange(len(X_fit)):
                data[X_fit[i],Y_fit[i],Z_new[i]] = 1


            # Create NIFTI image
            print '\nSave volume ...'
            hdr.set_data_dtype('float32') # set image type to float32
            img = nibabel.Nifti1Image(data, None, hdr)
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+ext_data
            # save volume
            nibabel.save(img,file_name)
            print '\nFile created : ' + file_name

            del data



        #### Binary_centerline: Process for a segmentation file and a label file:
        if "-l" in arguments and "-i" in arguments:

            ## Create a temporary folder that will contain each centerline file of the process
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            ##From label file create centerline image
            print '\nPROCESS PART 1: From label file create centerline image.'
            file_label = os.path.abspath(label_file)
            path_data_label, file_data_label, ext_data_label = sct.extract_fname(file_label)

            file_label = nibabel.load(label_file)

            #Copy label_file into temporary folder
            sct.run('cp '+label_file+' '+path_tmp)

            data_label = file_label.get_data()
            hdr_label = file_label.get_header()

            if verbose == 1:
                from copy import copy
                data_label_to_show = copy(data_label)

            X,Y,Z = (data_label>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose==1 :
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data_label =data_label*0

            for i in xrange(len(X_fit)):
                data_label[X_fit[i],Y_fit[i],Z_new[i]] = 1

            # Create NIFTI image
            print '\nSave volume ...'
            hdr_label.set_data_dtype('float32') # set image type to float32
            img = nibabel.Nifti1Image(data_label, None, hdr_label)
            # save volume
            file_name_label = file_data_label + '_centerline' + ext_data_label
            nibabel.save(img, file_name_label)
            print '\nFile created : ' + file_name_label

            # copy files into tmp folder
            sct.run('cp '+file_name_label+' '+path_tmp)
            # delete the file from the parent folder
            os.remove(file_name_label)
            del data_label


            ##From segmentation file create centerline image
            print '\nPROCESS PART 2: From segmentation file create centerline image.'
            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data_seg, file_data_seg, ext_data_seg = sct.extract_fname(segmentation_file)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data_seg
            set_orientation(file_data_seg+ext_data_seg, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data_seg+ext_data_seg)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file_seg = nibabel.load(fname_segmentation_orient)
            data_seg = file_seg.get_data()
            hdr_seg = file_seg.get_header()

            if verbose == 1:
                data_seg_to_show = copy(data_seg)

            # Extract min and max index in Z direction
            X, Y, Z = (data_seg>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data_seg[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)
            for k in range(len(X)):
                data_seg[X[k],Y[k],Z[k]] = 0
            # Fit the centerline points with splines and return the new fitted coordinates
                    #done with nurbs for now
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)


            # Create an image with the centerline
            for iz in range(min_z_index, max_z_index+1):
                data_seg[round(x_centerline_fit[iz-min_z_index]), round(y_centerline_fit[iz-min_z_index]), iz] = 1
            # Write the centerline image in RPI orientation
            hdr_seg.set_data_dtype('uint8') # set image type to uint8
            print '\nWrite NIFTI volumes...'
            img = nibabel.Nifti1Image(data_seg, None, hdr_seg)
            nibabel.save(img,'tmp.centerline.nii')
            file_name_seg = file_data_seg+'_centerline'+ext_data_seg
            sct.generate_output_file('tmp.centerline.nii',file_name_seg)   # FIXME: problem here

            # copy files into parent folder
            #sct.run('cp '+file_name_seg+' ../')

            del data_seg

            # come back to parent folder
#            os.chdir('..')

            # Change orientation of the output centerline into input orientation
            print '\nOrient centerline image to input orientation: ' + orientation
            set_orientation(file_name_seg, orientation, file_name_seg)



            print '\nRemoving any overlap between the centerline obtained from the label file and the one from the segmentation:'

            ## Remove overlap from the centerline file obtained from the label file
            remove_overlap(file_name_label, file_name_seg, "generated_centerline_without_overlap.nii.gz")


            ## Concatenation of the two centerline files
            print '\nConcatenation of the two centerline files:'
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = 'centerline_total_from_label_and_seg'

            sct.run('fslmaths generated_centerline_without_overlap.nii.gz -add ' + file_name_seg + ' ' + file_name)



            if verbose == 1 :
                import matplotlib.pyplot as plt
                from scipy import ndimage

                #Get back concatenation of segmentation and labels before any processing
                data_concatenate = data_seg_to_show + data_label_to_show
                z_centerline = [iz for iz in range(0, nz, 1) if data_concatenate[:, :, iz].any()]
                nz_nonz = len(z_centerline)
                x_centerline = [0 for iz in range(0, nz_nonz, 1)]
                y_centerline = [0 for iz in range(0, nz_nonz, 1)]


                # Calculate centerline coordinates and create image of the centerline
                for iz in range(0, nz_nonz, 1):
                    x_centerline[iz], y_centerline[iz] = ndimage.measurements.center_of_mass(data_concatenate[:, :, z_centerline[iz]])

                #Load file with resulting centerline
                file_centerline_fit = nibabel.load(file_name)
                data_centerline_fit = file_centerline_fit.get_data()

                z_centerline_fit = [iz for iz in range(0, nz, 1) if data_centerline_fit[:, :, iz].any()]
                nz_nonz_fit = len(z_centerline_fit)
                x_centerline_fit_total = [0 for iz in range(0, nz_nonz_fit, 1)]
                y_centerline_fit_total = [0 for iz in range(0, nz_nonz_fit, 1)]

                #Convert to array
                x_centerline_fit_total = np.asarray(x_centerline_fit_total)
                y_centerline_fit_total = np.asarray(y_centerline_fit_total)
                #Calculate overlap between seg and label
                length_overlap = X_fit.shape[0] + x_centerline_fit.shape[0] - x_centerline_fit_total.shape[0]
                # The total fit is the concatenation of the two fits
                for i in range(x_centerline_fit.shape[0]):
                    x_centerline_fit_total[i] = x_centerline_fit[i]
                    y_centerline_fit_total[i] = y_centerline_fit[i]
                for i in range(X_fit.shape[0]-length_overlap):
                    x_centerline_fit_total[x_centerline_fit.shape[0] + i] = X_fit[i+length_overlap]
                    y_centerline_fit_total[x_centerline_fit.shape[0] + i] = Y_fit[i+length_overlap]
                    print x_centerline_fit.shape[0] + i

                #for iz in range(0, nz_nonz_fit, 1):
                #    x_centerline_fit[iz], y_centerline_fit[iz] = ndimage.measurements.center_of_mass(data_centerline_fit[:, :, z_centerline_fit[iz]])

                #Creation of a vector x that takes into account the distance between the labels
                #x_centerline_fit = np.asarray(x_centerline_fit)
                #y_centerline_fit = np.asarray(y_centerline_fit)
                x_display = [0 for i in range(x_centerline_fit_total.shape[0])]
                y_display = [0 for i in range(y_centerline_fit_total.shape[0])]


                for i in range(0, nz_nonz, 1):
                    x_display[z_centerline[i]-z_centerline[0]] = x_centerline[i]
                    y_display[z_centerline[i]-z_centerline[0]] = y_centerline[i]

                plt.figure(1)
                plt.subplot(2,1,1)
                plt.plot(z_centerline_fit, x_display, 'ro')
                plt.plot(z_centerline_fit, x_centerline_fit_total)
                plt.xlabel("Z")
                plt.ylabel("X")
                plt.title("x and x_fit coordinates")

                plt.subplot(2,1,2)
                plt.plot(z_centerline_fit, y_display, 'ro')
                plt.plot(z_centerline_fit, y_centerline_fit_total)
                plt.xlabel("Z")
                plt.ylabel("Y")
                plt.title("y and y_fit coordinates")
                plt.show()

                del data_concatenate, data_label_to_show, data_seg_to_show, data_centerline_fit

            # Copy result into parent folder
            sct.run('cp '+file_name+' ../')

            # Come back to parent folder
            os.chdir('..')

            # Remove temporary centerline files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)


    # Process for a text file as output:
    if parameter == "text_file":
        print "\nText file process"
        #Process for only a segmentation file:
        if "-i" in arguments and "-l" not in arguments:

            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data, file_data, ext_data = sct.extract_fname(segmentation_file)


            # create temporary folder
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data
            set_orientation(file_data+ext_data, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data+ext_data)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file = nibabel.load(fname_segmentation_orient)
            data = file.get_data()
            hdr = file.get_header()

            # Extract min and max index in Z direction
            X, Y, Z = (data>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)
            for k in range(len(X)):
                data[X[k],Y[k],Z[k]] = 0
            # Fit the centerline points with splines and return the new fitted coordinates
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)

            # Create output text file
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+'.txt'

            sct.printv('\nWrite text file...', verbose)
            #file_results = open("../"+file_name, 'w')
            file_results = open(file_name, 'w')
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(x_centerline_fit[i-min_z_index]) + ' ' + str(y_centerline_fit[i-min_z_index]) + '\n')
            file_results.close()

            # Copy result into parent folder
            sct.run('cp '+file_name+' ../')

            del data

            # come back to parent folder
            os.chdir('..')


            # Remove temporary files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)

            return file_name


        #Process for only a label file:
        if "-l" in arguments and "-i" not in arguments:
            file = os.path.abspath(label_file)
            path_data, file_data, ext_data = sct.extract_fname(file)

            file = nibabel.load(label_file)
            data = file.get_data()
            hdr = file.get_header()

            X,Y,Z = (data>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose==1 :
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data =data*0

            for iz in xrange(len(X_fit)):
                data[X_fit[iz],Y_fit[iz],Z_new[iz]] = 1

            # Create output text file
            sct.printv('\nWrite text file...', verbose)
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+'.txt'
            file_results = open(file_name, 'w')
            min_z_index, max_z_index = min(Z), max(Z)
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(X_fit[i-min_z_index]) + ' ' + str(Y_fit[i-min_z_index]) + '\n')
            file_results.close()

            del data

        #Process for a segmentation file and a label file:
        if "-l" in arguments and "-i" in arguments:

            ## Create a temporary folder that will contain each centerline file of the process
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            ##From label file create centerline text file
            print '\nPROCESS PART 1: From label file create centerline text file.'
            file_label = os.path.abspath(label_file)
            path_data_label, file_data_label, ext_data_label = sct.extract_fname(file_label)

            file_label = nibabel.load(label_file)

            #Copy label_file into temporary folder
            sct.run('cp '+label_file+' '+path_tmp)

            data_label = file_label.get_data()
            hdr_label = file_label.get_header()

            X,Y,Z = (data_label>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose==1 :
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data_label =data_label*0

            for i in xrange(len(X_fit)):
                data_label[X_fit[i],Y_fit[i],Z_new[i]] = 1

            # Create output text file
            sct.printv('\nWrite text file...', verbose)
            file_name_label = file_data_label+'_centerline'+'.txt'
            file_results = open(path_tmp + '/' + file_name_label, 'w')
            min_z_index, max_z_index = min(Z), max(Z)
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(X_fit[i-min_z_index]) + ' ' + str(Y_fit[i-min_z_index]) + '\n')
            file_results.close()

            # copy files into tmp folder
            #sct.run('cp '+file_name_label+' '+path_tmp)

            del data_label


            ##From segmentation file create centerline text file
            print '\nPROCESS PART 2: From segmentation file create centerline text file.'
            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data_seg, file_data_seg, ext_data_seg = sct.extract_fname(segmentation_file)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data_seg
            set_orientation(file_data_seg+ext_data_seg, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data_seg+ext_data_seg)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file_seg = nibabel.load(fname_segmentation_orient)
            data_seg = file_seg.get_data()
            hdr_seg = file_seg.get_header()

            # Extract min and max index in Z direction
            X, Y, Z = (data_seg>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data_seg[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)
            for k in range(len(X)):
                data_seg[X[k],Y[k],Z[k]] = 0
            # Fit the centerline points with splines and return the new fitted coordinates
                    #done with nurbs for now
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)


            # Create output text file
            file_name_seg = file_data_seg+'_centerline'+'.txt'
            sct.printv('\nWrite text file...', verbose)
            file_results = open(file_name_seg, 'w')
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(x_centerline_fit[i-min_z_index]) + ' ' + str(y_centerline_fit[i-min_z_index]) + '\n')
            file_results.close()

            del data_seg


            print '\nRemoving any overlap between the centerline obtained from the label file and the one from the segmentation:'

            ## Remove overlap from the centerline file obtained from the label file
            remove_overlap(file_name_label, file_name_seg, "generated_centerline_without_overlap1.txt", parameter=1)

            ## Concatenation of the two centerline files
            print '\nConcatenation of the two centerline files:'
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = 'centerline_total_from_label_and_seg.txt'

            with open(file_name_seg, "r") as f_seg:
                with open("generated_centerline_without_overlap1.txt", "r") as f:
                    with open(file_name, "w") as f_output:
                        data_line_seg = f_seg.readlines()
                        data_line = f.readlines()
                        for line in data_line_seg :
                            f_output.write(line)
                        for line in data_line :
                            f_output.write(line)

            # Copy result into parent folder
            sct.run('cp '+file_name+' ../')

            # Come back to parent folder
            os.chdir('..')

            # Remove temporary centerline files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)
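
Note the recurring idiom in both branches of this example: every input path
is normalized with os.path.abspath() *before* the script creates a temporary
folder and os.chdir()s into it. Once the working directory changes, any
relative path the caller supplied would silently point at the wrong location,
while the absolute copy stays valid. A minimal sketch of that pattern (the
processing step is hypothetical):

import os
import shutil
import tempfile

def run_in_tmp(input_file):
    # Resolve the caller's (possibly relative) path while the original
    # working directory is still current.
    input_file = os.path.abspath(input_file)

    tmp_dir = tempfile.mkdtemp()
    old_cwd = os.getcwd()
    try:
        os.chdir(tmp_dir)
        # Still safe: input_file is absolute, so the copy works after chdir.
        shutil.copy(input_file, '.')
        # ... process the local copy here ...
    finally:
        # Always return to the original folder and clean up.
        os.chdir(old_cwd)
        shutil.rmtree(tmp_dir)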

Example 42

View license
def main(segmentation_file=None, label_file=None, output_file_name=None, parameter = "binary_centerline", remove_temp_files = 1, verbose = 0 ):

    # Process for a binary file as output:
    if parameter == "binary_centerline":

        # Binary_centerline: Process for only a segmentation file:
        if "-i" in arguments and "-l" not in arguments:
            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data, file_data, ext_data = sct.extract_fname(segmentation_file)

            # create temporary folder
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data
            set_orientation(file_data+ext_data, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data+ext_data)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file = nibabel.load(fname_segmentation_orient)
            data = file.get_data()
            hdr = file.get_header()

            # Extract min and max index in Z direction
            X, Y, Z = (data>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)

            # serves no purpose (clears the original segmentation voxels)
            for k in range(len(X)):
                data[X[k],Y[k],Z[k]] = 0

            print len(x_centerline)
            # Fit the centerline points with splines and return the new fitted
            # coordinates (done with NURBS for now)
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)
            # Create an image with the centerline
            for iz in range(min_z_index, max_z_index+1):
                data[round(x_centerline_fit[iz-min_z_index]), round(y_centerline_fit[iz-min_z_index]), iz] = 1    #with nurbs fitting
                #data[round(x_centerline[iz-min_z_index]), round(y_centerline[iz-min_z_index]), iz] = 1             #without nurbs fitting


            # Write the centerline image in RPI orientation
            hdr.set_data_dtype('uint8') # set image type to uint8
            print '\nWrite NIFTI volumes...'
            img = nibabel.Nifti1Image(data, None, hdr)
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+ext_data
            nibabel.save(img,'tmp.centerline.nii')
            sct.generate_output_file('tmp.centerline.nii',file_name)

            del data

            # come back to parent folder
            os.chdir('..')

            # Change orientation of the output centerline into input orientation
            print '\nOrient centerline image to input orientation: ' + orientation
            set_orientation(path_tmp+'/'+file_name, orientation, file_name)

            # Remove temporary files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)

            return file_name


        # Binary_centerline: Process for only a label file:
        if "-l" in arguments and "-i" not in arguments:
            file = os.path.abspath(label_file)
            path_data, file_data, ext_data = sct.extract_fname(file)

            file = nibabel.load(label_file)
            data = file.get_data()
            hdr = file.get_header()

            X,Y,Z = (data>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose==1 :
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data =data*0

            for i in xrange(len(X_fit)):
                data[X_fit[i],Y_fit[i],Z_new[i]] = 1


            # Create NIFTI image
            print '\nSave volume ...'
            hdr.set_data_dtype('float32') # set image type to float32
            img = nibabel.Nifti1Image(data, None, hdr)
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+ext_data
            # save volume
            nibabel.save(img,file_name)
            print '\nFile created : ' + file_name

            del data



        #### Binary_centerline: Process for a segmentation file and a label file:
        if "-l" in arguments and "-i" in arguments:

            ## Create a temporary folder that will contain each centerline file of the process
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            ##From label file create centerline image
            print '\nPROCESS PART 1: From label file create centerline image.'
            file_label = os.path.abspath(label_file)
            path_data_label, file_data_label, ext_data_label = sct.extract_fname(file_label)

            file_label = nibabel.load(label_file)

            #Copy label_file into temporary folder
            sct.run('cp '+label_file+' '+path_tmp)

            data_label = file_label.get_data()
            hdr_label = file_label.get_header()

            if verbose == 1:
                from copy import copy
                data_label_to_show = copy(data_label)

            X,Y,Z = (data_label>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose==1 :
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data_label =data_label*0

            for i in xrange(len(X_fit)):
                data_label[X_fit[i],Y_fit[i],Z_new[i]] = 1

            # Create NIFTI image
            print '\nSave volume ...'
            hdr_label.set_data_dtype('float32') # set image type to float32
            img = nibabel.Nifti1Image(data_label, None, hdr_label)
            # save volume
            file_name_label = file_data_label + '_centerline' + ext_data_label
            nibabel.save(img, file_name_label)
            print '\nFile created : ' + file_name_label

            # copy files into tmp folder
            sct.run('cp '+file_name_label+' '+path_tmp)
            # delete the file from the parent folder
            os.remove(file_name_label)
            del data_label


            ##From segmentation file create centerline image
            print '\nPROCESS PART 2: From segmentation file create centerline image.'
            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data_seg, file_data_seg, ext_data_seg = sct.extract_fname(segmentation_file)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data_seg
            set_orientation(file_data_seg+ext_data_seg, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data_seg+ext_data_seg)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file_seg = nibabel.load(fname_segmentation_orient)
            data_seg = file_seg.get_data()
            hdr_seg = file_seg.get_header()

            if verbose == 1:
                data_seg_to_show = copy(data_seg)

            # Extract min and max index in Z direction
            X, Y, Z = (data_seg>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data_seg[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)
            for k in range(len(X)):
                data_seg[X[k],Y[k],Z[k]] = 0
            # Fit the centerline points with splines and return the new fitted coordinates
                    #done with nurbs for now
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)


            # Create an image with the centerline
            for iz in range(min_z_index, max_z_index+1):
                data_seg[round(x_centerline_fit[iz-min_z_index]), round(y_centerline_fit[iz-min_z_index]), iz] = 1
            # Write the centerline image in RPI orientation
            hdr_seg.set_data_dtype('uint8') # set image type to uint8
            print '\nWrite NIFTI volumes...'
            img = nibabel.Nifti1Image(data_seg, None, hdr_seg)
            nibabel.save(img,'tmp.centerline.nii')
            file_name_seg = file_data_seg+'_centerline'+ext_data_seg
            sct.generate_output_file('tmp.centerline.nii',file_name_seg)   # FIXME: problem here

            # copy files into parent folder
            #sct.run('cp '+file_name_seg+' ../')

            del data_seg

            # come back to parent folder
#            os.chdir('..')

            # Change orientation of the output centerline into input orientation
            print '\nOrient centerline image to input orientation: ' + orientation
            set_orientation(file_name_seg, orientation, file_name_seg)



            print '\nRemoving any overlap between the centerline obtained from the label file and the one from the segmentation:'

            ## Remove overlap from the centerline file obtained from the label file
            remove_overlap(file_name_label, file_name_seg, "generated_centerline_without_overlap.nii.gz")


            ## Concatenation of the two centerline files
            print '\nConcatenation of the two centerline files:'
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = 'centerline_total_from_label_and_seg'

            sct.run('fslmaths generated_centerline_without_overlap.nii.gz -add ' + file_name_seg + ' ' + file_name)



            if verbose == 1 :
                import matplotlib.pyplot as plt
                from scipy import ndimage

                #Get back concatenation of segmentation and labels before any processing
                data_concatenate = data_seg_to_show + data_label_to_show
                z_centerline = [iz for iz in range(0, nz, 1) if data_concatenate[:, :, iz].any()]
                nz_nonz = len(z_centerline)
                x_centerline = [0 for iz in range(0, nz_nonz, 1)]
                y_centerline = [0 for iz in range(0, nz_nonz, 1)]


                # Calculate centerline coordinates and create image of the centerline
                for iz in range(0, nz_nonz, 1):
                    x_centerline[iz], y_centerline[iz] = ndimage.measurements.center_of_mass(data_concatenate[:, :, z_centerline[iz]])

                #Load file with resulting centerline
                file_centerline_fit = nibabel.load(file_name)
                data_centerline_fit = file_centerline_fit.get_data()

                z_centerline_fit = [iz for iz in range(0, nz, 1) if data_centerline_fit[:, :, iz].any()]
                nz_nonz_fit = len(z_centerline_fit)
                x_centerline_fit_total = [0 for iz in range(0, nz_nonz_fit, 1)]
                y_centerline_fit_total = [0 for iz in range(0, nz_nonz_fit, 1)]

                #Convert to array
                x_centerline_fit_total = np.asarray(x_centerline_fit_total)
                y_centerline_fit_total = np.asarray(y_centerline_fit_total)
                #Calculate overlap between seg and label
                length_overlap = X_fit.shape[0] + x_centerline_fit.shape[0] - x_centerline_fit_total.shape[0]
                # The total fit is the concatenation of the two fits
                for i in range(x_centerline_fit.shape[0]):
                    x_centerline_fit_total[i] = x_centerline_fit[i]
                    y_centerline_fit_total[i] = y_centerline_fit[i]
                for i in range(X_fit.shape[0]-length_overlap):
                    x_centerline_fit_total[x_centerline_fit.shape[0] + i] = X_fit[i+length_overlap]
                    y_centerline_fit_total[x_centerline_fit.shape[0] + i] = Y_fit[i+length_overlap]
                    print x_centerline_fit.shape[0] + i

                #for iz in range(0, nz_nonz_fit, 1):
                #    x_centerline_fit[iz], y_centerline_fit[iz] = ndimage.measurements.center_of_mass(data_centerline_fit[:, :, z_centerline_fit[iz]])

                #Creation of a vector x that takes into account the distance between the labels
                #x_centerline_fit = np.asarray(x_centerline_fit)
                #y_centerline_fit = np.asarray(y_centerline_fit)
                x_display = [0 for i in range(x_centerline_fit_total.shape[0])]
                y_display = [0 for i in range(y_centerline_fit_total.shape[0])]


                for i in range(0, nz_nonz, 1):
                    x_display[z_centerline[i]-z_centerline[0]] = x_centerline[i]
                    y_display[z_centerline[i]-z_centerline[0]] = y_centerline[i]

                plt.figure(1)
                plt.subplot(2,1,1)
                plt.plot(z_centerline_fit, x_display, 'ro')
                plt.plot(z_centerline_fit, x_centerline_fit_total)
                plt.xlabel("Z")
                plt.ylabel("X")
                plt.title("x and x_fit coordinates")

                plt.subplot(2,1,2)
                plt.plot(z_centerline_fit, y_display, 'ro')
                plt.plot(z_centerline_fit, y_centerline_fit_total)
                plt.xlabel("Z")
                plt.ylabel("Y")
                plt.title("y and y_fit coordinates")
                plt.show()

                del data_concatenate, data_label_to_show, data_seg_to_show, data_centerline_fit

            # Copy result into parent folder
            sct.run('cp '+file_name+' ../')

            # Come back to parent folder
            os.chdir('..')

            # Remove temporary centerline files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)


    # Process for a text file as output:
    if parameter == "text_file":
        print "\nText file process"
        #Process for only a segmentation file:
        if "-i" in arguments and "-l" not in arguments:

            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data, file_data, ext_data = sct.extract_fname(segmentation_file)


            # create temporary folder
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data
            set_orientation(file_data+ext_data, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data+ext_data)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' y '+str(nz)+' z '+str(nt)

            print '\nOpen segmentation volume...'
            file = nibabel.load(fname_segmentation_orient)
            data = file.get_data()
            hdr = file.get_header()

            # Extract min and max index in Z direction
            X, Y, Z = (data>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)
            for k in range(len(X)):
                data[X[k],Y[k],Z[k]] = 0
            # Fit the centerline points with splines and return the new fitted coordinates
            x_centerline_fit, y_centerline_fit,x_centerline_deriv,y_centerline_deriv,z_centerline_deriv = b_spline_centerline(x_centerline,y_centerline,z_centerline)

            # Create output text file
            if output_file_name != None :
                file_name = output_file_name
            else: file_name = file_data+'_centerline'+'.txt'

            sct.printv('\nWrite text file...', verbose)
            #file_results = open("../"+file_name, 'w')
            file_results = open(file_name, 'w')
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(x_centerline_fit[i-min_z_index]) + ' ' + str(y_centerline_fit[i-min_z_index]) + '\n')
            file_results.close()

            # Copy result into parent folder
            sct.run('cp '+file_name+' ../')

            del data

            # come back to parent folder
            os.chdir('..')


            # Remove temporary files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)

            return file_name


        #Process for only a label file:
        if "-l" in arguments and "-i" not in arguments:
            file = os.path.abspath(label_file)
            path_data, file_data, ext_data = sct.extract_fname(file)

            file = nibabel.load(label_file)
            data = file.get_data()
            hdr = file.get_header()

            X,Y,Z = (data>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose == 1:
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data = data*0

            for iz in xrange(len(X_fit)):
                # spline outputs are floats; indices must be integers
                data[int(round(X_fit[iz])), int(round(Y_fit[iz])), int(Z_new[iz])] = 1

            # Create output text file
            sct.printv('\nWrite text file...', verbose)
            if output_file_name is not None:
                file_name = output_file_name
            else:
                file_name = file_data+'_centerline'+ext_data
            file_results = open(file_name, 'w')
            min_z_index, max_z_index = min(Z), max(Z)
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(X_fit[i-min_z_index]) + ' ' + str(Y_fit[i-min_z_index]) + '\n')
            file_results.close()

            del data

        #Process for a segmentation file and a label file:
        if "-l" and "-i" in arguments:

            ## Creation of a temporary file that will contain each centerline file of the process
            path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
            sct.run('mkdir '+path_tmp)

            ##From label file create centerline text file
            print '\nPROCESS PART 1: From label file create centerline text file.'
            file_label = os.path.abspath(label_file)
            path_data_label, file_data_label, ext_data_label = sct.extract_fname(file_label)

            file_label = nibabel.load(label_file)

            #Copy label_file into temporary folder
            sct.run('cp '+label_file+' '+path_tmp)

            data_label = file_label.get_data()
            hdr_label = file_label.get_header()

            X,Y,Z = (data_label>0).nonzero()
            Z_new = np.linspace(min(Z),max(Z),(max(Z)-min(Z)+1))

            # sort X and Y arrays using Z
            X = [X[i] for i in Z[:].argsort()]
            Y = [Y[i] for i in Z[:].argsort()]
            Z = [Z[i] for i in Z[:].argsort()]

            #print X, Y, Z

            f1 = interpolate.UnivariateSpline(Z, X)
            f2 = interpolate.UnivariateSpline(Z, Y)

            X_fit = f1(Z_new)
            Y_fit = f2(Z_new)

            #print X_fit
            #print Y_fit

            if verbose == 1:
                import matplotlib.pyplot as plt

                plt.figure()
                plt.plot(Z_new,X_fit)
                plt.plot(Z,X,'o',linestyle = 'None')
                plt.show()

                plt.figure()
                plt.plot(Z_new,Y_fit)
                plt.plot(Z,Y,'o',linestyle = 'None')
                plt.show()

            data_label = data_label*0

            for i in xrange(len(X_fit)):
                # spline outputs are floats; indices must be integers
                data_label[int(round(X_fit[i])), int(round(Y_fit[i])), int(Z_new[i])] = 1

            # Create output text file
            sct.printv('\nWrite text file...', verbose)
            file_name_label = file_data_label+'_centerline'+'.txt'
            file_results = open(path_tmp + '/' + file_name_label, 'w')
            min_z_index, max_z_index = min(Z), max(Z)
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(X_fit[i-min_z_index]) + ' ' + str(Y_fit[i-min_z_index]) + '\n')
            file_results.close()

            # copy files into tmp folder
            #sct.run('cp '+file_name_label+' '+path_tmp)

            del data_label


            ##From segmentation file create centerline text file
            print '\nPROCESS PART 2: From segmentation file create centerline image.'
            # Extract path, file and extension
            segmentation_file = os.path.abspath(segmentation_file)
            path_data_seg, file_data_seg, ext_data_seg = sct.extract_fname(segmentation_file)

            # copy files into tmp folder
            sct.run('cp '+segmentation_file+' '+path_tmp)

            # go to tmp folder
            os.chdir(path_tmp)

            # Change orientation of the input segmentation into RPI
            print '\nOrient segmentation image to RPI orientation...'
            fname_segmentation_orient = 'tmp.segmentation_rpi' + ext_data_seg
            set_orientation(file_data_seg+ext_data_seg, 'RPI', fname_segmentation_orient)

            # Extract orientation of the input segmentation
            orientation = get_orientation(file_data_seg+ext_data_seg)
            print '\nOrientation of segmentation image: ' + orientation

            # Get size of data
            print '\nGet dimensions data...'
            nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_segmentation_orient)
            print '.. '+str(nx)+' x '+str(ny)+' x '+str(nz)+' x '+str(nt)

            print '\nOpen segmentation volume...'
            file_seg = nibabel.load(fname_segmentation_orient)
            data_seg = file_seg.get_data()
            hdr_seg = file_seg.get_header()

            # Extract min and max index in Z direction
            X, Y, Z = (data_seg>0).nonzero()
            min_z_index, max_z_index = min(Z), max(Z)
            x_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            y_centerline = [0 for i in range(0,max_z_index-min_z_index+1)]
            z_centerline = [iz for iz in range(min_z_index, max_z_index+1)]
            # Extract segmentation points and average per slice
            for iz in range(min_z_index, max_z_index+1):
                x_seg, y_seg = (data_seg[:,:,iz]>0).nonzero()
                x_centerline[iz-min_z_index] = np.mean(x_seg)
                y_centerline[iz-min_z_index] = np.mean(y_seg)
            for k in range(len(X)):
                data_seg[X[k],Y[k],Z[k]] = 0
            # Fit the centerline points with splines and return the new fitted coordinates
            # done with nurbs for now
            x_centerline_fit, y_centerline_fit, x_centerline_deriv, y_centerline_deriv, z_centerline_deriv = b_spline_centerline(x_centerline, y_centerline, z_centerline)


            # Create output text file
            file_name_seg = file_data_seg+'_centerline'+'.txt'
            sct.printv('\nWrite text file...', verbose)
            file_results = open(file_name_seg, 'w')
            for i in range(min_z_index, max_z_index+1):
                file_results.write(str(int(i)) + ' ' + str(x_centerline_fit[i-min_z_index]) + ' ' + str(y_centerline_fit[i-min_z_index]) + '\n')
            file_results.close()

            del data_seg


            print '\nRemoving any overlap of the centerline obtained from the label file:'

            ## Remove overlap from the centerline file obtained with the label file
            remove_overlap(file_name_label, file_name_seg, "generated_centerline_without_overlap1.txt", parameter=1)

            ## Concatenation of the two centerline files
            print '\nConcatenation of the two centerline files:'
            if output_file_name is not None:
                file_name = output_file_name
            else:
                file_name = 'centerline_total_from_label_and_seg.txt'

            f_output = open(file_name, "w")
            f_output.close()
            with open(file_name_seg, "r") as f_seg:
                with open("generated_centerline_without_overlap1.txt", "r") as f:
                    with open(file_name, "w") as f_output:
                        data_line_seg = f_seg.readlines()
                        data_line = f.readlines()
                        for line in data_line_seg :
                            f_output.write(line)
                        for line in data_line :
                            f_output.write(line)

            # Copy result into parent folder
            sct.run('cp '+file_name+' ../')

            # Come back to parent folder
            os.chdir('..')

            # Remove temporary centerline files
            if remove_temp_files:
                print('\nRemove temporary files...')
                sct.run('rm -rf '+path_tmp)
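
The step worth noticing in this example is the abspath-then-chdir sequence: the input files are resolved to absolute paths before the script chdirs into a throwaway folder, so the 'cp' into the temp folder and the final 'cp ... ../' both land where they should. A minimal sketch of that idiom, assuming only the standard library (process_in_tmpdir and input_file are hypothetical names, not part of the project):

import os
import shutil
import tempfile

def process_in_tmpdir(input_file):
    # Resolve while the caller's working directory is still current;
    # a relative path would dangle once we chdir below.
    input_file = os.path.abspath(input_file)
    old_cwd = os.getcwd()
    path_tmp = tempfile.mkdtemp(prefix='tmp.')
    try:
        shutil.copy(input_file, path_tmp)
        os.chdir(path_tmp)
        # ... per-slice processing would go here ...
    finally:
        os.chdir(old_cwd)        # come back to the parent folder
        shutil.rmtree(path_tmp)  # remove temporary files

Using try/finally also cleans up on the error path, which the example above leaves to its remove_temp_files flag.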

Example 43

Project: twistranet
Source File: twistranet_project.py
View license
def twistranet_project():
    """
    Copies the contents of the project_template directory to a new directory
    specified as an argument to the command line.
    """
    # That is just to check that we can import the TN package.
    # If something goes wrong here, just add a --pythonpath option.     
    parser = OptionParser(
        usage = "usage: %prog [options] [<template>] <project_path>\n"
            "  where project_name is the name of the directory that will be created for your site,\n"
            "  <path> is the (optional) path where you want to install it.\n"
    )
    parser.add_option("-n", "--no-bootstrap",
        action="store_false", dest="bootstrap", default=True,
        help="Don't bootstrap project immediately. Use this if you want to review your settings before bootstraping.",
    )
    parser.add_option("-d", "--develmode",
        action="store_true", dest="develmode", default=False,
        help="Use this option if you want to work on sources in your project. eg: static files",
    )
    (options, args) = parser.parse_args()

    # Check template and path args
    if len(args) < 1:
        parser.error("project_path must be specified.")
    elif len(args) == 1:
        project_path = args[0]
        project_template = 'default'
    elif len(args) == 2:
        project_path = args[1]
        project_template = args[0]
    
    # Check if we can import TN.
    import twistranet
    
    # We decompose the given path to split installdir and project_name
    project_path = os.path.abspath(project_path)
    if os.path.lexists(project_path):
        parser.error("Directory '%s' already exists. "
            "Please specify a non-existing directory." % (project_path, ))
    project_name = os.path.split(project_path)[1]
    
    # Check if project_name is correct
    try:
        eval("%s is True" % project_name)
    except SyntaxError:
        parser.error("Directory '%s' must be a valid python identifier." % project_name)
    except:
        pass
    # Ensure the given directory name doesn't clash with an existing Python
    # package/module.
    try:
        __import__(project_name)
    except ImportError:
        pass
    except ValueError:
        pass # It's ok to install in the same directory ;)
    else:
        parser.error("'%s' conflicts with the name of an existing "
            "Python module and cannot be used as a project name. "
            "Please try another name." % project_name)

    # Build the project up copying over the twistranet project_template
    twist_package = __import__('twistranet')
    twist_package_path = os.path.dirname(os.path.abspath(twist_package.__file__))
    template_dir = os.path.join(twist_package_path, "project_templates", "default")
    if not os.path.isdir(template_dir):
        parser.error("Template '%s' is invalid." % project_template)
    try:
        shutil.copytree(template_dir, project_path, ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
    except AttributeError:
        print "shutil.copytree is likely not to have the 'ignore' attribute available.\n"
        shutil.copytree(template_dir, project_path)
    
    replaceable_files = [(os.path.join(project_path, "local_settings.py")),]

    # project variables replaced in the project files
    replacement = {
        "SECRET_KEY.*$":        "SECRET_KEY = '%s'" % (uuid1(), ),
        "__INSTANCE_NAME__":    '%s' % project_name,
    }

    # If project_template != "default", we copy the project_template-specific files as well
    if project_template != "default":
        source_root = os.path.join(twist_package_path, "project_templates", project_template)
        if not os.path.isdir(source_root):
            source_root = os.path.abspath(os.path.join(os.path.curdir, project_template))
        if not os.path.isdir(source_root):
            raise ValueError("Invalid template directory: '%s'" % source_root)
        dest_root = project_path
        for root, dirs, files in os.walk(source_root):
            # Ugly way to deal with dotted dirs
            if '/.' in root:
                continue
            relative_root = root[len(source_root) + 1:]
            
            for d in dirs:
                if d.startswith('.'):
                    continue
                dest_dir = os.path.join(dest_root, relative_root, d)
                if not os.path.isdir(dest_dir):
                    os.mkdir(dest_dir)
                    
            for fname in files:
                # Ignore dotted files, and rename if it contains any replacement string
                if fname.startswith('.'):
                    continue
                # if a file is named with __INSTANCE_NAME__.ext rename it with project_name
                dname = fname
                for regex, repl in replacement.items():
                    dname = re.sub(regex, repl, dname)

                # Actually copy
                dest_file = os.path.join(dest_root, relative_root, dname)
                shutil.copy(
                    os.path.join(source_root, root, fname),
                    dest_file,
                )
                # in project files some files must be changed (as done for local_settings, see below)
                replaceable_files.append(dest_file)


    # replace some settings.
    for replaceable_path in replaceable_files:
        with open(replaceable_path, "r") as f:
            data = f.read()
        with open(replaceable_path, "w") as f:
            for regex, repl in replacement.items():
                data = re.sub(regex, repl, data)
            f.write(data)
            f.close()

    # we append project_path to sys.path, used for bootstrap and for devel mode configuration (-d).
    sys.path.insert(0, project_path)        # Here is how we're gonna find the 'settings' module from here.
    # XXX NOT VERY DJANGOISH TO USE JUST 'settings' HERE !
    os.environ["DJANGO_SETTINGS_MODULE"] = 'settings'
    os.environ["TWISTRANET_NOMAIL"] = "1"   # Disable emails
    import settings
    
    # update settings.TWISTRANET_STATIC_PATH in devel mode
    # to use theme from product itself
    if options.develmode:
        from django.utils.importlib import import_module
        theme_app = import_module(settings.TWISTRANET_THEME_APP)
        theme_app_dir = os.path.split(theme_app.__file__)[0]
        DEVEL_TWISTRANET_STATIC_PATH = os.path.abspath(os.path.join(theme_app_dir, 'static'))
        DEVEL_TWISTRANET_LOCALE_PATHS = "("
        for twapp in ('core', 'notifier', 'search', 'sharing', 'tagging', 'twistapp', ):
            DEVEL_TWISTRANET_LOCALE_PATHS += "r'%s', " % os.path.abspath(os.path.join(twist_package_path, twapp, 'locale'))
        DEVEL_TWISTRANET_LOCALE_PATHS += ")"
        settings_path = os.path.join(project_path, "settings.py")
        f = open(settings_path, "r")
        data = f.read()
        f.close()
        f = open(settings_path, "w")
        #  XXX TODO : change with a replace
        data += '''
# ADDED FOR DEVEL MODE ONLY

TWISTRANET_STATIC_PATH = r"%(static_path)s"
STATICFILES_DIRS = ( r"%(static_path)s",)
COMPRESS_ROOT = r"%(static_path)s"
LOCALE_PATHS = %(locale_paths)s

        ''' % { 'static_path': DEVEL_TWISTRANET_STATIC_PATH ,
                'locale_paths': DEVEL_TWISTRANET_LOCALE_PATHS,
              }


        f.write(data)
        f.close()
        # fix settings for the first server start
        settings.TWISTRANET_STATIC_PATH = DEVEL_TWISTRANET_STATIC_PATH
        settings.STATICFILES_DIRS = (DEVEL_TWISTRANET_STATIC_PATH ,)
        settings.COMPRESS_ROOT = DEVEL_TWISTRANET_STATIC_PATH

    # As we use a standard sqlite configuration, we can bootstrap quite safely right now,
    # then we start the server.
    if options.bootstrap:
        from django.core.management import call_command
        from django import conf
        # update static files,
        # excepted in devel mode
        if not options.develmode:
            call_command('twistranet_update')
        call_command('twistranet_bootstrap')
        
        # Now we can start the server!
        os.environ["TWISTRANET_NOMAIL"] = ""    # Re-enable emails
        call_command("runserver", "0.0.0.0:8000", use_reloader = False,  )

Example 44

Project: osc
Source File: build.py
View license
def main(apiurl, opts, argv):

    repo = argv[0]
    arch = argv[1]
    build_descr = argv[2]
    xp = []
    build_root = None
    cache_dir  = None
    build_uid = ''
    vm_type = config['build-type']
    vm_telnet = None

    build_descr = os.path.abspath(build_descr)
    build_type = os.path.splitext(build_descr)[1][1:]
    if os.path.basename(build_descr) == 'PKGBUILD':
        build_type = 'arch'
    if os.path.basename(build_descr) == 'build.collax':
        build_type = 'collax'
    if os.path.basename(build_descr) == 'snapcraft.yaml':
        build_type = 'snapcraft'
    if build_type not in ['spec', 'dsc', 'kiwi', 'arch', 'collax', 'livebuild', 'snapcraft']:
        raise oscerr.WrongArgs(
                'Unknown build type: \'%s\'. Build description should end in .spec, .dsc, .kiwi, .yaml or .livebuild.' \
                        % build_type)
    if not os.path.isfile(build_descr):
        raise oscerr.WrongArgs('Error: build description file named \'%s\' does not exist.' % build_descr)

    buildargs = []
    if not opts.userootforbuild:
        buildargs.append('--norootforbuild')
    if opts.clean:
        buildargs.append('--clean')
    if opts.noinit:
        buildargs.append('--noinit')
    if opts.nochecks:
        buildargs.append('--no-checks')
    if not opts.no_changelog:
        buildargs.append('--changelog')
    if opts.root:
        build_root = opts.root
    if opts.target:
        buildargs.append('--target=%s' % opts.target)
    if opts.threads:
        buildargs.append('--threads=%s' % opts.threads)
    if opts.jobs:
        buildargs.append('--jobs=%s' % opts.jobs)
    elif config['build-jobs'] > 1:
        buildargs.append('--jobs=%s' % config['build-jobs'])
    if opts.icecream or config['icecream'] != '0':
        if opts.icecream:
            num = opts.icecream
        else:
            num = config['icecream']

        if int(num) > 0:
            buildargs.append('--icecream=%s' % num)
            xp.append('icecream')
            xp.append('gcc-c++')
    if opts.ccache:
        buildargs.append('--ccache')
        xp.append('ccache')
    if opts.linksources:
        buildargs.append('--linksources')
    if opts.baselibs:
        buildargs.append('--baselibs')
    if opts.debuginfo:
        buildargs.append('--debug')
    if opts._with:
        for o in opts._with:
            buildargs.append('--with=%s' % o)
    if opts.without:
        for o in opts.without:
            buildargs.append('--without=%s' % o)
    if opts.define:
        for o in opts.define:
            buildargs.append('--define=%s' % o)
    if config['build-uid']:
        build_uid = config['build-uid']
    if opts.build_uid:
        build_uid = opts.build_uid
    if build_uid:
        buildidre = re.compile('^[0-9]{1,5}:[0-9]{1,5}$')
        if build_uid == 'caller':
            buildargs.append('--uid=%s:%s' % (os.getuid(), os.getgid()))
        elif buildidre.match(build_uid):
            buildargs.append('--uid=%s' % build_uid)
        else:
            print('Error: build-uid arg must be 2 colon separated numerics: "uid:gid" or "caller"', file=sys.stderr)
            return 1
    if opts.vm_type:
        vm_type = opts.vm_type
    if opts.vm_telnet:
        vm_telnet = opts.vm_telnet
    if opts.alternative_project:
        prj = opts.alternative_project
        pac = '_repository'
    else:
        prj = store_read_project(os.curdir)
        if opts.local_package:
            pac = '_repository'
        else:
            pac = store_read_package(os.curdir)
    if opts.shell:
        buildargs.append("--shell")

    orig_build_root = config['build-root']
    # make it possible to override configuration of the rc file
    for var in ['OSC_PACKAGECACHEDIR', 'OSC_SU_WRAPPER', 'OSC_BUILD_ROOT']:
        val = os.getenv(var)
        if val:
            if var.startswith('OSC_'): var = var[4:]
            var = var.lower().replace('_', '-')
            if var in config:
                print('Overriding config value for %s=\'%s\' with \'%s\'' % (var, config[var], val))
            config[var] = val

    pacname = pac
    if pacname == '_repository':
        if not opts.local_package:
            try:
                pacname = store_read_package(os.curdir)
            except oscerr.NoWorkingCopy:
                opts.local_package = True
        if opts.local_package:
            pacname = os.path.splitext(build_descr)[0]
    apihost = urlsplit(apiurl)[1]
    if not build_root:
        build_root = config['build-root']
        if build_root == orig_build_root:
            # ENV var was not set
            build_root = config['api_host_options'][apiurl].get('build-root', build_root)
        try:
            build_root = build_root % {'repo': repo, 'arch': arch,
                         'project': prj, 'package': pacname, 'apihost': apihost}
        except:
            pass

    cache_dir = config['packagecachedir'] % {'apihost': apihost}

    extra_pkgs = []
    if not opts.extra_pkgs:
        extra_pkgs = config['extra-pkgs']
    elif opts.extra_pkgs != ['']:
        extra_pkgs = opts.extra_pkgs

    if xp:
        extra_pkgs += xp

    prefer_pkgs = {}
    build_descr_data = open(build_descr).read()

    # XXX: dirty hack but there's no api to provide custom defines
    if opts.without:
        s = ''
        for i in opts.without:
            s += "%%define _without_%s 1\n" % i
        build_descr_data = s + build_descr_data
    if opts._with:
        s = ''
        for i in opts._with:
            s += "%%define _with_%s 1\n" % i
        build_descr_data = s + build_descr_data
    if opts.define:
        s = ''
        for i in opts.define:
            s += "%%define %s\n" % i
        build_descr_data = s + build_descr_data

    cpiodata = None
    servicefile = os.path.join(os.path.dirname(build_descr), "_service")
    if not os.path.isfile(servicefile):
        servicefile = None
    else:
        print('Using local _service file')
    buildenvfile = os.path.join(os.path.dirname(build_descr), "_buildenv." + repo + "." + arch)
    if not os.path.isfile(buildenvfile):
        buildenvfile = os.path.join(os.path.dirname(build_descr), "_buildenv")
        if not os.path.isfile(buildenvfile):
            buildenvfile = None
        else:
            print('Using local buildenv file: %s' % os.path.basename(buildenvfile))
    if buildenvfile or servicefile:
        from .util import cpio
        if not cpiodata:
            cpiodata = cpio.CpioWrite()

    if opts.prefer_pkgs:
        print('Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs))
        from .util import cpio
        if not cpiodata:
            cpiodata = cpio.CpioWrite()
        prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type, cpiodata)

    if cpiodata:
        cpiodata.add(os.path.basename(build_descr), build_descr_data)
        # buildenv must come last for compatibility reasons...
        if buildenvfile:
            cpiodata.add("buildenv", open(buildenvfile).read())
        if servicefile:
            cpiodata.add("_service", open(servicefile).read())
        build_descr_data = cpiodata.get()

    # special handling for overlay and rsync-src/dest
    specialcmdopts = []
    if opts.rsyncsrc or opts.rsyncdest:
        if not opts.rsyncsrc or not opts.rsyncdest:
            raise oscerr.WrongOptions('When using --rsync-{src,dest} both parameters have to be specified.')
        myrsyncsrc = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.rsyncsrc)))
        if not os.path.isdir(myrsyncsrc):
            raise oscerr.WrongOptions('--rsync-src %s is no valid directory!' % opts.rsyncsrc)
        # can't check destination - it's in the target chroot ;) - but we can check for sanity
        myrsyncdest = os.path.expandvars(opts.rsyncdest)
        if not os.path.isabs(myrsyncdest):
            raise oscerr.WrongOptions('--rsync-dest %s is no absolute path (starting with \'/\')!' % opts.rsyncdest)
        specialcmdopts = ['--rsync-src='+myrsyncsrc, '--rsync-dest='+myrsyncdest]
    if opts.overlay:
        myoverlay = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.overlay)))
        if not os.path.isdir(myoverlay):
            raise oscerr.WrongOptions('--overlay %s is no valid directory!' % opts.overlay)
        specialcmdopts += ['--overlay='+myoverlay]

    bi_file = None
    bc_file = None
    bi_filename = '_buildinfo-%s-%s.xml' % (repo, arch)
    bc_filename = '_buildconfig-%s-%s' % (repo, arch)
    if is_package_dir('.') and os.access(osc.core.store, os.W_OK):
        bi_filename = os.path.join(os.getcwd(), osc.core.store, bi_filename)
        bc_filename = os.path.join(os.getcwd(), osc.core.store, bc_filename)
    elif not os.access('.', os.W_OK):
        bi_file = NamedTemporaryFile(prefix=bi_filename)
        bi_filename = bi_file.name
        bc_file = NamedTemporaryFile(prefix=bc_filename)
        bc_filename = bc_file.name
    else:
        bi_filename = os.path.abspath(bi_filename)
        bc_filename = os.path.abspath(bc_filename)

    try:
        if opts.noinit:
            if not os.path.isfile(bi_filename):
                raise oscerr.WrongOptions('--noinit is not possible, no local buildinfo file')
            print('Use local \'%s\' file as buildinfo' % bi_filename)
            if not os.path.isfile(bc_filename):
                raise oscerr.WrongOptions('--noinit is not possible, no local buildconfig file')
            print('Use local \'%s\' file as buildconfig' % bc_filename)
        elif opts.offline:
            if not os.path.isfile(bi_filename):
                raise oscerr.WrongOptions('--offline is not possible, no local buildinfo file')
            print('Use local \'%s\' file as buildinfo' % bi_filename)
            if not os.path.isfile(bc_filename):
                raise oscerr.WrongOptions('--offline is not possible, no local buildconfig file')
        else:
            print('Getting buildinfo from server and store to %s' % bi_filename)
            bi_text = ''.join(get_buildinfo(apiurl,
                                            prj,
                                            pac,
                                            repo,
                                            arch,
                                            specfile=build_descr_data,
                                            addlist=extra_pkgs))
            if not bi_file:
                bi_file = open(bi_filename, 'w')
            # maybe we should check for errors before saving the file
            bi_file.write(bi_text)
            bi_file.flush()
            print('Getting buildconfig from server and store to %s' % bc_filename)
            bc = get_buildconfig(apiurl, prj, repo)
            if not bc_file:
                bc_file = open(bc_filename, 'w')
            bc_file.write(bc)
            bc_file.flush()
    except HTTPError as e:
        if e.code == 404:
            # check what caused the 404
            if meta_exists(metatype='prj', path_args=(quote_plus(prj), ),
                           template_args=None, create_new=False, apiurl=apiurl):
                pkg_meta_e = None
                try:
                    # take care not to run into double trouble.
                    pkg_meta_e = meta_exists(metatype='pkg', path_args=(quote_plus(prj),
                                        quote_plus(pac)), template_args=None, create_new=False,
                                        apiurl=apiurl)
                except:
                    pass

                if pkg_meta_e:
                    print('ERROR: Either wrong repo/arch as parameter or a parse error of .spec/.dsc/.kiwi file due to syntax error', file=sys.stderr)
                else:
                    print('The package \'%s\' does not exist - please ' \
                                        'rerun with \'--local-package\'' % pac, file=sys.stderr)
            else:
                print('The project \'%s\' does not exist - please ' \
                                    'rerun with \'--alternative-project <alternative_project>\'' % prj, file=sys.stderr)
            sys.exit(1)
        else:
            raise

    bi = Buildinfo(bi_filename, apiurl, build_type, list(prefer_pkgs.keys()))

    if bi.debuginfo and not (opts.disable_debuginfo or '--debug' in buildargs):
        buildargs.append('--debug')

    if opts.release:
        bi.release = opts.release

    if bi.release:
        buildargs.append('--release=%s' % bi.release)

    # real arch of this machine
    # vs.
    # arch we are supposed to build for
    if bi.hostarch != None:
        if hostarch != bi.hostarch and not bi.hostarch in can_also_build.get(hostarch, []):
            print('Error: hostarch \'%s\' is required.' % (bi.hostarch), file=sys.stderr)
            return 1
    elif hostarch != bi.buildarch:
        if not bi.buildarch in can_also_build.get(hostarch, []):
            # OBSOLETE: qemu_can_build should not be needed anymore since OBS 2.3
            if vm_type != "emulator" and not bi.buildarch in qemu_can_build:
                print('Error: hostarch \'%s\' cannot build \'%s\'.' % (hostarch, bi.buildarch), file=sys.stderr)
                return 1
            print('WARNING: It is guessed to build on hostarch \'%s\' for \'%s\' via QEMU.' % (hostarch, bi.buildarch), file=sys.stderr)

    rpmlist_prefers = []
    if prefer_pkgs:
        print('Evaluating preferred packages')
        for name, path in prefer_pkgs.items():
            if bi.has_dep(name):
                # We remove a preferred package from the buildinfo, so that the
                # fetcher doesn't try to download it.
                # Instead, we put it in a list which is appended to the rpmlist later.
                # At the same time, this will make sure that these packages are
                # not verified.
                bi.remove_dep(name)
                rpmlist_prefers.append((name, path))
                print(' - %s (%s)' % (name, path))

    print('Updating cache of required packages')

    urllist = []
    if not opts.download_api_only:
        # transform 'url1, url2, url3' form into a list
        if 'urllist' in config:
            if isinstance(config['urllist'], str):
                re_clist = re.compile('[, ]+')
                urllist = [ i.strip() for i in re_clist.split(config['urllist'].strip()) ]
            else:
                urllist = config['urllist']

        # OBS 1.5 and before has no downloadurl defined in buildinfo
        if bi.downloadurl:
            urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
    if opts.disable_cpio_bulk_download:
        urllist.append( '%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s' )

    fetcher = Fetcher(cache_dir,
                      urllist = urllist,
                      api_host_options = config['api_host_options'],
                      offline = opts.noinit or opts.offline,
                      http_debug = config['http_debug'],
                      enable_cpio = not opts.disable_cpio_bulk_download,
                      cookiejar=cookiejar)

    if not opts.trust_all_projects:
        # implicitly trust the project we are building for
        check_trusted_projects(apiurl, [ i for i in bi.projects.keys() if not i == prj ])

    imagefile = ''
    imagesource = ''
    imagebins = []
    if (not config['no_preinstallimage'] and not opts.nopreinstallimage and
        bi.preinstallimage and
        not opts.noinit and not opts.offline and
        (opts.clean or (not os.path.exists(build_root + "/installed-pkg") and
                        not os.path.exists(build_root + "/.build/init_buildsystem.data")))):
        (imagefile, imagesource, imagebins) = get_preinstall_image(apiurl, arch, cache_dir, bi.preinstallimage)
        if imagefile:
            # remove binaries from build deps which are included in preinstall image
            for i in bi.deps:
                if i.name in imagebins:
                    bi.remove_dep(i.name)

    # now update the package cache
    fetcher.run(bi)

    old_pkg_dir = None
    if opts.oldpackages:
        old_pkg_dir = opts.oldpackages
        if not old_pkg_dir.startswith('/') and not opts.offline:
            data = [ prj, pacname, repo, arch]
            if old_pkg_dir == '_link':
                p = osc.core.findpacs(os.curdir)[0]
                if not p.islink():
                    raise oscerr.WrongOptions('package is not a link')
                data[0] = p.linkinfo.project
                data[1] = p.linkinfo.package
                repos = osc.core.get_repositories_of_project(apiurl, data[0])
                # hack for links to e.g. Factory
                if not data[2] in repos and 'standard' in repos:
                    data[2] = 'standard'
            elif old_pkg_dir != '' and old_pkg_dir != '_self':
                a = old_pkg_dir.split('/')
                for i in range(0, len(a)):
                    data[i] = a[i]

            destdir = os.path.join(cache_dir, data[0], data[2], data[3])
            old_pkg_dir = None
            try:
                print("Downloading previous build from %s ..." % '/'.join(data))
                binaries = get_binarylist(apiurl, data[0], data[2], data[3], package=data[1], verbose=True)
            except Exception as e:
                print("Error: failed to get binaries: %s" % str(e))
                binaries = []

            if binaries:
                class mytmpdir:
                    """ temporary directory that removes itself"""
                    def __init__(self, *args, **kwargs):
                        self.name = mkdtemp(*args, **kwargs)
                    _rmtree = staticmethod(shutil.rmtree)
                    def cleanup(self):
                        self._rmtree(self.name)
                    def __del__(self):
                        self.cleanup()
                    def __exit__(self):
                        self.cleanup()
                    def __str__(self):
                        return self.name

                old_pkg_dir = mytmpdir(prefix='.build.oldpackages', dir=os.path.abspath(os.curdir))
                if not os.path.exists(destdir):
                    os.makedirs(destdir)
            for i in binaries:
                fname = os.path.join(destdir, i.name)
                os.symlink(fname, os.path.join(str(old_pkg_dir), i.name))
                if os.path.exists(fname):
                    st = os.stat(fname)
                    if st.st_mtime == i.mtime and st.st_size == i.size:
                        continue
                get_binary_file(apiurl,
                                data[0],
                                data[2], data[3],
                                i.name,
                                package = data[1],
                                target_filename = fname,
                                target_mtime = i.mtime,
                                progress_meter = True)

        if old_pkg_dir != None:
            buildargs.append('--oldpackages=%s' % old_pkg_dir)

    # Make packages from buildinfo available as repos for kiwi
    if build_type == 'kiwi':
        if os.path.exists('repos'):
            shutil.rmtree('repos')
        os.mkdir('repos')
        for i in bi.deps:
            if not i.extproject:
                # remove
                bi.deps.remove(i)
                continue
            # project
            pdir = str(i.extproject).replace(':/', ':')
            # repo
            rdir = str(i.extrepository).replace(':/', ':')
            # arch
            adir = i.repoarch
            # project/repo
            prdir = "repos/"+pdir+"/"+rdir
            # project/repo/arch
            pradir = prdir+"/"+adir
            # source fullfilename
            sffn = i.fullfilename
            filename = sffn.split("/")[-1]
            # target fullfilename
            tffn = pradir+"/"+filename
            if not os.path.exists(os.path.join(pradir)):
                os.makedirs(os.path.join(pradir))
            if not os.path.exists(tffn):
                print("Using package: "+sffn)
                if opts.linksources:
                    os.link(sffn, tffn)
                else:
                    os.symlink(sffn, tffn)
            if prefer_pkgs:
                for name, path in prefer_pkgs.items():
                    if name == filename:
                        print("Using prefered package: " + path + "/" + filename)
                        os.unlink(tffn)
                        if opts.linksources:
                            os.link(path + "/" + filename, tffn)
                        else:
                            os.symlink(path + "/" + filename, tffn)
        # Is a obsrepositories tag used?
        try:
            tree = ET.parse(build_descr)
        except:
            print('could not parse the kiwi file:', file=sys.stderr)
            print(open(build_descr).read(), file=sys.stderr)
            sys.exit(1)
        root = tree.getroot()
        # product
        for xml in root.findall('instsource'):
            if xml.find('instrepo').find('source').get('path') == 'obsrepositories:/':
                print("obsrepositories:/ for product builds is not yet supported in osc!")
                sys.exit(1)
        # appliance
        expand_obsrepos = None
        for xml in root.findall('repository'):
            if xml.find('source').get('path') == 'obsrepositories:/':
                expand_obsrepos = True
        if expand_obsrepos:
            buildargs.append('--kiwi-parameter')
            buildargs.append('--ignore-repos')
            for xml in root.findall('repository'):
                if xml.find('source').get('path') == 'obsrepositories:/':
                    for path in bi.pathes:
                        if not os.path.isdir("repos/"+path):
                            continue
                        buildargs.append('--kiwi-parameter')
                        buildargs.append('--add-repo')
                        buildargs.append('--kiwi-parameter')
                        buildargs.append("repos/"+path)
                        buildargs.append('--kiwi-parameter')
                        buildargs.append('--add-repotype')
                        buildargs.append('--kiwi-parameter')
                        buildargs.append('rpm-md')
                        if xml.get('priority'):
                            buildargs.append('--kiwi-parameter')
                            buildargs.append('--add-repoprio='+xml.get('priority'))
                else:
                    m = re.match(r"obs://[^/]+/([^/]+)/(\S+)", xml.find('source').get('path'))
                    if not m:
                        # short path without obs instance name
                        m = re.match(r"obs://([^/]+)/(.+)", xml.find('source').get('path'))
                    project = m.group(1).replace(":", ":/")
                    repo = m.group(2)
                    buildargs.append('--kiwi-parameter')
                    buildargs.append('--add-repo')
                    buildargs.append('--kiwi-parameter')
                    buildargs.append("repos/"+project+"/"+repo)
                    buildargs.append('--kiwi-parameter')
                    buildargs.append('--add-repotype')
                    buildargs.append('--kiwi-parameter')
                    buildargs.append('rpm-md')
                    if xml.get('priority'):
                        buildargs.append('--kiwi-parameter')
                        buildargs.append('--add-repopriority='+xml.get('priority'))

    if vm_type == "xen" or vm_type == "kvm" or vm_type == "lxc":
        print('Skipping verification of package signatures due to secure VM build')
    elif bi.pacsuffix == 'rpm':
        if opts.no_verify:
            print('Skipping verification of package signatures')
        else:
            print('Verifying integrity of cached packages')
            verify_pacs(bi)
    elif bi.pacsuffix == 'deb':
        if opts.no_verify or opts.noinit:
            print('Skipping verification of package signatures')
        else:
            print('WARNING: deb packages are not verified; they can compromise your system!')
    else:
        print('WARNING: unknown packages are not verified; they can compromise your system!')

    for i in bi.deps:
        if i.hdrmd5:
            from .util import packagequery
            hdrmd5 = packagequery.PackageQuery.queryhdrmd5(i.fullfilename)
            if not hdrmd5:
                print("Error: cannot get hdrmd5 for %s" % i.fullfilename)
                sys.exit(1)
            if hdrmd5 != i.hdrmd5:
                print("Error: hdrmd5 mismatch for %s: %s != %s" % (i.fullfilename, hdrmd5, i.hdrmd5))
                sys.exit(1)

    print('Writing build configuration')

    if build_type == 'kiwi':
        rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps if not i.noinstall ]
    else:
        rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps ]
    for i in imagebins:
        rpmlist.append('%s preinstallimage\n' % i)
    rpmlist += [ '%s %s\n' % (i[0], i[1]) for i in rpmlist_prefers ]

    if imagefile:
        rpmlist.append('preinstallimage: %s\n' % imagefile)
    if imagesource:
        rpmlist.append('preinstallimagesource: %s\n' % imagesource)

    rpmlist.append('preinstall: ' + ' '.join(bi.preinstall_list) + '\n')
    rpmlist.append('vminstall: ' + ' '.join(bi.vminstall_list) + '\n')
    rpmlist.append('runscripts: ' + ' '.join(bi.runscripts_list) + '\n')
    if build_type != 'kiwi' and bi.noinstall_list:
        rpmlist.append('noinstall: ' + ' '.join(bi.noinstall_list) + '\n')
    if build_type != 'kiwi' and bi.installonly_list:
        rpmlist.append('installonly: ' + ' '.join(bi.installonly_list) + '\n')

    rpmlist_file = NamedTemporaryFile(prefix='rpmlist.')
    rpmlist_filename = rpmlist_file.name
    rpmlist_file.writelines(rpmlist)
    rpmlist_file.flush()

    subst = { 'repo': repo, 'arch': arch, 'project' : prj, 'package' : pacname }
    vm_options = []
    # XXX check if build-device present
    my_build_device = ''
    if config['build-device']:
        my_build_device = config['build-device'] % subst
    else:
        # obs worker uses /root here but that collides with the
        # /root directory if the build root was used without vm
        # before
        my_build_device = build_root + '/img'

    need_root = True
    if vm_type:
        if config['build-swap']:
            my_build_swap = config['build-swap'] % subst
        else:
            my_build_swap = build_root + '/swap'

        vm_options = [ '--vm-type=%s' % vm_type ]
        if vm_telnet:
            vm_options += [ '--vm-telnet=' + vm_telnet ]
        if config['build-memory']:
            vm_options += [ '--memory=' + config['build-memory'] ]
        if vm_type != 'lxc':
            vm_options += [ '--vm-disk=' + my_build_device ]
            vm_options += [ '--vm-swap=' + my_build_swap ]
            vm_options += [ '--logfile=%s/.build.log' % build_root ]
            if vm_type == 'kvm':
                if os.access(build_root, os.W_OK) and os.access('/dev/kvm', os.W_OK):
                    # so let's hope there's also an fstab entry
                    need_root = False
                if config['build-kernel']:
                    vm_options += [ '--vm-kernel=' + config['build-kernel'] ]
                if config['build-initrd']:
                    vm_options += [ '--vm-initrd=' + config['build-initrd'] ]

            build_root += '/.mount'

        if config['build-memory']:
            vm_options += [ '--memory=' + config['build-memory'] ]
        if config['build-vmdisk-rootsize']:
            vm_options += [ '--vmdisk-rootsize=' + config['build-vmdisk-rootsize'] ]
        if config['build-vmdisk-swapsize']:
            vm_options += [ '--vmdisk-swapsize=' + config['build-vmdisk-swapsize'] ]
        if config['build-vmdisk-filesystem']:
            vm_options += [ '--vmdisk-filesystem=' + config['build-vmdisk-filesystem'] ]
        if config['build-vm-user']:
            vm_options += [ '--vm-user=' + config['build-vm-user'] ]


    if opts.preload:
        print("Preload done for selected repo/arch.")
        sys.exit(0)

    print('Running build')
    cmd = [ config['build-cmd'], '--root='+build_root,
                    '--rpmlist='+rpmlist_filename,
                    '--dist='+bc_filename,
                    '--arch='+bi.buildarch ]
    cmd += specialcmdopts + vm_options + buildargs
    cmd += [ build_descr ]

    if need_root:
        sucmd = config['su-wrapper'].split()
        if sucmd[0] == 'su':
            if sucmd[-1] == '-c':
                sucmd.pop()
            cmd = sucmd + ['-s', cmd[0], 'root', '--' ] + cmd[1:]
        else:
            cmd = sucmd + cmd

    # change personality, if needed
    if hostarch != bi.buildarch and bi.buildarch in change_personality:
        cmd = [ change_personality[bi.buildarch] ] + cmd

    try:
        rc = run_external(cmd[0], *cmd[1:])
        if rc:
            print()
            print('The buildroot was:', build_root)
            sys.exit(rc)
    except KeyboardInterrupt as i:
        print("keyboard interrupt, killing build ...")
        cmd.append('--kill')
        run_external(cmd[0], *cmd[1:])
        raise i

    pacdir = os.path.join(build_root, '.build.packages')
    if os.path.islink(pacdir):
        pacdir = os.readlink(pacdir)
        pacdir = os.path.join(build_root, pacdir)

    if os.path.exists(pacdir):
        (s_built, b_built) = get_built_files(pacdir, bi.buildtype)

        print()
        if s_built: print(s_built)
        print()
        print(b_built)

        if opts.keep_pkgs:
            for i in b_built.splitlines() + s_built.splitlines():
                shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(i)))

    if bi_file:
        bi_file.close()
    if bc_file:
        bc_file.close()
    rpmlist_file.close()
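
Among the many abspath calls above, the handling of --rsync-src and --overlay shows the fullest form: os.path.expandvars, then os.path.expanduser, then os.path.abspath, then a sanity check. A minimal sketch of that chain, assuming only the standard library (normalize_dir_option is a hypothetical name):

import os

def normalize_dir_option(value):
    # Expand $VARS and ~ first; abspath alone would treat
    # '~/overlay' or '$HOME/overlay' as literal relative paths.
    path = os.path.abspath(os.path.expanduser(os.path.expandvars(value)))
    if not os.path.isdir(path):
        raise ValueError('%s is not a valid directory' % value)
    return path

The osc code raises oscerr.WrongOptions instead; ValueError stands in here to keep the sketch dependency-free.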

Example 45

Project: TimeSide
Source File: test_samples.py
View license
def generateSamples(overwrite=False, samples_dir=None):
    if not samples_dir:
        from timeside import __file__ as ts_file
        ts_path = os.path.split(os.path.abspath(ts_file))[0]
        tests_dir = os.path.abspath(os.path.join(ts_path, '../tests'))
        if os.path.isdir(tests_dir):
            samples_dir = os.path.abspath(os.path.join(tests_dir, 'samples'))
            if not os.path.isdir(samples_dir):
                os.makedirs(samples_dir)
        else:
            import tempfile
            samples_dir = tempfile.mkdtemp(suffix="ts_samples")
    else:
        if not os.path.isdir(samples_dir):
            os.makedirs(samples_dir)

    samples = dict()

    # --------- Sweeps ---------
    # sweep 44100 mono wav
    filename = 'sweep_mono.wav'
    samplerate = 44100
    gst_audio_encoder = 'wavenc'
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_mono,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sweep 44100 stereo wav
    sweep_stereo = sweep_mono & sweep_mono
    filename = 'sweep.wav'
    gst_audio_encoder = 'wavenc'
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sweep 44100 stereo mp3
    filename = 'sweep.mp3'
    gst_audio_encoder = ['lamemp3enc', 'xingmux', 'id3v2mux']
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sweep 44100 stereo flac
    filename = 'sweep.flac'
    gst_audio_encoder = 'flacenc'
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sweep 44100 stereo ogg
    filename = 'sweep.ogg'
    gst_audio_encoder = ['vorbisenc', 'oggmux']
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sweep 32000 stereo wav
    samplerate = 32000
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sweep_stereo = sweep_mono & sweep_mono

    filename = 'sweep_32000.wav'
    gst_audio_encoder = 'wavenc'
    sweep_mono = SweepArray(duration=8, samplerate=samplerate)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # --------- Sines ---------
    # sine at 440Hz,  44100 mono wav
    filename = 'sine440Hz_mono.wav'
    samplerate = 44100
    gst_audio_encoder = 'wavenc'
    sweep_mono = SineArray(duration=8, samplerate=samplerate, frequency=440)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_mono,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # Short 1s sine at 440Hz,  44100 mono wav
    filename = 'sine440Hz_mono_1s.wav'
    samplerate = 44100
    gst_audio_encoder = 'wavenc'
    sweep_mono = SineArray(duration=1, samplerate=samplerate, frequency=440)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_mono,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sine at 440Hz,  44100 stereo wav
    filename = 'sine440Hz.wav'
    sweep_stereo = sweep_mono & sweep_mono
    gst_audio_encoder = 'wavenc'
    sweep_mono = SineArray(duration=8, samplerate=samplerate, frequency=440)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sine at 440Hz,  44100 stereo mp3
    filename = 'sine440Hz.mp3'
    gst_audio_encoder = ['lamemp3enc', 'xingmux', 'id3v2mux']
    sweep_mono = SineArray(duration=8, samplerate=samplerate, frequency=440)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # sine at 440Hz,  44100 stereo ogg
    filename = 'sine440Hz.ogg'
    gst_audio_encoder = ['vorbisenc', 'oggmux']
    sweep_mono = SineArray(duration=8, samplerate=samplerate, frequency=440)
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=sweep_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # --------- Equal tempered scale ---------
    filename = 'C4_scale.wav'
    samplerate = 44100
    f_C4 = 261.63
    f_D4 = 293.66
    f_E4 = 329.63
    f_F4 = 349.23
    f_G4 = 392.00
    f_A4 = 440.00
    f_B4 = 493.88
    f_C5 = 523.25
    sineC4 = SineArray(duration=1, samplerate=samplerate, frequency=f_C4)
    sineD4 = SineArray(duration=1, samplerate=samplerate, frequency=f_D4)
    sineE4 = SineArray(duration=1, samplerate=samplerate, frequency=f_E4)
    sineF4 = SineArray(duration=1, samplerate=samplerate, frequency=f_F4)
    sineG4 = SineArray(duration=1, samplerate=samplerate, frequency=f_G4)
    sineA4 = SineArray(duration=1, samplerate=samplerate, frequency=f_A4)
    sineB4 = SineArray(duration=1, samplerate=samplerate, frequency=f_B4)
    sineC5 = SineArray(duration=1, samplerate=samplerate, frequency=f_C5)

    silence = SilenceArray(duration=0.2, samplerate=samplerate)

    scale = (sineC4 + silence + sineD4 + silence + sineE4 + silence +
             sineF4 + silence + sineG4 + silence + sineA4 + silence +
             sineB4 + silence + sineC5)

    gst_audio_encoder = 'wavenc'
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=scale,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # --------- White noise ---------
    # white noise - 44100Hz mono
    filename = 'white_noise_mono.wav'
    samplerate = 44100
    noise = WhiteNoiseArray(duration=8, samplerate=samplerate)
    gst_audio_encoder = 'wavenc'
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=noise,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # white noise - 44100Hz stereo
    filename = 'white_noise.wav'
    samplerate = 44100
    noise = WhiteNoiseArray(duration=8, samplerate=samplerate)
    noise_stereo = noise & noise
    gst_audio_encoder = 'wavenc'
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=noise_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    # white noise - 32000Hz stereo
    filename = 'white_noise_32000.wav'
    samplerate = 32000
    noise = WhiteNoiseArray(duration=8, samplerate=samplerate)
    noise_stereo = noise & noise
    gst_audio_encoder = 'wavenc'
    sample_file = generate_sample_file(filename, samples_dir,
                                       gst_audio_encoder,
                                       sample_array=noise_stereo,
                                       overwrite=overwrite)
    samples.update({filename: sample_file})

    return samples
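
The directory lookup at the top of generateSamples anchors the package's __file__ with os.path.abspath and then joins '../tests' to reach a sibling of the package. A minimal sketch of that lookup, using the standard library's os module as a stand-in for the timeside package:

import os
import tempfile

def default_samples_dir():
    # __file__ may be a relative path; abspath anchors it before
    # climbing one level up to look for a sibling 'tests' directory.
    pkg_path = os.path.split(os.path.abspath(os.__file__))[0]
    tests_dir = os.path.abspath(os.path.join(pkg_path, '../tests'))
    if os.path.isdir(tests_dir):
        return os.path.join(tests_dir, 'samples')
    # No source tree next to the package: fall back to a throwaway dir.
    return tempfile.mkdtemp(suffix='ts_samples')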

Example 46

Project: yadapy
Source File: install.py
View license
    def run(self, options, args):
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                if hasattr(req, 'installed_version'):
                                    if req.installed_version:
                                        item += '-' + req.installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']

            for item in os.listdir(lib_dir):
                target_item_dir = os.path.join(options.target_dir, item)
                if os.path.exists(target_item_dir):
                    if not options.upgrade:
                        logger.warning(
                            'Target directory %s already exists. Specify '
                            '--upgrade to force replacement.',
                            target_item_dir
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            'Target directory %s already exists and is '
                            'a link. Pip will not automatically replace '
                            'links, please remove if replacement is '
                            'desired.',
                            target_item_dir
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(
                    os.path.join(lib_dir, item),
                    target_item_dir
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
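
Worth noting in the example above: `build_dir`, `src_dir`, and `target_dir` are run through `os.path.abspath` before any downloading or building starts, so the paths keep their meaning no matter what the working directory is later. A minimal sketch of the failure mode this avoids:

import os
import tempfile

build_dir = "build"                    # a relative path, as a user might pass it
resolved = os.path.abspath(build_dir)  # pin it to the current directory now

os.chdir(tempfile.gettempdir())
# The relative name now resolves somewhere else; the pinned path does not.
print(os.path.abspath(build_dir))  # <tmpdir>/build -- the wrong place
print(resolved)                    # the directory originally intended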

Example 47

Project: penelope
Source File: format_bookeen.py
View license
def write(dictionary, args, output_file_path):
    # result to be returned
    result = None

    # get absolute path
    output_file_path_absolute = os.path.abspath(output_file_path)

    # get absolute path for collation function file
    bookeen_collation_function_path = None
    if args.bookeen_collation_function is not None:
        bookeen_collation_function_path = os.path.abspath(args.bookeen_collation_function)

    # create tmp directory
    cwd = os.getcwd()
    tmp_path = create_temp_directory()
    print_debug("Working in temp dir '%s'" % (tmp_path), args.debug)
    os.chdir(tmp_path)

    # get the basename
    base = os.path.basename(output_file_path)
    if base.endswith(".zip"):
        base = base[:-4]

    # copy empty.idx into tmp_path
    idx_file_path = base + u".dict.idx"
    dict_file_path = base + u".dict"
    copy_file(EMPTY_FILE_PATH, idx_file_path)

    # open index
    sql_connection = sqlite3.connect(idx_file_path)

    # install collation in the index
    collation_function = collate_function_default
    if bookeen_collation_function_path is not None:
        try:
            collation_function = imp.load_source("", bookeen_collation_function_path).collate_function
            print_debug("Using collation function from '%s'" % (bookeen_collation_function_path), args.debug)
        except:
            print_error("Unable to load collation function from '%s'. Using the default collation function instead." % (bookeen_collation_function_path))
    sql_connection.create_collation("IcuNoCase", collation_function)
    sql_connection.text_factory = str

    # get a cursor and delete any data from the index file
    sql_cursor = sql_connection.cursor()
    sql_cursor.execute("delete from T_DictIndex")

    # write c_* files
    # each c_* file has MAX_CHUNK_SIZE < size <= (MAX_CHUNK_SIZE * 2) bytes (tentatively)
    print_debug("Writing c_* files...", args.debug)
    files_to_compress = []
    current_offset = 0
    chunk_index = 1
    chunk_file_path = "%s%d" % (CHUNK_FILE_PREFIX, chunk_index)
    files_to_compress.append(chunk_file_path)
    chunk_file_obj = io.open(chunk_file_path, "wb")
    for entry_index in dictionary.entries_index_sorted:
        entry = dictionary.entries[entry_index]
        definition_bytes = entry.definition.encode("utf-8")
        definition_size = len(definition_bytes)
        chunk_file_obj.write(definition_bytes)
        # insert headword into index file
        sql_tuple = (0, entry.headword, current_offset, definition_size, chunk_index)
        sql_cursor.execute("insert into T_DictIndex values (?,?,?,?,?)", sql_tuple)
        # insert synonyms into index file
        if not args.ignore_synonyms:
            for synonym in entry.get_synonyms():
                sql_tuple = (0, synonym[0], current_offset, definition_size, chunk_index)
                sql_cursor.execute("insert into T_DictIndex values (?,?,?,?,?)", sql_tuple)
        # update offset
        current_offset += definition_size
        # if we reached CHUNK_SIZE, open the next c_* file
        if current_offset > CHUNK_SIZE:
            chunk_file_obj.close()
            chunk_index += 1
            chunk_file_path = "%s%d" % (CHUNK_FILE_PREFIX, chunk_index)
            files_to_compress.append(chunk_file_path)
            chunk_file_obj = io.open(chunk_file_path, "wb")
            current_offset = 0
    chunk_file_obj.close()
    print_debug("Writing c_* files... done", args.debug)

    # compress
    print_debug("Compressing c_* files...", args.debug)
    file_zip_obj = zipfile.ZipFile(dict_file_path, "w", zipfile.ZIP_DEFLATED)
    for file_to_compress in files_to_compress:
        file_to_compress = os.path.basename(file_to_compress)
        file_zip_obj.write(file_to_compress)
    file_zip_obj.close()
    print_debug("Compressing c_* files... done", args.debug)

    # update index metadata
    print_debug("Updating index metadata...", args.debug)
    header = HEADER % (args.language_from)
    sql_cursor.execute("update T_DictInfo set F_xhtmlHeader=?", (header,))
    sql_cursor.execute("update T_DictInfo set F_LangFrom=?", (args.language_from,))
    sql_cursor.execute("update T_DictInfo set F_LangTo=?", (args.language_to,))
    sql_cursor.execute("update T_DictInfo set F_Licence=?", (args.license,))
    sql_cursor.execute("update T_DictInfo set F_Copyright=?", (args.copyright,))
    sql_cursor.execute("update T_DictInfo set F_Title=?", (args.title,))
    sql_cursor.execute("update T_DictInfo set F_Description=?", (args.description,))
    sql_cursor.execute("update T_DictInfo set F_Year=?", (args.year,))
    # the meaning of the following is unknown
    sql_cursor.execute("update T_DictInfo set F_Alphabet=?", ("Z",))
    sql_cursor.execute("update T_DictInfo set F_CollationLevel=?", ("1",))
    sql_cursor.execute("update T_DictVersion set F_DictType=?", ("stardict",))
    sql_cursor.execute("update T_DictVersion set F_Version=?", ("11",))
    print_debug("Updating index metadata... done", args.debug)

    # compact and close
    sql_cursor.execute("vacuum")
    sql_cursor.close()
    sql_connection.close()

    # create .install file or copy .dict.idx and .dict into requested output directory
    parent_output_directory = os.path.split(output_file_path_absolute)[0]
    if args.bookeen_install_file:
        print_debug("Creating .install file...", args.debug)
        file_zip_path = os.path.join(parent_output_directory, base + u".install")
        file_zip_obj = zipfile.ZipFile(file_zip_path, "w", zipfile.ZIP_DEFLATED)
        for file_to_compress in [dict_file_path, idx_file_path]:
            file_to_compress = os.path.basename(file_to_compress)
            file_zip_obj.write(file_to_compress)
        file_zip_obj.close()
        result = [file_zip_path]
        print_debug("Creating .install file... done", args.debug)
    else:
        print_debug("Copying .dict.idx and .dict files...", args.debug)
        dict_file_path_final = os.path.join(parent_output_directory, os.path.basename(dict_file_path))
        idx_file_path_final = os.path.join(parent_output_directory, os.path.basename(idx_file_path))
        copy_file(dict_file_path, dict_file_path_final)
        copy_file(idx_file_path, idx_file_path_final)
        result = [idx_file_path_final, dict_file_path_final]
        print_debug("Copying .dict.idx and .dict files... done", args.debug)

    # delete tmp directory
    os.chdir(cwd)
    if args.keep:
        print_info("Not deleting temp dir '%s'" % (tmp_path))
    else:
        delete_directory(tmp_path)
        print_debug("Deleted temp dir '%s'" % (tmp_path), args.debug)

    return result
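
The heart of the example above is a chunk-plus-index layout: definitions are appended to numbered `c_*` files while a SQLite table records where each headword's definition lives (offset, size, chunk number), and a new chunk is started once the current one passes a size threshold. A stripped-down sketch of that loop -- the table and column names here are illustrative, not the Bookeen `T_DictIndex` schema:

import io
import sqlite3

CHUNK_SIZE = 4096
entries = [("cat", "a small feline"), ("dog", "a loyal canine")]

conn = sqlite3.connect(":memory:")
conn.execute("create table idx (word text, offset int, size int, chunk int)")

offset, chunk_index = 0, 1
chunk = io.open("c_%d" % chunk_index, "wb")
for word, definition in entries:
    data = definition.encode("utf-8")
    chunk.write(data)
    conn.execute("insert into idx values (?, ?, ?, ?)",
                 (word, offset, len(data), chunk_index))
    offset += len(data)
    if offset > CHUNK_SIZE:  # roll over to the next chunk file
        chunk.close()
        chunk_index += 1
        offset = 0
        chunk = io.open("c_%d" % chunk_index, "wb")
chunk.close()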

Example 48

Project: penelope
Source File: format_stardict.py
View license
def write(dictionary, args, output_file_path):
    # result to be returned
    result = None

    # get absolute path
    output_file_path_absolute = os.path.abspath(output_file_path)

    # create tmp directory
    cwd = os.getcwd()
    tmp_path = create_temp_directory()
    print_debug("Working in temp dir '%s'" % (tmp_path), args.debug)
    os.chdir(tmp_path)

    # get the basename and compute output file paths
    base = os.path.basename(output_file_path)
    if base.endswith(".zip"):
        base = base[:-4]
    ifo_file_path = base + ".ifo"
    idx_file_path = base + ".idx"
    dict_file_path = base + ".dict"
    dict_dz_file_path = base + ".dict.dz"
    syn_file_path = base + ".syn"

    # TODO by spec, the index should be sorted
    # TODO using the comparator stardict_strcmp() defined in the spec
    # TODO (it calls g_ascii_strcasecmp() and/or strcmp() ),
    # TODO or with a user-defined collation function
    #
    # From https://developer.gnome.org/glib/2.28/glib-String-Utility-Functions.html#g-ascii-strcasecmp
    # gint g_ascii_strcasecmp (const gchar *s1, const gchar *s2);
    # Compare two strings, ignoring the case of ASCII characters.
    # Unlike the BSD strcasecmp() function, this only recognizes standard ASCII letters and ignores the locale, treating all non-ASCII bytes as if they are not letters.
    # This function should be used only on strings that are known to be in encodings where the bytes corresponding to ASCII letters always represent themselves. This includes UTF-8 and the ISO-8859-* charsets, but not for instance double-byte encodings like the Windows Codepage 932, where the trailing bytes of double-byte characters include all ASCII letters. If you compare two CP932 strings using this function, you will get false matches.
    #
    # using Python's builtin lower() and sort() by headword
    # should be equivalent for UTF-8 encoded dictionaries (and it is fast)
    #
    dictionary.sort(by_headword=True, ignore_case=True)

    # write .idx and .dict files
    print_debug("Writing .idx and .dict files...", args.debug)
    idx_file_obj = io.open(idx_file_path, "wb")
    dict_file_obj = io.open(dict_file_path, "wb")
    current_offset = 0
    current_idx_size = 0
    for entry_index in dictionary.entries_index_sorted:
        entry = dictionary.entries[entry_index]
        headword_bytes = entry.headword.encode("utf-8")
        definition_bytes = entry.definition.encode("utf-8")
        definition_size = len(definition_bytes)
        # write .idx
        idx_file_obj.write(headword_bytes)
        idx_file_obj.write(b"\0")
        idx_file_obj.write(struct.pack('>i', current_offset))
        idx_file_obj.write(struct.pack('>i', definition_size))
        current_idx_size += (len(headword_bytes) + 1 + 4 + 4)
        # write .dict
        dict_file_obj.write(definition_bytes)
        current_offset += definition_size
    idx_file_obj.close()
    dict_file_obj.close()
    print_debug("Writing .idx and .dict files... done", args.debug)

    # list files to compress
    files_to_compress = []
    files_to_compress.append(ifo_file_path)
    files_to_compress.append(idx_file_path)

    # write .syn file
    dict_syns_len = 0
    if dictionary.has_synonyms:
        if args.ignore_synonyms:
            print_debug("Dictionary has synonyms, but ignoring them", args.debug)
        else:
            print_debug("Dictionary has synonyms, writing .syn file...", args.debug)
            syn_file_obj = io.open(syn_file_path, "wb")
            dict_syns = dictionary.get_synonyms()
            dict_syns_len = len(dict_syns)
            for pair in dict_syns:
                synonym_bytes = pair[0].encode("utf-8")
                index = pair[1]
                syn_file_obj.write(synonym_bytes)
                syn_file_obj.write(b"\0")
                syn_file_obj.write(struct.pack('>i', index))
            syn_file_obj.close()
            files_to_compress.append(syn_file_path)
            print_debug("Dictionary has synonyms, writing .syn file... done", args.debug)

    # compress .dict file
    if args.sd_no_dictzip:
        print_debug("Not compressing .dict file with dictzip", args.debug)
        files_to_compress.append(dict_file_path)
        result = [dict_file_path]
    else:
        try:
            print_debug("Compressing .dict file with dictzip...", args.debug)
            dictzip_path = DICTZIP
            if args.dictzip_path is None:
                print_info("  Running '%s' from $PATH" % DICTZIP)
            else:
                dictzip_path = args.dictzip_path
                print_info("  Running '%s' from '%s'" % (DICTZIP, dictzip_path))
            proc = subprocess.Popen(
                [dictzip_path, "-k", dict_file_path],
                stdout=subprocess.PIPE,
                stdin=subprocess.PIPE,
                stderr=subprocess.PIPE
            )
            proc.communicate()
            result = [dict_dz_file_path]
            files_to_compress.append(dict_dz_file_path)
            print_debug("Compressing .dict file with dictzip... done", args.debug)
        except OSError as exc:
            print_error("  Unable to run '%s' as '%s'" % (DICTZIP, dictzip_path))
            print_error("  Please make sure '%s':" % DICTZIP)
            print_error("    1. is available on your $PATH or")
            print_error("    2. specify its path with --dictzip-path or")
            print_error("    3. specify --no-dictzip to avoid compressing the .dict file")
            result = None

    if result is not None:
        # create ifo file
        ifo_file_obj = io.open(ifo_file_path, "wb")
        ifo_file_obj.write((u"StarDict's dict ifo file\n").encode("utf-8"))
        ifo_file_obj.write((u"version=2.4.2\n").encode("utf-8"))
        ifo_file_obj.write((u"wordcount=%d\n" % (len(dictionary))).encode("utf-8"))
        ifo_file_obj.write((u"idxfilesize=%d\n" % (current_idx_size)).encode("utf-8"))
        ifo_file_obj.write((u"bookname=%s\n" % (args.title)).encode("utf-8"))
        ifo_file_obj.write((u"date=%s\n" % (args.year)).encode("utf-8"))
        ifo_file_obj.write((u"sametypesequence=m\n").encode("utf-8"))
        ifo_file_obj.write((u"description=%s\n" % (args.description)).encode("utf-8"))
        ifo_file_obj.write((u"author=%s\n" % (args.author)).encode("utf-8"))
        ifo_file_obj.write((u"email=%s\n" % (args.email)).encode("utf-8"))
        ifo_file_obj.write((u"website=%s\n" % (args.website)).encode("utf-8"))
        if dict_syns_len > 0:
            ifo_file_obj.write((u"synwordcount=%d\n" % (dict_syns_len)).encode("utf-8"))
        ifo_file_obj.close()

        # create output zip file
        try:
            print_debug("Writing to file '%s'..." % (output_file_path_absolute), args.debug)
            file_zip_obj = zipfile.ZipFile(output_file_path_absolute, "w", zipfile.ZIP_DEFLATED)
            for file_to_compress in files_to_compress:
                file_to_compress = os.path.basename(file_to_compress)
                file_zip_obj.write(file_to_compress)
                print_debug("Written %s" % (file_to_compress), args.debug)
            file_zip_obj.close()
            result = [output_file_path]
            print_debug("Writing to file '%s'... success" % (output_file_path_absolute), args.debug)
        except:
            print_error("Writing to file '%s'... failure" % (output_file_path_absolute))

    # delete tmp directory
    os.chdir(cwd)
    if args.keep:
        print_info("Not deleting temp dir '%s'" % (tmp_path))
    else:
        delete_directory(tmp_path)
        print_debug("Deleted temp dir '%s'" % (tmp_path), args.debug)

    return result
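
The long comment block in the example above argues that, for UTF-8 data, sorting on Python's built-in `lower()` is an acceptable stand-in for StarDict's `stardict_strcmp()` (which case-folds only ASCII letters). A minimal sketch of that sort, using the raw string as a tie-breaker so headwords equal modulo case order deterministically; note that `str.lower()` also folds non-ASCII letters, which `g_ascii_strcasecmp()` would not:

headwords = ["banana", "Apple", "apple", "Cherry"]
ordered = sorted(headwords, key=lambda h: (h.lower(), h))
# ['Apple', 'apple', 'banana', 'Cherry']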

Example 49

Project: OwnTube
Source File: btmakemetafile.py
View license
def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1, gethash = None):
    if gethash is None:
        gethash = {}
    
    if not 'md5' in gethash:
        gethash['md5'] = False
    if not 'crc32' in gethash:
        gethash['crc32'] = False
    if not 'sha1' in gethash:
        gethash['sha1'] = False
        
    file = abspath(file)
    if isdir(file):
        subs = subfiles(file)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0L
        fs = []
        totalsize = 0.0
        totalhashed = 0L
        for p, f in subs:
            totalsize += getsize(f)

        for p, f in subs:
            pos = 0L
            size = getsize(f)
            h = open(f, 'rb')

            if gethash['md5']:
                hash_md5 = md5.new()
            if gethash['sha1']:
                hash_sha1 = sha()
            if gethash['crc32']:
                hash_crc32 = zlib.crc32('')
            
            while pos < size:
                a = min(size - pos, piece_length - done)
                
                readpiece = h.read(a)

                # See if the user cancelled
                if flag.isSet():
                    return
                
                sh.update(readpiece)

                # See if the user cancelled
                if flag.isSet():
                    return

                if gethash['md5']:                
                    # Update MD5
                    hash_md5.update(readpiece)
    
                    # See if the user cancelled
                    if flag.isSet():
                        return

                if gethash['crc32']:                
                    # Update CRC32
                    hash_crc32 = zlib.crc32(readpiece, hash_crc32)
    
                    # See if the user cancelled
                    if flag.isSet():
                        return
                
                if gethash['sha1']:                
                    # Update SHA1
                    hash_sha1.update(readpiece)
    
                    # See if the user cancelled
                    if flag.isSet():
                        return
                
                done += a
                pos += a
                totalhashed += a
                
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if progress_percent:
                    progress(totalhashed / totalsize)
                else:
                    progress(a)
                    
            newdict = {'length': size,
                       'path': uniconvertl(p, encoding) }
            if gethash['md5']:
                newdict['md5sum'] = hash_md5.hexdigest()
            if gethash['crc32']:
                newdict['crc32'] = "%08X" % hash_crc32
            if gethash['sha1']:
                newdict['sha1'] = hash_sha1.digest()
                    
            fs.append(newdict)
                    
            h.close()
        if done > 0:
            pieces.append(sh.digest())
        return {'pieces': ''.join(pieces), 
                'piece length': piece_length,
                'files': fs, 
                'name': uniconvert(split(file)[1], encoding) }
    else:
        size = getsize(file)
        pieces = []
        p = 0L
        h = open(file, 'rb')
        
        if gethash['md5']:
            hash_md5 = md5.new()
        if gethash['crc32']:
            hash_crc32 = zlib.crc32('')
        if gethash['sha1']:
            hash_sha1 = sha()
        
        while p < size:
            x = h.read(min(piece_length, size - p))

            # See if the user cancelled
            if flag.isSet():
                return
            
            if gethash['md5']:
                # Update MD5
                hash_md5.update(x)
    
                # See if the user cancelled
                if flag.isSet():
                    return
            
            if gethash['crc32']:
                # Update CRC32
                hash_crc32 = zlib.crc32(x, hash_crc32)
    
                # See if the user cancelled
                if flag.isSet():
                    return
            
            if gethash['sha1']:
                # Update SHA-1
                hash_sha1.update(x)
    
                # See if the user cancelled
                if flag.isSet():
                    return
                
            pieces.append(sha(x).digest())

            # See if the user cancelled
            if flag.isSet():
                return

            p += piece_length
            if p > size:
                p = size
            if progress_percent:
                progress(float(p) / size)
            else:
                progress(min(piece_length, size - p))
        h.close()
        newdict = {'pieces': ''.join(pieces), 
                   'piece length': piece_length,
                   'length': size, 
                   'name': uniconvert(split(file)[1], encoding) }
        if gethash['md5']:
            newdict['md5sum'] = hash_md5.hexdigest()
        if gethash['crc32']:
            newdict['crc32'] = "%08X" % hash_crc32
        if gethash['sha1']:
            newdict['sha1'] = hash_sha1.digest()
        return newdict

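The `makeinfo()` excerpt above is Python 2 code (`0L` literals, the deprecated `md5` and `sha` modules). Its core single-file loop -- read the file in fixed-size pieces and keep one SHA-1 digest per piece -- looks roughly like this in modern Python, with the standard library's `hashlib` standing in for the old modules (a sketch, not a drop-in replacement for the function above):

import hashlib
from os.path import abspath, getsize

def piece_hashes(path, piece_length=262144):
    # Concatenated SHA-1 digests of each fixed-size piece of the file.
    path = abspath(path)
    pieces = []
    with open(path, "rb") as handle:
        while True:
            piece = handle.read(piece_length)
            if not piece:
                break
            pieces.append(hashlib.sha1(piece).digest())
    return b"".join(pieces), getsize(path)
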
Example 50

Project: plotly.py
Source File: offline.py
View license
def plot(figure_or_data,
         show_link=True, link_text='Export to plot.ly',
         validate=True, output_type='file',
         include_plotlyjs=True,
         filename='temp-plot.html', auto_open=True,
         image=None, image_filename='plot_image',
         image_width=800, image_height=600):
    """ Create a plotly graph locally as an HTML document or string.

    Example:
    ```
    from plotly.offline import plot
    import plotly.graph_objs as go

    plot([go.Scatter(x=[1, 2, 3], y=[3, 2, 6])], filename='my-graph.html')
    # We can also download an image of the plot by setting the image parameter
    # to the image format we want
    plot([go.Scatter(x=[1, 2, 3], y=[3, 2, 6])], filename='my-graph.html',
         image='jpeg')
    ```
    More examples below.

    figure_or_data -- a plotly.graph_objs.Figure or plotly.graph_objs.Data or
                      dict or list that describes a Plotly graph.
                      See https://plot.ly/python/ for examples of
                      graph descriptions.

    Keyword arguments:
    show_link (default=True) -- display a link in the bottom-right corner of
        the chart that will export the chart to Plotly Cloud or
        Plotly Enterprise
    link_text (default='Export to plot.ly') -- the text of export link
    validate (default=True) -- validate that all of the keys in the figure
        are valid. Set to False if your version of plotly.js has fallen out
        of sync with your version of graph_reference.json, or if you need to
        include extra, unnecessary keys in your figure.
    output_type ('file' | 'div' - default 'file') -- if 'file', then
        the graph is saved as a standalone HTML file and `plot`
        returns None.
        If 'div', then `plot` returns a string that just contains the
        HTML <div> that contains the graph and the script to generate the
        graph.
        Use 'file' if you want to save and view a single graph at a time
        in a standalone HTML file.
        Use 'div' if you are embedding these graphs in an HTML file with
        other graphs or HTML markup, like an HTML report or a website.
    include_plotlyjs (default=True) -- If True, include the plotly.js
        source code in the output file or string.
        Set as False if your HTML file already contains a copy of the plotly.js
        library.
    filename (default='temp-plot.html') -- The local filename to save the
        outputted chart to. If the filename already exists, it will be
        overwritten. This argument only applies if `output_type` is 'file'.
    auto_open (default=True) -- If True, open the saved file in a
        web browser after saving.
        This argument only applies if `output_type` is 'file'.
    image (default=None |'png' |'jpeg' |'svg' |'webp') -- This parameter sets
        the format of the image to be downloaded, if we choose to download an
        image. This parameter has a default value of None indicating that no
        image should be downloaded.
    image_filename (default='plot_image') -- Sets the name of the file your image
        will be saved to. The extension should not be included.
    image_height (default=600) -- Specifies the height of the image in `px`.
    image_width (default=800) -- Specifies the width of the image in `px`.
    """
    if output_type not in ['div', 'file']:
        raise ValueError(
            "`output_type` argument must be 'div' or 'file'. "
            "You supplied `" + output_type + "``")
    if not filename.endswith('.html') and output_type == 'file':
        warnings.warn(
            "Your filename `" + filename + "` didn't end with .html. "
            "Adding .html to the end of your file.")
        filename += '.html'

    plot_html, plotdivid, width, height = _plot_html(
        figure_or_data, show_link, link_text, validate,
        '100%', '100%', global_requirejs=False)

    resize_script = ''
    if width == '100%' or height == '100%':
        resize_script = (
            ''
            '<script type="text/javascript">'
            'window.removeEventListener("resize");'
            'window.addEventListener("resize", function(){{'
            'Plotly.Plots.resize(document.getElementById("{id}"));}});'
            '</script>'
        ).format(id=plotdivid)

    if output_type == 'file':
        with open(filename, 'w') as f:
            if include_plotlyjs:
                plotly_js_script = ''.join([
                    '<script type="text/javascript">',
                    get_plotlyjs(),
                    '</script>',
                ])
            else:
                plotly_js_script = ''

            if image:
                if image not in __IMAGE_FORMATS:
                    raise ValueError('The image parameter must be one of the '
                                     'following: {}'.format(__IMAGE_FORMATS)
                                     )
                # if the check passes then download script is injected.
                # write the download script:
                script = get_image_download_script('plot')
                script = script.format(format=image,
                                       width=image_width,
                                       height=image_height,
                                       filename=image_filename,
                                       plot_id=plotdivid)
            else:
                script = ''

            f.write(''.join([
                '<html>',
                '<head><meta charset="utf-8" /></head>',
                '<body>',
                plotly_js_script,
                plot_html,
                resize_script,
                script,
                '</body>',
                '</html>']))

        url = 'file://' + os.path.abspath(filename)
        if auto_open:
            webbrowser.open(url)

        return url

    elif output_type == 'div':
        if include_plotlyjs:
            return ''.join([
                '<div>',
                '<script type="text/javascript">',
                get_plotlyjs(),
                '</script>',
                plot_html,
                '</div>'
            ])
        else:
            return plot_html
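
One last `os.path.abspath` note on the example above: the browser URL is built by hand as `'file://' + os.path.abspath(filename)`, which works for simple paths but does not percent-encode spaces or normalize Windows drive letters. A sketch of an equivalent using the standard library's `pathlib`, which handles both:

import webbrowser
from pathlib import Path

url = Path("temp-plot.html").resolve().as_uri()  # e.g. 'file:///.../temp-plot.html'
webbrowser.open(url)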