sys.platform

Here are examples of the Python API sys.platform taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
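
Most of the examples below follow the same basic pattern: branch on sys.platform to select per-OS behaviour, comparing exactly against "win32" and "darwin" and using a prefix check for Linux and the BSDs. A minimal sketch of that pattern follows; the function name and the paths are illustrative only and do not come from any of the projects below.

import sys

def default_config_dir():
    """Return a per-OS configuration directory (illustrative paths only)."""
    if sys.platform == "win32":              # all Windows builds report "win32", even 64-bit
        return r"C:\ProgramData\myapp"
    elif sys.platform == "darwin":           # macOS
        return "/Library/Application Support/myapp"
    elif sys.platform.startswith("linux"):   # "linux2" on Python 2, "linux" on Python 3
        return "/etc/myapp"
    else:                                    # FreeBSD, Cygwin, Solaris ("sunos5"), ...
        return "/etc/myapp"

print(default_config_dir())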

200 Examples

Example 1

Project: nuxeo-drive
Source File: setup.py
    def __init__(self, driveAttributes):

        from distutils.core import setup

        attribs = driveAttributes
        freeze_options = {}
        ext_modules = []

        script = attribs.get_script()
        scripts = attribs.get_scripts()
        name = attribs.get_name()
        packages = Packages(attribs.get_package_dirs()).load()

        # special handling for data files, except for Linux
        if ((sys.platform == "win32" or sys.platform == 'darwin')
                and 'nxdrive.data' in packages):
            packages.remove('nxdrive.data')
        package_data = attribs.get_package_data()
        icons_home = attribs.get_icons_home()
        ui5_home = attribs.get_ui5_home()

        win_icon = os.path.join(icons_home, attribs.get_win_icon())
        png_icon = os.path.join(icons_home, attribs.get_png_icon())
        osx_icon = os.path.join(icons_home, attribs.get_osx_icon())

        if sys.platform == 'win32':
            icon = win_icon
        elif sys.platform == 'darwin':
            icon = osx_icon
        else:
            icon = png_icon

        # Files to include in frozen app
        # build_exe freeze with cx_Freeze (Windows)
        include_files = attribs.get_includes()
        # bdist_esky freeze with cx_Freeze (Windows) and py2app (OS X)
        # In fact this is a global setup option
        # TODO NXP-13810: check removed data_files from py2app and added to
        # global setup
        icon_files = data_file_dir(icons_home, 'icons', include_files).load()
        ui5_files = data_file_dir(ui5_home, 'ui5', include_files).load_recursive()
        data_files = [('icons', icon_files)]
        data_files.extend(ui5_files)
        data_files.extend(attribs.get_data_files())
        old_version = None
        init_file = attribs.get_init_file()
        version = read_version(init_file)

        if '-dev' in version:
            # timestamp the dev artifacts as distutils only accepts "b" + digit
            timestamp = datetime.utcnow().isoformat()
            timestamp = timestamp.replace(":", "")
            timestamp = timestamp.replace(".", "")
            timestamp = timestamp.replace("T", "")
            timestamp = timestamp.replace("-", "")
            old_version = version
            # distutils imposes a max 3 levels integer version
            # (+ prerelease markers which are not allowed in a
            # msi package version). On the other hand,
            # msi imposes the a.b.c.0 or a.b.c.d format where
            # a, b, c and d are all 16 bits integers
            # TODO: align on latest distutils versioning
            month_day = timestamp[4:8]
            if month_day.startswith('0'):
                month_day = month_day[1:]
            version = version.replace('-dev', ".%s" % (
                month_day))
            update_version(init_file, version)
            print "Updated version to " + version

        # Create JSON metadata file for the frozen application
        json_file = create_json_metadata(version, SERVER_MIN_VERSION)
        print "Created JSON metadata file for frozen app: " + json_file

        includes = [
            "PyQt4",
            "PyQt4.QtCore",
            "PyQt4.QtNetwork",
            "PyQt4.QtGui",
            "atexit"  # implicitly required by PyQt4
        ]
        if attribs.include_xattr_binaries():
            includes.append('cffi')
            includes.append('xattr')

        attribs.append_includes(includes)
        excludes = [
            "ipdb",
            "clf",
            "IronPython",
            "pydoc",
            "tkinter",
        ]
        if not attribs.include_xattr_binaries():
            excludes.append('cffi')
            excludes.append('xattr')

        if '--freeze' in sys.argv:
            print "Building standalone executable..."
            sys.argv.remove('--freeze')
            from nx_cx_Freeze import setup
            from cx_Freeze import Executable as cx_Executable
            from esky.util import get_platform

            # build_exe does not seem to take the package_dir info into account
            sys.path.append(attribs.get_path_append())

            executables = [cx_Executable(script)]
            freeze_options = dict()
            if sys.platform == "win32":
                # Windows GUI program that can be launched without a cmd
                # console
                script_w = attribs.get_win_script()
                if script_w is not None:
                    scripts.append(
                        es_Executable(script_w, icon=icon,
                                      shortcutDir="ProgramMenuFolder",
                                      shortcutName=attribs.shortcutName()))

                    executables.append(
                        cx_Executable(script_w,
                                      targetName=attribs.get_win_targetName(),
                                      base="Win32GUI", icon=icon,
                                      shortcutDir="ProgramMenuFolder",
                                      shortcutName=attribs.shortcutName()))
                freeze_options.update({'attribs': attribs})

            package_data = {}
            esky_app_name = (attribs.get_name()
                             + '-' + version + '.' + get_platform())
            esky_dist_dir = os.path.join(OUTPUT_DIR, esky_app_name)
            freeze_options.update({
                'executables': executables,
                'options': {
                    "build": {
                        "exe_command": "bdist_esky",
                    },
                    "build_exe": {
                        "includes": includes,
                        "packages": packages + [
                            "nose",
                        ],
                        "excludes": excludes,
                        "include_files": include_files,
                    },
                    "bdist_esky": {
                        "excludes": excludes,
                        "enable_appdata_dir": True,
                        "freezer_options": {
                            "packages": packages + [
                                "nose",
                            ],
                        },
                        "rm_freeze_dir_after_zipping": False,
                    },
                    "install": {
                        "skip_sub_commands":
                            "install_lib,install_scripts,install_data",
                    },
                    "install_exe": {
                        "skip_build": True,
                        "build_dir": esky_dist_dir,
                    },
                    "bdist_msi": {
                        "add_to_path": True,
                        "upgrade_code":
                            attribs.get_uid(),
                    },
                },
            })

            # Include cffi compiled C extension under Linux
            if sys.platform.startswith('linux') and attribs.include_xattr_binaries():
                import xattr
                includeFiles = [(os.path.join(os.path.dirname(xattr.__file__), '_cffi__x7c9e2f59xb862c7dd.so'),
                                 '_cffi__x7c9e2f59xb862c7dd.so')]
                freeze_options['options']['bdist_esky']['freezer_options'].update({
                    "includeFiles": includeFiles
                })

        if sys.platform == 'darwin':
            # Under OSX we use py2app instead of cx_Freeze because we need:
            # - argv_emulation=True for nxdrive:// URL scheme handling
            # - easy Info.plist customization
            import py2app  # install the py2app command
            if attribs.include_xattr_binaries():
                import xattr
                ext_modules = [xattr.lib.ffi.verifier.get_extension()]
                includes.append("_cffi__x7c9e2f59xb862c7dd")
            name = attribs.get_CFBundleName()
            py2app_options = dict(
                iconfile=icon,
                qt_plugins='imageformats',
                argv_emulation=False,  # We use QT for URL scheme handling
                plist=dict(
                    CFBundleDisplayName=attribs.get_CFBundleDisplayName(),
                    CFBundleName=attribs.get_CFBundleName(),
                    CFBundleIdentifier=attribs.get_CFBundleIdentifier(),
                    LSUIElement=True,  # Do not launch as a Dock application
                    CFBundleURLTypes=[
                        dict(
                            CFBundleURLName=attribs.get_CFBundleURLName(),
                            CFBundleURLSchemes=(attribs
                                                .get_CFBundleURLSchemes()),
                        )
                    ],
                    NSServices=[
                        dict(
                            NSMenuItem=dict(
                                default=attribs.get_CFBundleDisplayName()
                            ),
                            NSMessage=u"macRightClick",
                            NSPortName=attribs.get_CFBundleDisplayName(),
                            NSRequiredContext=dict(),
                            NSSendTypes=[
                                u'NSStringPboardType',
                            ],
                            NSSendFileTypes=[
                                u"public.item"
                            ]
                        )
                    ]
                ),
                includes=includes,
                excludes=excludes,
            )
            freeze_options = dict(
                app=attribs.get_app(),
                options=dict(
                    py2app=py2app_options,
                    bdist_esky=dict(
                        enable_appdata_dir=True,
                        create_zipfile=False,
                        freezer_options=py2app_options,
                    )
                )
            )
        setup(
            name=name,
            version=version,
            description=attribs.get_description(),
            author=attribs.get_author(),
            author_email=attribs.get_author_email(),
            url=attribs.get_url(),
            packages=packages,
            package_dir=attribs.get_package_dir(),
            package_data=package_data,
            scripts=scripts,
            long_description=attribs.get_long_description(),
            data_files=data_files,
            ext_modules=ext_modules,
            **freeze_options
        )

        if old_version is not None:
            update_version(init_file, old_version)
            print "Restored version to " + old_version

Example 2

Project: ramona
Source File: svrapp.py
	def __init__(self):
		server_app_singleton.__init__(self)

		# Create own process group
		if os.name == 'posix':
			try:
				os.setpgrp()
			except:
				#When launched from upstart, following command will fail
				pass

		# Parse command line arguments
		parser = argparse.ArgumentParser()
		parser.add_argument('-S','--server-only', action='store_true', help='Start only server, programs are not launched')
		parser.add_argument('program', nargs='*', help='Optionally specify program(s) in scope of the command (if nothing is specified, all enabled programs will be launched)')

		# This is to support debuging of pythonservice.exe on Windows
		if sys.platform == 'win32':
			parser.add_argument('-debug', action='store', help=argparse.SUPPRESS)

		self.args = parser.parse_args()

		# Read configuration
		read_config()

		# Configure logging
		loglvl = get_numeric_loglevel(config.get('ramona:server','loglevel'))
		logging.basicConfig(
			level=loglvl,
			stream=sys.stderr,
			format="%(asctime)s %(levelname)s: %(message)s",
			)
		L.info("Ramona server started")
		L.debug("Configuration loaded from: {0}".format(', '.join(itertools.chain(config_files,config_includes))))

		# Prepare message yield logger
		my_logger = logging.getLogger('my')
		my_logger.setLevel(logging.DEBUG) 
		my_logger.addHandler(message_yield_loghandler(self))
		my_logger.propagate = False

		# Open console communication sockets (listen mode)
		self.cnssockets = []
		consoleuri = config.get("ramona:server", "consoleuri")
		for cnsuri in consoleuri.split(','):
			socket_factory = socketuri.socket_uri(cnsuri)
			
			# Special casing for UNIX domain socket 
			# There can be abandoned/stalled file entry - we need to find out if this is the case ...
			# (see http://stackoverflow.com/questions/7405932/how-to-know-whether-any-process-is-bound-to-a-unix-domain-socket)
			if socket_factory.protocol == 'unix':
				# Try to connect ...
				if os.path.exists(socket_factory.uri.path):
					try:
						s = socket_factory.create_socket_connect()
					except socket.error, e:
						if e.errno == errno.ECONNREFUSED:
							L.debug("Removing stalled UNIX socket '{0}'".format(socket_factory.uri.path))
							os.unlink(socket_factory.uri.path)
					else:
						s.close()
						L.fatal("It looks like that server is already running, there is active UNIX socket '{0}'".format(socket_factory.uri.path))
						sys.exit(1)

			try:
				socks = socket_factory.create_socket_listen()
			except socket.error, e:
				L.fatal("It looks like that server is already running: {0}".format(e))
				sys.exit(1)
			self.cnssockets.extend(socks)
		if len(self.cnssockets) == 0:
			L.fatal("There is no console socket configured - considering this as fatal error")
			sys.exit(1)

		self.loop = pyev.default_loop()
		self.watchers = [pyev.Signal(sig, self.loop, self.__terminal_signal_cb) for sig in self.STOPSIGNALS]
		self.watchers.append(pyev.Signal(signal.SIGHUP, self.loop, self.__restart_signal_cb))
		self.watchers.append(pyev.Periodic(0, 1.0, self.loop, self.__tick_cb))

		if sys.platform == 'win32':
			# There is no pyev.Child watcher on Windows; periodic check is used instead
			self.watchers.append(pyev.Periodic(0, 0.5, self.loop, self.__check_childs_cb))
		else:
			self.watchers.append(pyev.Child(0, False, self.loop, self.__child_signal_cb))


		for sock in self.cnssockets:
			sock.setblocking(0)
			# Watcher data are used (instead logical watcher.fd due to Win32 mismatch)
			self.watchers.append(pyev.Io(sock._sock, pyev.EV_READ, self.loop, self.__accept_cb, data=sock._sock.fileno()))

		self.conns = weakref.WeakSet()
		self.termstatus =  None
		self.termstatus_change = None

		# Enable non-terminating SIGALARM handler - this is to protect supervisor for ALARM signals from subprocesses
		if sys.platform != 'win32':
			signal.signal(signal.SIGALRM, _SIGALARM_handler)

		# Prepare also exit watcher - can be used to 'simulate' terminal signal (useful on Win32)
		self.exitwatcher = pyev.Async(self.loop, self.__terminal_signal_cb)
		self.exitwatcher.start()

		program_roaster.__init__(self)
		idlework_appmixin.__init__(self)

		# Build notificator component
		self.notificator = notificator(self)

		# Reopen stdout and stderr - if pointing to log file, this includes also log rotate check
		self.__rotate_stdout_stderr()
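
Example 2 mixes two related checks: os.name == 'posix' guards a POSIX-only call (os.setpgrp), while sys.platform == 'win32' drives Windows-specific argument parsing and the choice of pyev watchers. os.name only describes the OS family, whereas sys.platform is more specific; the sketch below illustrates the difference with the most common values (not an exhaustive list).

import os
import sys

# os.name is coarse: 'posix' on Linux, macOS and the BSDs, 'nt' on Windows.
# sys.platform is finer-grained: 'linux', 'darwin', 'win32', 'freebsd12', ...
if os.name == "posix":
    print("POSIX family, specifically:", sys.platform)
elif os.name == "nt":
    print("Windows (sys.platform is always 'win32' here)")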

Example 3

Project: Pcode
Source File: Projects.py
    def loadProject(self, path, show, new):
        if not self.pcode.showProject(path):
            QtGui.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
            projectPathDict = {
                "notes": os.path.join(path, "Data", "wpad.txt"),
                "session": os.path.join(path, "Data", "session.xml"),
                "usedata": os.path.join(path, "Data", "usedata.xml"),
                "windata": os.path.join(path, "Data", "windata.xml"),
                "projectdata": os.path.join(path, "Data", "projectdata.xml"),
                "snippetsdir": os.path.join(path, "Data", "templates"),
                "tempdir": os.path.join(path, "temp"),
                "backupdir": os.path.join(path, "temp", "Backup", "Files"),
                "backupfile": os.path.join(path, "temp", "Backup", "bak"),
                "sourcedir": os.path.join(path, "src"),
                "ropeFolder": os.path.join(path, "Rope"),
                "buildprofile": os.path.join(path, "Build", "profile.xml"),
                "ropeprofile": os.path.join(path, "Rope", "profile.xml"),
                "projectmainfile": os.path.join(path, "project.xml"),
                "iconsdir": os.path.join(path, "Resources", "Icons"),
                "root": path
                }

            if sys.platform == 'win32':
                projectPathDict["venvdir"] = os.path.join(path,
                               "Resources", "VirtualEnv", "Windows", "Venv")
            elif sys.platform == 'darwin':
                projectPathDict["venvdir"] = os.path.join(path,
                               "Resources", "VirtualEnv", "Mac", "Venv")
            else:
                projectPathDict["venvdir"] = os.path.join(path,
                               "Resources", "VirtualEnv", "Linux", "Venv")

            try:
                project_data = self.readProject(path)
                if project_data is False:
                    QtGui.QApplication.restoreOverrideCursor()
                    message = QtGui.QMessageBox.warning(self, "Open Project",
                                                        "Failed:\n\n" + path)
                    return
                projectPathDict["name"] = project_data[1]["Name"]
                projectPathDict["type"] = project_data[1]["Type"]
                projectPathDict["mainscript"] = os.path.join(path, "src",
                               project_data[1]["MainScript"])
                if sys.platform == 'win32':
                    projectPathDict["builddir"] = os.path.join(
                        path, "Build", "Windows")
                elif sys.platform == 'darwin':
                    projectPathDict["builddir"] = os.path.join(
                        path, "Build", "Mac")
                else:
                    projectPathDict["builddir"] = os.path.join(
                        path, "Build", "Linux")

                p_name = os.path.basename(path)

                projectWindow = EditorWindow(projectPathDict, self.library,
                                             self.busyWidget, self.settingsWidget.colorScheme,
                                             self.useData, self.app, self)
                if new:
                    projectWindow.editorTabWidget.loadfile(
                        projectPathDict["mainscript"])
                else:
                    projectWindow.restoreSession()
                projectWindow.editorTabWidget.updateWindowTitle.connect(
                    self.pcode.updateWindowTitle)

                self.pcode.addProject(projectWindow, p_name)

                if path in self.useData.OPENED_PROJECTS:
                    self.useData.OPENED_PROJECTS.remove(path)
                    self.useData.OPENED_PROJECTS.insert(0, path)
                else:
                    self.useData.OPENED_PROJECTS.insert(0, path)
                if show:
                    self.pcode.showProject(path)
            except Exception as err:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                logging.error(
                    repr(traceback.format_exception(exc_type, exc_value,
                             exc_traceback)))
                QtGui.QApplication.restoreOverrideCursor()
                message = QtGui.QMessageBox.warning(self, "Failed Open",
                                                    "Problem opening project: \n\n" + str(err))
            QtGui.QApplication.restoreOverrideCursor()

Example 4

Project: gramps
Source File: grampslocale.py
    def __init_first_instance(self):
        """
        Initialize the primary locale from whatever might be
        available. We only do this once, and the resulting
        GrampsLocale is returned by default.
        """
        global _hdlr
        _hdlr = logging.StreamHandler()
        _hdlr.setFormatter(logging.Formatter(fmt="%(name)s.%(levelname)s: %(message)s"))
        LOG.addHandler(_hdlr)

        #Now that we have a logger set up we can issue the icu error if needed.
        if not HAVE_ICU:
            LOG.warning(_icu_err)

        # Even the first instance can be overridden by passing lang
        # and languages to the constructor. If it isn't (which is the
        # expected behavior), do platform-specific setup:
        if not (self.lang and self.language):
            if sys.platform == 'darwin':
                from . import maclocale
                maclocale.mac_setup_localization(self)
            elif sys.platform == 'win32':
                self._win_init_environment()
            else:
                self._init_from_environment()
        else:
            self.numeric = self.currency = self.calendar = self.collation = self.lang

        if not self.lang:
            self.lang = 'en_US.UTF-8'
        if not self.language:
            self.language.append('en')
        if not self.localedir and not self.lang.startswith('en'):
            LOG.warning("No translations for %s were found, setting localization to U.S. English", self.localedomain)
            self.lang = 'en_US.UTF-8'
            self.language = ['en']

#Next, we need to know what is the encoding from the native
#environment. This is used by python standard library funcions which
#localize their output, e.g. time.strftime(). NB: encoding is a class variable.
        if not self.encoding:
            self.encoding = (locale.getpreferredencoding()
                             or sys.getdefaultencoding())
        LOG.debug("Setting encoding to %s", self.encoding)

        # Make sure that self.lang and self.language are reflected
        # back into the environment for Gtk to use when its
        # initialized. If self.lang isn't 'C', make sure that it has a
        # 'UTF-8' suffix, because that's all that GtkBuilder can
        # digest.

        # Gtk+ has an 'en' po, but we don't. This is worked-around for
        # our GrampsTranslation class but that isn't used to retrieve
        # translations in GtkBuilder (glade), a direct call to libintl
        # (gettext) is. If 'en' is in the translation list it gets
        # skipped in favor of the next language, which can cause
        # inappropriate translations of strings in glade/ui files. To
        # prevent this, if 'en' is in self.language it's the last
        # entry:

        if 'en' in self.language:
            self.language = self.language[:self.language.index('en') + 1]

        # Linux note: You'll get unsupported locale errors from Gtk
        # and untranslated strings if the requisite UTF-8 locale isn't
        # installed. This is particularly a problem on Debian and
        # Debian-derived distributions which by default don't install
        # a lot of locales.
        lang = locale.normalize(self.language[0] if self.language[0] else 'C')
        check_lang = lang.split('.')
        if not check_lang[0]  in ('C', 'en'):
            if len(check_lang) < 2  or check_lang[1] not in ("utf-8", "UTF-8"):
                lang = '.'.join((check_lang[0], 'UTF-8'))

        os.environ["LANG"] = lang
        #We need to convert 'en' and 'en_US' to 'C' to avoid confusing
        #GtkBuilder when it's retrieving strings from our Glade files
        #since we have neither an en.po nor an en_US.po.

        os.environ["LANGUAGE"] = ':'.join(self.language)

        # GtkBuilder uses GLib's g_dgettext wrapper, which oddly is bound
        # with locale instead of gettext. Win32 doesn't support bindtextdomain.
        if self.localedir:
            if not sys.platform == 'win32':
                locale.bindtextdomain(self.localedomain, self.localedir)
            else:
                self._win_bindtextdomain(self.localedomain.encode('utf-8'),
                                         self.localedir.encode('utf-8'))

Example 5

Project: hamster
Source File: python.py
def check_python_headers(conf,mandatory=True):
	if not conf.env['CC_NAME']and not conf.env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')
	if not conf.env['PYTHON_VERSION']:
		conf.check_python_version()
	env=conf.env
	python=env['PYTHON']
	if not python:
		conf.fatal('could not find the python executable')
	if Options.platform=='darwin':
		conf.check_tool('osx')
	try:
		v='prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
		(python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET)=_get_python_variables(python,["get_config_var('%s')"%x for x in v],['from distutils.sysconfig import get_config_var'])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")
	conf.log.write("""Configuration returned from %r:
python_prefix = %r
python_SO = %r
python_SYSLIBS = %r
python_LDFLAGS = %r
python_SHLIBS = %r
python_LIBDIR = %r
python_LIBPL = %r
INCLUDEPY = %r
Py_ENABLE_SHARED = %r
MACOSX_DEPLOYMENT_TARGET = %r
"""%(python,python_prefix,python_SO,python_SYSLIBS,python_LDFLAGS,python_SHLIBS,python_LIBDIR,python_LIBPL,INCLUDEPY,Py_ENABLE_SHARED,python_MACOSX_DEPLOYMENT_TARGET))
	if python_MACOSX_DEPLOYMENT_TARGET:
		conf.env['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET
		conf.environ['MACOSX_DEPLOYMENT_TARGET']=python_MACOSX_DEPLOYMENT_TARGET
	env['pyext_PATTERN']='%s'+python_SO
	if python_SYSLIBS is not None:
		for lib in python_SYSLIBS.split():
			if lib.startswith('-l'):
				lib=lib[2:]
			env.append_value('LIB_PYEMBED',lib)
	if python_SHLIBS is not None:
		for lib in python_SHLIBS.split():
			if lib.startswith('-l'):
				env.append_value('LIB_PYEMBED',lib[2:])
			else:
				env.append_value('LINKFLAGS_PYEMBED',lib)
	if Options.platform!='darwin'and python_LDFLAGS:
		env.append_value('LINKFLAGS_PYEMBED',python_LDFLAGS.split())
	result=False
	name='python'+env['PYTHON_VERSION']
	if python_LIBDIR is not None:
		path=[python_LIBDIR]
		conf.log.write("\n\n# Trying LIBDIR: %r\n"%path)
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if not result and python_LIBPL is not None:
		conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
		path=[python_LIBPL]
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if not result:
		conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
		path=[os.path.join(python_prefix,"libs")]
		name='python'+env['PYTHON_VERSION'].replace('.','')
		result=conf.check(lib=name,uselib='PYEMBED',libpath=path)
	if result:
		env['LIBPATH_PYEMBED']=path
		env.append_value('LIB_PYEMBED',name)
	else:
		conf.log.write("\n\n### LIB NOT FOUND\n")
	if(sys.platform=='win32'or sys.platform.startswith('os2')or sys.platform=='darwin'or Py_ENABLE_SHARED):
		env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED']
		env['LIB_PYEXT']=env['LIB_PYEMBED']
	python_config=conf.find_program('python%s-config'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG')
	if not python_config:
		python_config=conf.find_program('python-config-%s'%('.'.join(env['PYTHON_VERSION'].split('.')[:2])),var='PYTHON_CONFIG')
	includes=[]
	if python_config:
		for incstr in Utils.cmd_output("%s %s --includes"%(python,python_config)).strip().split():
			if(incstr.startswith('-I')or incstr.startswith('/I')):
				incstr=incstr[2:]
			if incstr not in includes:
				includes.append(incstr)
		conf.log.write("Include path for Python extensions ""(found via python-config --includes): %r\n"%(includes,))
		env['CPPPATH_PYEXT']=includes
		env['CPPPATH_PYEMBED']=includes
	else:
		conf.log.write("Include path for Python extensions ""(found via distutils module): %r\n"%(INCLUDEPY,))
		env['CPPPATH_PYEXT']=[INCLUDEPY]
		env['CPPPATH_PYEMBED']=[INCLUDEPY]
	if env['CC_NAME']=='gcc':
		env.append_value('CCFLAGS_PYEMBED','-fno-strict-aliasing')
		env.append_value('CCFLAGS_PYEXT','-fno-strict-aliasing')
	if env['CXX_NAME']=='gcc':
		env.append_value('CXXFLAGS_PYEMBED','-fno-strict-aliasing')
		env.append_value('CXXFLAGS_PYEXT','-fno-strict-aliasing')
	conf.check(define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG_2,errmsg='Could not find the python development headers',mandatory=mandatory)

Example 6

Project: dd-agent
Source File: unix.py
    def check(self, agentConfig):
        if Platform.is_linux():
            proc_location = agentConfig.get('procfs_path', '/proc').rstrip('/')
            try:
                proc_meminfo = "{}/meminfo".format(proc_location)
                with open(proc_meminfo, 'r') as mem_info:
                    lines = mem_info.readlines()
            except Exception:
                self.logger.exception('Cannot get memory metrics from %s', proc_meminfo)
                return False

            # NOTE: not all of the stats below are present on all systems as
            # not all kernel versions report all of them.
            #
            # $ cat /proc/meminfo
            # MemTotal:        7995360 kB
            # MemFree:         1045120 kB
            # MemAvailable:    1253920 kB
            # Buffers:          226284 kB
            # Cached:           775516 kB
            # SwapCached:       248868 kB
            # Active:          1004816 kB
            # Inactive:        1011948 kB
            # Active(anon):     455152 kB
            # Inactive(anon):   584664 kB
            # Active(file):     549664 kB
            # Inactive(file):   427284 kB
            # Unevictable:     4392476 kB
            # Mlocked:         4392476 kB
            # SwapTotal:      11120632 kB
            # SwapFree:       10555044 kB
            # Dirty:              2948 kB
            # Writeback:             0 kB
            # AnonPages:       5203560 kB
            # Mapped:            50520 kB
            # Shmem:             10108 kB
            # Slab:             161300 kB
            # SReclaimable:     136108 kB
            # SUnreclaim:        25192 kB
            # KernelStack:        3160 kB
            # PageTables:        26776 kB
            # NFS_Unstable:          0 kB
            # Bounce:                0 kB
            # WritebackTmp:          0 kB
            # CommitLimit:    15118312 kB
            # Committed_AS:    6703508 kB
            # VmallocTotal:   34359738367 kB
            # VmallocUsed:      400668 kB
            # VmallocChunk:   34359329524 kB
            # HardwareCorrupted:     0 kB
            # HugePages_Total:       0
            # HugePages_Free:        0
            # HugePages_Rsvd:        0
            # HugePages_Surp:        0
            # Hugepagesize:       2048 kB
            # DirectMap4k:       10112 kB
            # DirectMap2M:     8243200 kB

            regexp = re.compile(r'^(\w+):\s+([0-9]+)')  # We run this several times so one-time compile now
            meminfo = {}

            parse_error = False
            for line in lines:
                try:
                    match = re.search(regexp, line)
                    if match is not None:
                        meminfo[match.group(1)] = match.group(2)
                except Exception:
                    parse_error = True
            if parse_error:
                self.logger.error("Error parsing %s", proc_meminfo)

            memData = {}

            # Physical memory
            # FIXME units are in MB, we should use bytes instead
            try:
                memData['physTotal'] = int(meminfo.get('MemTotal', 0)) / 1024
                memData['physFree'] = int(meminfo.get('MemFree', 0)) / 1024
                memData['physBuffers'] = int(meminfo.get('Buffers', 0)) / 1024
                memData['physCached'] = int(meminfo.get('Cached', 0)) / 1024
                memData['physShared'] = int(meminfo.get('Shmem', 0)) / 1024
                memData['physSlab'] = int(meminfo.get('Slab', 0)) / 1024
                memData['physPageTables'] = int(meminfo.get('PageTables', 0)) / 1024
                memData['physUsed'] = memData['physTotal'] - memData['physFree']

                if 'MemAvailable' in meminfo:
                    memData['physUsable'] = int(meminfo.get('MemAvailable', 0)) / 1024
                else:
                    # Usable is relative since cached and buffers are actually used to speed things up.
                    memData['physUsable'] = memData['physFree'] + memData['physBuffers'] + memData['physCached']

                if memData['physTotal'] > 0:
                    memData['physPctUsable'] = float(memData['physUsable']) / float(memData['physTotal'])
            except Exception:
                self.logger.exception('Cannot compute stats from %s', proc_meminfo)

            # Swap
            # FIXME units are in MB, we should use bytes instead
            try:
                memData['swapTotal'] = int(meminfo.get('SwapTotal', 0)) / 1024
                memData['swapFree'] = int(meminfo.get('SwapFree', 0)) / 1024
                memData['swapCached'] = int(meminfo.get('SwapCached', 0)) / 1024

                memData['swapUsed'] = memData['swapTotal'] - memData['swapFree']

                if memData['swapTotal'] > 0:
                    memData['swapPctFree'] = float(memData['swapFree']) / float(memData['swapTotal'])
            except Exception:
                self.logger.exception('Cannot compute swap stats')

            return memData

        elif sys.platform == 'darwin':
            if psutil is None:
                self.logger.error("psutil must be installed on MacOS to collect memory metrics")
                return False

            phys_memory = psutil.virtual_memory()
            swap = psutil.swap_memory()
            return {'physUsed': phys_memory.used / float(1024**2),
                'physFree': phys_memory.free / float(1024**2),
                'physUsable': phys_memory.available / float(1024**2),
                'physPctUsable': (100 - phys_memory.percent) / 100.0,
                'swapUsed': swap.used / float(1024**2),
                'swapFree': swap.free / float(1024**2)}

        elif sys.platform.startswith("freebsd"):
            try:
                output, _, _ = get_subprocess_output(['sysctl', 'vm.stats.vm'], self.logger)
                sysctl = output.splitlines()
            except Exception:
                self.logger.exception('getMemoryUsage')
                return False

            # ...
            # vm.stats.vm.v_page_size: 4096
            # vm.stats.vm.v_page_count: 759884
            # vm.stats.vm.v_wire_count: 122726
            # vm.stats.vm.v_active_count: 109350
            # vm.stats.vm.v_cache_count: 17437
            # vm.stats.vm.v_inactive_count: 479673
            # vm.stats.vm.v_free_count: 30542
            # ...

            # We run this several times so one-time compile now
            regexp = re.compile(r'^vm\.stats\.vm\.(\w+):\s+([0-9]+)')
            meminfo = {}

            parse_error = False
            for line in sysctl:
                try:
                    match = re.search(regexp, line)
                    if match is not None:
                        meminfo[match.group(1)] = match.group(2)
                except Exception:
                    parse_error = True
            if parse_error:
                self.logger.error("Error parsing vm.stats.vm output: %s", sysctl)

            memData = {}

            # Physical memory
            try:
                pageSize = int(meminfo.get('v_page_size'))

                memData['physTotal'] = (int(meminfo.get('v_page_count', 0))
                                        * pageSize) / 1048576
                memData['physFree'] = (int(meminfo.get('v_free_count', 0))
                                       * pageSize) / 1048576
                memData['physCached'] = (int(meminfo.get('v_cache_count', 0))
                                         * pageSize) / 1048576
                memData['physUsed'] = ((int(meminfo.get('v_active_count', 0)) +
                                        int(meminfo.get('v_wire_count', 0)))
                                       * pageSize) / 1048576
                memData['physUsable'] = ((int(meminfo.get('v_free_count', 0)) +
                                          int(meminfo.get('v_cache_count', 0)) +
                                          int(meminfo.get('v_inactive_count', 0))) *
                                         pageSize) / 1048576

                if memData['physTotal'] > 0:
                    memData['physPctUsable'] = float(memData['physUsable']) / float(memData['physTotal'])
            except Exception:
                self.logger.exception('Cannot compute stats from %s', proc_meminfo)

            # Swap
            try:
                output, _, _ = get_subprocess_output(['swapinfo', '-m'], self.logger)
                sysctl = output.splitlines()
            except Exception:
                self.logger.exception('getMemoryUsage')
                return False

            # ...
            # Device          1M-blocks     Used    Avail Capacity
            # /dev/ad0s1b           570        0      570     0%
            # ...

            assert "Device" in sysctl[0]

            try:
                memData['swapTotal'] = 0
                memData['swapFree'] = 0
                memData['swapUsed'] = 0
                for line in sysctl[1:]:
                    if len(line) > 0:
                        line = line.split()
                        memData['swapTotal'] += int(line[1])
                        memData['swapFree'] += int(line[3])
                        memData['swapUsed'] += int(line[2])
            except Exception:
                self.logger.exception('Cannot compute stats from swapinfo')

            return memData
        elif sys.platform == 'sunos5':
            try:
                memData = {}
                cmd = ["kstat", "-m", "memory_cap", "-c", "zone_memory_cap", "-p"]
                output, _, _ = get_subprocess_output(cmd, self.logger)
                kmem = output.splitlines()

                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:anon_alloc_fail   0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:anonpgin  0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:class     zone_memory_cap
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:crtime    16359935.0680834
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:execpgin  185
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:fspgin    2556
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:n_pf_throttle     0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:n_pf_throttle_usec        0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:nover     0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:pagedout  0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:pgpgin    2741
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:physcap   536870912  <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:rss       115544064  <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:snaptime  16787393.9439095
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:swap      91828224   <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:swapcap   1073741824 <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:zonename  53aa9b7e-48ba-4152-a52b-a6368c3d9e7c

                # turn memory_cap:360:zone_name:key value
                # into { "key": value, ...}
                kv = [l.strip().split() for l in kmem if len(l) > 0]
                entries = dict([(k.split(":")[-1], v) for (k, v) in kv])
                # extract rss, physcap, swap, swapcap, turn into MB
                convert = lambda v: int(long(v))/2**20
                memData["physTotal"] = convert(entries["physcap"])
                memData["physUsed"] = convert(entries["rss"])
                memData["physFree"] = memData["physTotal"] - memData["physUsed"]
                memData["swapTotal"] = convert(entries["swapcap"])
                memData["swapUsed"] = convert(entries["swap"])
                memData["swapFree"] = memData["swapTotal"] - memData["swapUsed"]

                if memData['swapTotal'] > 0:
                    memData['swapPctFree'] = float(memData['swapFree']) / float(memData['swapTotal'])
                return memData
            except Exception:
                self.logger.exception("Cannot compute mem stats from kstat -c zone_memory_cap")
                return False
        else:
            return False
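
Example 6 also shows the two less common spellings that turn up on other Unixes: sys.platform.startswith("freebsd"), because FreeBSD embeds the major version in the value (e.g. 'freebsd12'), and the exact string 'sunos5' reported by Solaris and its derivatives. A small sketch, assuming nothing beyond the standard library:

import sys

if sys.platform.startswith("freebsd"):   # 'freebsd11', 'freebsd12', ...
    print("FreeBSD")
elif sys.platform == "sunos5":           # Solaris / illumos
    print("Solaris")
else:
    print("Something else:", sys.platform)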

Example 7

Project: invesalius3
Source File: slice_menu.py
    def __init__(self):
        wx.Menu.__init__(self)
        self.ID_TO_TOOL_ITEM = {}
        self.cdialog = None

        #------------ Sub menu of the window and level ----------
        submenu_wl = wx.Menu()

        self._gen_event = True

        #Window and level from DICOM
        new_id = self.id_wl_first = wx.NewId()
        wl_item = wx.MenuItem(submenu_wl, new_id,\
                            _('Default'), kind=wx.ITEM_RADIO)
        submenu_wl.AppendItem(wl_item)
        self.ID_TO_TOOL_ITEM[new_id] = wl_item

        #Case the user change window and level
        new_id = self.other_wl_id = wx.NewId()
        wl_item = wx.MenuItem(submenu_wl, new_id,\
                            _('Manual'), kind=wx.ITEM_RADIO)
        submenu_wl.AppendItem(wl_item)
        self.ID_TO_TOOL_ITEM[new_id] = wl_item

        for name in sorted(const.WINDOW_LEVEL):
            if not(name == _('Default') or name == _('Manual')):
                new_id = wx.NewId()
                wl_item = wx.MenuItem(submenu_wl, new_id,\
                                    name, kind=wx.ITEM_RADIO)
                submenu_wl.AppendItem(wl_item)
                self.ID_TO_TOOL_ITEM[new_id] = wl_item

        #----------- Sub menu of the save and load options ---------
        #submenu_wl.AppendSeparator()
        #options = [_("Save current values"),
        #           _("Save current values as..."),_("Load values")]

        #for name in options:
        #    new_id = wx.NewId()
        #    wl_item = wx.MenuItem(submenu_wl, new_id,\
        #                    name)
        #    submenu_wl.AppendItem(wl_item)
        #    self.ID_TO_TOOL_ITEM[new_id] = wl_item


        #------------ Sub menu of the pseudo colors ----------------
        if sys.platform == 'linux2':
            mkind = wx.ITEM_CHECK
        else:
            mkind = wx.ITEM_RADIO

        self.pseudo_color_items = {}
        submenu_pseudo_colours = wx.Menu()
        self.pseudo_color_items = {}
        new_id = self.id_pseudo_first = wx.NewId()
        color_item = wx.MenuItem(submenu_pseudo_colours, new_id,\
                            _("Default "), kind=mkind)
        submenu_pseudo_colours.AppendItem(color_item)
        color_item.Check(1)
        self.ID_TO_TOOL_ITEM[new_id] = color_item
        self.pseudo_color_items[new_id] = color_item

        for name in sorted(const.SLICE_COLOR_TABLE):
            if not(name == _("Default ")):
                new_id = wx.NewId()
                color_item = wx.MenuItem(submenu_wl, new_id,\
                                    name, kind=mkind)
                submenu_pseudo_colours.AppendItem(color_item)
                self.ID_TO_TOOL_ITEM[new_id] = color_item
                self.pseudo_color_items[new_id] = color_item

        self.plist_presets = presets.get_wwwl_presets()
        for name in sorted(self.plist_presets):
            new_id = wx.NewId()
            color_item = wx.MenuItem(submenu_wl, new_id, name,
                                     kind=mkind)
            submenu_pseudo_colours.AppendItem(color_item)
            self.ID_TO_TOOL_ITEM[new_id] = color_item
            self.pseudo_color_items[new_id] = color_item

        new_id = wx.NewId()
        color_item = wx.MenuItem(submenu_wl, new_id, _('Custom'),
                                 kind=mkind)
        submenu_pseudo_colours.AppendItem(color_item)
        self.ID_TO_TOOL_ITEM[new_id] = color_item
        self.pseudo_color_items[new_id] = color_item

        # --------------- Sub menu of the projection type ---------------------
        self.projection_items = {}
        submenu_projection = wx.Menu()
        for name in PROJECTIONS_ID:
            new_id = wx.NewId()
            projection_item = wx.MenuItem(submenu_projection, new_id, name,
                                          kind=wx.ITEM_RADIO)
            submenu_projection.AppendItem(projection_item)
            self.ID_TO_TOOL_ITEM[new_id] = projection_item
            self.projection_items[PROJECTIONS_ID[name]] = projection_item
        
        flag_tiling = False
        #------------ Sub menu of the image tiling ---------------
        submenu_image_tiling = wx.Menu()
        for name in sorted(const.IMAGE_TILING):
            new_id = wx.NewId()
            image_tiling_item = wx.MenuItem(submenu_image_tiling, new_id,\
                                name, kind=wx.ITEM_RADIO)
            submenu_image_tiling.AppendItem(image_tiling_item)
            self.ID_TO_TOOL_ITEM[new_id] = image_tiling_item
            
            #Save first id item
            if not(flag_tiling):
                self.id_tiling_first = new_id
                flag_tiling = True

        # Add sub itens in the menu
        self.AppendMenu(-1, _("Window width and level"), submenu_wl)
        self.AppendMenu(-1, _("Pseudo color"), submenu_pseudo_colours)
        self.AppendMenu(-1, _("Projection type"), submenu_projection)
        ###self.AppendMenu(-1, _("Image Tiling"), submenu_image_tiling)

        # It doesn't work in Linux
        self.Bind(wx.EVT_MENU, self.OnPopup)
        # In Linux the bind must be put on the submenu
        if sys.platform == 'linux2' or sys.platform == 'darwin':
            submenu_wl.Bind(wx.EVT_MENU, self.OnPopup)
            submenu_pseudo_colours.Bind(wx.EVT_MENU, self.OnPopup)
            submenu_image_tiling.Bind(wx.EVT_MENU, self.OnPopup)
            submenu_projection.Bind(wx.EVT_MENU, self.OnPopup)

        self.__bind_events()
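
Note that Example 7 compares against 'linux2', which only matches Linux under Python 2 (some 2.x builds even reported 'linux3'); on Python 3 the value is plain 'linux', so an exact comparison silently falls through to the non-Linux branch. A prefix check covers all of these spellings:

import sys

# 'linux2'/'linux3' (Python 2) and 'linux' (Python 3) all match the prefix.
is_linux = sys.platform.startswith("linux")
print("Running on Linux:", is_linux)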

Example 8

Project: robothon
Source File: setup.py
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration,dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core',parent_package,top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir,'code_generators')

    generate_umath_py = join(codegen_dir,'generate_umath.py')
    n = dot_join(config.name,'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py,'U'),generate_umath_py,
                                     ('.py','U',1))

    header_dir = 'include/numpy' # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        target = join(build_dir,header_dir,'config.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)
            tc = generate_testcode(target)
            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)
            result = config_cmd.try_run(tc,include_dirs=[python_include],
                                        library_dirs = default_lib_dirs)
            if not result:
                raise SystemError,"Failed to test configuration. "\
                      "See previous error messages for more information."

            moredefs = []
            #
            mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[],['m'],['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0,mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc,libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB',','.join(mathlibs)))

            def check_func(func_name):
                return config_cmd.check_func(func_name,
                                             libraries=mathlibs, decl=False,
                                             headers=['math.h'])

            for func_name, defsymbol in FUNCTIONS_TO_CHECK:
                if check_func(func_name):
                    moredefs.append(defsymbol)

            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            if sys.platform=='win32' or os.name=='nt':
                from numpy.distutils.misc_util import get_build_architecture
                a = get_build_architecture()
                print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % (a, os.name, sys.platform)
                if a == 'AMD64':
                    moredefs.append('DISTUTILS_USE_SDK')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod', decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            target_f = open(target,'a')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))
            target_f.close()
            cmd_ = 'ed - %s < /SourceCache/python_modules/python_modules-21/numpy/config.h.ed' % target
            print cmd_
            os.system(cmd_)
            print 'File:',target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir,header_dir,'numpyconfig.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)
            testcode = generate_numpyconfig_code(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)

            config.numpy_include_dirs
            result = config_cmd.try_run(testcode,
                                include_dirs = [python_include] + \
                                                       config.numpy_include_dirs,
                                        library_dirs = default_lib_dirs)

            if not result:
                raise SystemError,"Failed to generate numpy configuration. "\
                      "See previous error messages for more information."

            moredefs = []

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers = ['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))
            else:
                moredefs.append(('NPY_USE_C99_FORMATS', 0))

            # Add moredefs to header
            target_f = open(target,'a')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            cmd_ = 'ed - %s < /SourceCache/python_modules/python_modules-21/numpy/numpyconfig.h.ed' % target
            print cmd_
            os.system(cmd_)
            print 'File: %s' % target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext,build_dir):
        target = join(build_dir,header_dir,'__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script,target):
            f = open(target,'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src','arrayobject.c'),
            join('src','arraymethods.c'),
            join('src','scalartypes.inc.src'),
            join('src','arraytypes.inc.src'),
            join('src','_signbit.c'),
            join('src','_isnan.c'),
            join('src','ucsnarrow.c'),
            join('include','numpy','*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir,'genapi.py'),
            join(codegen_dir,'*.txt')
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('multiarray',
                         sources = [join('src','multiarraymodule.c'),
                                    generate_config_h,
                                    generate_numpyconfig_h,
                                    generate_numpy_api,
                                    join('src','scalartypes.inc.src'),
                                    join('src','arraytypes.inc.src'),
                                    join(codegen_dir,'generate_numpy_api.py'),
                                    join('*.py')
                                    ],
                         depends = deps,
                         )

    config.add_extension('umath',
                         sources = [generate_config_h,
                                    generate_numpyconfig_h,
                                    join('src','umathmodule.c.src'),
                                    generate_umath_c,
                                    generate_ufunc_api,
                                    join('src','scalartypes.inc.src'),
                                    join('src','arraytypes.inc.src'),
                                    ],
                         depends = [join('src','ufuncobject.c'),
                                    generate_umath_py,
                                    join(codegen_dir,'generate_ufunc_api.py'),
                                    ]+deps,
                         )

    config.add_extension('_sort',
                         sources=[join('src','_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  ],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src','scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  generate_ufunc_api],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt',0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        if blas_info:
            if ('NO_ATLAS_INFO',1) in blas_info.get('define_macros',[]):
                return None # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None # no extension module will be built

    config.add_extension('_dotblas',
                         sources = [get_dotblas_sources],
                         depends=[join('blasdot','_dotblas.c'),
                                  join('blasdot','cblas.h'),
                                  ],
                         include_dirs = ['blasdot'],
                         extra_info = blas_info
                         )


    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
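
The numpy core configuration above only ships the bundled fenv sources on Cygwin (the sys.platform == 'cygwin' check before the multiarray extension). A minimal standalone sketch of that pattern follows; the helper name is illustrative and not part of numpy's build, only the directory name comes from the example:

import sys

def optional_data_dirs():
    # Only Cygwin needs the bundled fenv implementation; other platforms
    # get fenv from their C library, so nothing extra is added there.
    dirs = []
    if sys.platform == 'cygwin':
        dirs.append('include/numpy/fenv')
    return dirs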

Example 9

Project: abstract_rendering
Source File: setup.py
View license
def getsitepackages():
    """Returns a list containing all global site-packages directories
    (and possibly site-python)."""

    _is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
    _is_pypy = hasattr(sys, 'pypy_version_info')
    _is_jython = sys.platform[:4] == 'java'

    prefixes = [sys.prefix, sys.exec_prefix]

    sitepackages = []
    seen = set()

    for prefix in prefixes:
        if not prefix or prefix in seen:
            continue
        seen.add(prefix)

        if sys.platform in ('os2emx', 'riscos') or _is_jython:
            sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
        elif _is_pypy:
            sitedirs = [os.path.join(prefix, 'site-packages')]
        elif sys.platform == 'darwin' and prefix == sys.prefix:
            if prefix.startswith("/System/Library/Frameworks/"):   # Apple's Python
                sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
                            os.path.join(prefix, "Extras", "lib", "python")]

            else:  # any other Python distros on OSX work this way
                sitedirs = [os.path.join(prefix, "lib",
                            "python" + sys.version[:3], "site-packages")]

        elif os.sep == '/':
            sitedirs = [os.path.join(prefix,
                                     "lib",
                                     "python" + sys.version[:3],
                                     "site-packages"),
                        os.path.join(prefix, "lib", "site-python"),
                        os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
            lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
            if (os.path.exists(lib64_dir) and
                os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
                if _is_64bit:
                    sitedirs.insert(0, lib64_dir)
                else:
                    sitedirs.append(lib64_dir)
            try:
                # sys.getobjects only available in --with-pydebug build
                sys.getobjects
                sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
            except AttributeError:
                pass
            # Debian-specific dist-packages directories:
            if sys.version[0] == '2':
                sitedirs.append(os.path.join(prefix, "lib",
                                             "python" + sys.version[:3],
                                             "dist-packages"))
            else:
                sitedirs.append(os.path.join(prefix, "lib",
                                             "python" + sys.version[0],
                                             "dist-packages"))
            sitedirs.append(os.path.join(prefix, "local/lib",
                                         "python" + sys.version[:3],
                                         "dist-packages"))
            sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
        else:
            sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
        if sys.platform == 'darwin':
            # for framework builds *only* we add the standard Apple
            # locations. Currently only per-user, but /Library and
            # /Network/Library could be added too
            if 'Python.framework' in prefix:
                home = os.environ.get('HOME')
                if home:
                    sitedirs.append(
                        os.path.join(home,
                                     'Library',
                                     'Python',
                                     sys.version[:3],
                                     'site-packages'))
        for sitedir in sitedirs:
            sitepackages.append(os.path.abspath(sitedir))
    return sitepackages
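
The function above keys several decisions off quick runtime probes before it ever looks at directories. A minimal sketch that isolates just those probes (the helper name and the returned dict are illustrative, not part of abstract_rendering):

import sys

def describe_runtime():
    # The same checks getsitepackages() starts with: word size, PyPy,
    # and Jython (which reports a sys.platform starting with "java").
    return {
        'is_64bit': (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32,
        'is_pypy': hasattr(sys, 'pypy_version_info'),
        'is_jython': sys.platform[:4] == 'java',
        'platform': sys.platform,
    }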

Example 10

Project: esky
Source File: f_cxfreeze.py
View license
def freeze(dist):
    """Freeze the given distribution data using cx_Freeze."""
    includes = dist.includes
    excludes = dist.excludes
    options = dist.freezer_options
    #  Merge in any includes/excludes given in freezer_options
    for inc in options.pop("includes",()):
        includes.append(inc)
    for exc in options.pop("excludes",()):
        excludes.append(exc)
    if "esky" not in includes and "esky" not in excludes:
        includes.append("esky")
    if "pypy" not in includes and "pypy" not in excludes:
        excludes.append("pypy")
    #  cx_Freeze doesn't seem to respect __path__ properly; hack it so
    #  that the required distutils modules are always found correctly.
    def load_distutils(finder,module):
        module.path = distutils.__path__ + module.path
        finder.IncludeModule("distutils.dist")
    cx_Freeze.hooks.load_distutils = load_distutils
    #  Build kwds arguments out of the given freezer opts.
    kwds = {}
    for (nm,val) in options.iteritems():
        kwds[_normalise_opt_name(nm)] = val
    kwds["includes"] = includes
    kwds["excludes"] = excludes
    kwds["targetDir"] = dist.freeze_dir
    #  Build an Executable object for each script.
    #  To include the esky startup code, we write each to a tempdir.
    executables = []
    for exe in dist.get_executables():
        base = None
        if exe.gui_only and sys.platform == "win32":
            base = "Win32GUI"
        executables.append(cx_Freeze.Executable(exe.script,base=base,targetName=exe.name,icon=exe.icon,**exe._kwds))
    #  Freeze up the executables
    f = cx_Freeze.Freezer(executables,**kwds)
    f.Freeze()
    #  Copy data files into the freeze dir
    for (src,dst) in dist.get_data_files():
        dst = os.path.join(dist.freeze_dir,dst)
        dstdir = os.path.dirname(dst)
        if not os.path.isdir(dstdir):
            dist.mkpath(dstdir)
        dist.copy_file(src,dst)
    #  Copy package data into the library.zip
    #  For now, this only works if there's a shared "library.zip" file.
    if f.createLibraryZip:
        lib = zipfile.ZipFile(os.path.join(dist.freeze_dir,"library.zip"),"a")
        for (src,arcnm) in dist.get_package_data():
            lib.write(src,arcnm)
        lib.close()
    else:
        for (src,arcnm) in dist.get_package_data():
            err = "use of package_data currently requires createLibraryZip=True"
            raise RuntimeError(err)
    #  Create the bootstrap code, using custom code if specified.
    code_source = ["__name__ = '__main__'"]
    esky_name = dist.distribution.get_name()
    code_source.append("__esky_name__ = %r" % (esky_name,))
    code_source.append(inspect.getsource(esky.bootstrap))
    if dist.compile_bootstrap_exes:
        if sys.platform == "win32":
            #  Unfortunately this doesn't work, because the cxfreeze exe
            #  contains frozen modules that are inaccessible to a bootstrapped
            #  interpreter.  Disabled until I figure out a workaround. :-(
            pass
            #  The pypy-compiled bootstrap exe will try to load a python env
            #  into its own process and run this "take2" code to bootstrap.
            #take2_code = code_source[1:]
            #take2_code.append(_CUSTOM_WIN32_CHAINLOADER)
            #take2_code.append(dist.get_bootstrap_code())
            #take2_code = compile("\n".join(take2_code),"<string>","exec")
            #take2_code = marshal.dumps(take2_code)
            #clscript = "import marshal; "
            #clscript += "exec marshal.loads(%r); " % (take2_code,)
            #clscript = clscript.replace("%","%%")
            #clscript += "chainload(\"%s\")"
            #  Here's the actual source for the compiled bootstrap exe.
            #from esky.bdist_esky import pypy_libpython
            #code_source.append(inspect.getsource(pypy_libpython))
            #code_source.append("_PYPY_CHAINLOADER_SCRIPT = %r" % (clscript,))
            #code_source.append(_CUSTOM_PYPY_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source = "\n".join(code_source)
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            bsexe = dist.compile_to_bootstrap_exe(exe,code_source)
            if sys.platform == "win32":
                fexe = os.path.join(dist.freeze_dir,exe.name)
                winres.copy_safe_resources(fexe,bsexe)
    else:
        if sys.platform == "win32":
            code_source.append(_CUSTOM_WIN32_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source.append("bootstrap()")
        code_source = "\n".join(code_source)
        
        maincode = compile_to_bytecode(code_source, INITNAME+".py")
        eskycode = compile_to_bytecode("", "esky/__init__.py")
        eskybscode = compile_to_bytecode("", "esky/bootstrap.py")
        
        #  Copy any core dependencies
        if "fcntl" not in sys.builtin_module_names:
            for nm in os.listdir(dist.freeze_dir):
                if nm.startswith("fcntl"):
                    dist.copy_to_bootstrap_env(nm)
        for nm in os.listdir(dist.freeze_dir):
            if is_core_dependency(nm):
                dist.copy_to_bootstrap_env(nm)
                
        #  Copy the loader program for each script into the bootstrap env, and
        #  append the bootstrapping code to it as a zipfile.
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            
            exepath = dist.copy_to_bootstrap_env(exe.name)
            if not dist.detached_bootstrap_library:
                #append library to the bootstrap exe.
                exepath = dist.copy_to_bootstrap_env(exe.name)
                bslib = zipfile.PyZipFile(exepath,"a",zipfile.ZIP_STORED)
            else:
                #Create a separate library.zip for the bootstrap exe.
                bslib_path = dist.copy_to_bootstrap_env("library.zip")
                bslib = zipfile.PyZipFile(bslib_path,"w",zipfile.ZIP_STORED)
            cdate = (2000,1,1,0,0,0)
            bslib.writestr(zipfile.ZipInfo(INITNAME+".pyc",cdate),maincode)
            bslib.writestr(zipfile.ZipInfo("esky/__init__.pyc",cdate),eskycode)
            bslib.writestr(zipfile.ZipInfo("esky/bootstrap.pyc",cdate),eskybscode)
            bslib.close()
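
In the loop that builds cx_Freeze Executable objects above, the only platform-specific piece is the choice of base. A minimal sketch of just that decision (the function name is illustrative; it only computes the value passed as base= and does not touch cx_Freeze itself):

import sys

def executable_base(gui_only):
    # On Windows, GUI-only scripts use the "Win32GUI" stub so no console
    # window is opened; everywhere else the cx_Freeze default (None) is fine.
    if gui_only and sys.platform == "win32":
        return "Win32GUI"
    return None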

Example 11

Project: parlparse
Source File: runfilters.py
View license
def RunFilterFile(FILTERfunction, xprev, sdate, sdatever, dname, jfin, patchfile, jfout, bquietc):
    # now apply patches and parse
    patchtempfilename = tempfile.mktemp("", "pw-applypatchtemp-", miscfuncs.tmppath)

    if not bquietc:
        print "reading " + jfin

    # apply patch filter
    kfin = jfin
    if os.path.isfile(patchfile) and ApplyPatches(jfin, patchtempfilename, patchfile):
        kfin = patchtempfilename

    # read the text of the file
    ofin = open(kfin)
    text = ofin.read()
    ofin.close()

    # do the filtering according to the type.  Some stuff is being inlined here
    if dname == 'regmem' or dname == 'votes' or dname == 'ni':
        regmemout = open(tempfilename, 'w')
        try:
            FILTERfunction(regmemout, text, sdate, sdatever)  # totally different filter function format
        finally:
            regmemout.close()
        # in win32 this function leaves the file open and stops it being renamed
        if sys.platform != "win32":
            xmlvalidate.parse(tempfilename) # validate XML before renaming
        if os.path.isfile(jfout):
            os.remove(jfout)
        os.rename(tempfilename, jfout)
        return

    safejfout = jfout
    assert dname in ('wrans', 'debates', 'wms', 'westminhall', 'lordspages')

    decode_from_utf8 = False
    if sdate > '2014-01-01' or (sdate > '2006-05-07' and re.search('<notus-date', text)):
        decode_from_utf8 = True
        text = re.sub("\n", ' ', text)
        text = re.sub("\s{2,}", ' ', text) # No need for multiple spaces anywhere
        text = re.sub("</?notus-date[^>]*>", "", text)
        text = re.sub("\s*<meta[^>]*>\s*", "", text)
        text = re.sub('(<h5 align="left">)((?:<a name="(.*?)">)*)', r"\2\1", text) # If you can't beat them, ...
        text = re.sub("(<br><b>[^:<]*:\s*column\s*\d+(?:WH)?\s*</b>)(\s+)(?i)", r"\1<br>\2", text)
        text = re.sub("(\s+)(<b>[^:<]*:\s*column\s*\d+(?:WH)?\s*</b><br>)(?i)", r"\1<br>\2", text)

        # Make sure correction is before written answer question number
        text = re.sub('(<a href="[^"]*corrtext[^"]*")\s*shape="rect">\s*(.*?)\s*(</a>)', r'\1>\2\3', text)
        text = re.sub('(\[\d+\])\s*((?:</p>)?)\s*(<a href="[^"]*corrtext[^"]*">.*?</a>)', r'\3 \1\2', text)

        # Fix new thing where they sometimes put (a), (b) of wrans, or "Official Report", in separate paragraphs
        # Two regular expressions, so as not to lose needed end </p> of a column heading.
        italic_para = '\s*<p>\s*(<i>\s*(?:\(.\)|Official Report,?)\s*</i>)\s*</p>\s*'
        text = re.sub('(?<!</b>)</p>' + italic_para + '<p[^>]*>', r' \1 ', text)
        text = re.sub('(?<=</b></p>)' + italic_para + '<p[^>]*>', r' \1 ', text)

        # May also need the same thing with a space, and look behind requires a fixed width pattern.
        text = re.sub('(?<!</b>) </p>' + italic_para + '<p[^>]*>', r' \1 ', text)
        text = re.sub('(?<=</b> </p>)' + italic_para + '<p[^>]*>', r' \1 ', text)
                
        # Don't want bad XHTML self closed table cells.
        text = re.sub('<td([^>]*) ?/>', r'<td\1></td>', text)
        # Or pointless empty headings
        text = re.sub('<h[45] align="[^"]*" ?/>', '', text)

        # Lords, big overall replacements
        text = text.replace('<br></br>', '<br>')
        text = text.replace('<br/>', '<br>')
        if dname == 'lordspages':
            text = re.sub(' shape="rect">', '>', text)
            text = re.sub(' class="anchor"', '', text)
            text = re.sub(' class="anchor noCont"', '', text)
            text = re.sub(' class="anchor-column"', '', text)
            text = re.sub(' class="columnNum"', '', text)
            text = re.sub('(<a[^>]*>) (</a>)', r'\1\2', text)
            text = re.sub('(<h5>)((?:<a name="(.*?)">(?:</a>)?)*)', r"\2\1", text) # If you can't beat them, ...
            text = re.sub('<columnNum><br />( |\xc2\xa0)<br />', '<br>&nbsp;<br>', text)
            text = re.sub('<br />( |\xc2\xa0)<br /></columnNum>', '<br>&nbsp;<br>', text)
            text = text.replace('<b align="center">', '<b>')
            text = text.replace('<br />', '<br>')
            text = text.replace('CONTENTS', 'CONTENTS\n')
            text = re.sub('</?small>', '', text)
            text = re.sub('<div class="amendment(?:_heading)?">', '', text)
            text = re.sub('</?div>', '', text)
            # Double bolding sometimes has some <a> tags in between
            text = re.sub(r'<b>((?:</?a[^>]*>|\s)*)<b>', r'\1<b>', text)
            text = re.sub('</b></b>', '</b>', text)
            text = re.sub('</b><b>', '', text)
            text = re.sub('<I></I>', '', text)

    # Changes in 2008-09 session
    if sdate>'2008-12-01' and dname=='lordspages':
        text = re.sub('(?i)Asked By (<b>.*?)</b>', r'\1:</b>', text)
        text = re.sub('(?i)((?:Moved|Tabled) By) ?((?:<a name="[^"]*"></a>)*)<b>(.*?)</b>', r'\1 \2\3', text)
        text = re.sub('(?i)(Moved on .*? by) ?<b>(.*?)</b>', r'\1 \2', text)

    if decode_from_utf8:
        # Some UTF-8 gets post-processed into nonsense
        # XXX - should probably be in miscfuncs.py/StraightenHTMLrecurse with other character set evil
        text = text.replace("\xe2\x22\xa2", "&trade;")
        text = text.replace("\xc2(c)", "&copy;")
        text = text.replace("\xc2(r)", "&reg;")
        text = text.replace("\xc21/4", "&frac14;")
        text = text.replace("\xc21/2", "&frac12;")
        text = text.replace("\xc23/4", "&frac34;")
        text = text.replace("\xc3\"", "&#279;")
        text = text.replace("\xc3 ", "&agrave;")
        text = text.replace("\xc3(c)", "&eacute;")
        text = text.replace("\xc3(r)", "&icirc;")
        text = text.replace("\xc31/4", "&uuml;")
        # And it's true UTF-8 since the start of the 2009 session, let's pretend it isn't.
        try:
            text = text.decode('utf-8').encode('ascii', 'xmlcharrefreplace')
        except:
            print "Failed to decode text from utf-8"
            pass

    # They've started double bolding names, parts of names, splitting names up, and having a "[" on its own
    if sdate >= '2013-01-01':
        text = re.sub(r'</b>(\s*)<b>', r'\1', text)
        # <b> <b>Name</b> (Constituency) (Party):</b>
        text = re.sub('<b>\s*<b>([^<]*)</b>([^<]*)</b>', r'<b>\1\2</b>', text)
        # <b><b>Name bits:</b></b>
        text = re.sub('<b>\s*(<b>([^<]|<i>\s*\(Urgent Question\)\s*</i>)*</b>\s*)</b>', r'\1', text)
        # <p> <b>[</b> </p> <p> <b>TIME</b> </p>
        text = re.sub('<p>\s*<b>\[</b>\s*</p>\s*<p>\s*<b>([^<]*)</b>\s*</p>', r'<p> <b>[\1</b> </p>', text)
        # And have changed <center> to <span class="centred">
        text = re.sub('<span class="centred">(.*?)</span>', r'<center>\1</center>', text)

    if sdate >= '2015-10-12':
        # annoying double <b> round members rose text
        text = re.sub(r'<b><b>Several hon. Members </b>', '<b>Several hon. Members ', text)

    if sdate >= '2016-01-01':
        # Deal with big heading spotting aname appearing AFTER heading
        text = re.sub('(<h3(?:(?!<h3).)*?)(<a name="ordayhd_\d">)', r'\2\1', text)

    (flatb, gidname) = FILTERfunction(text, sdate)
    for i in range(len(gidname)):
        tempfilenameoldxml = None

        gidnam = gidname[i]
        if gidname[i] == 'lordswms':
            gidnam = 'wms'
        if gidname[i] == 'lordswrans':
            gidnam = 'wrans'
        CreateGIDs(gidnam, sdate, sdatever, flatb[i])
        jfout = safejfout
        if gidname[i] != 'lords':
            jfout = re.sub('(daylord|lordspages)', gidname[i], jfout)

        # wrans case is special, with its question-id numbered gids
        if dname == 'wrans':
            majblocks = CreateWransGIDs(flatb[i], (sdate + sdatever)) # combine the date and datever.  the old style gids stand on the paragraphs still
            bMakeOldWransGidsToNew = (sdate < "2005")

        fout = open(tempfilename, "w")
        WriteXMLHeader(fout);
        fout.write('<publicwhip scrapeversion="%s" latest="yes">\n' % sdatever)

        # go through and output all the records into the file
        if dname == 'wrans':
            for majblock in majblocks:
                WriteXMLspeechrecord(fout, majblock[0], bMakeOldWransGidsToNew, True)
                for qblock in majblock[1]:
                    qblock.WriteXMLrecords(fout, bMakeOldWransGidsToNew)
        else:
            for qb in flatb[i]:
                WriteXMLspeechrecord(fout, qb, False, False)
        fout.write("</publicwhip>\n\n")
        fout.close()

        # load in a previous file and over-write it if necessary
        if xprev:
            xprevin = xprev[0]
            if gidname[i] != 'lords':
                xprevin = re.sub('(daylord|lordspages)', gidname[i], xprevin)
            if os.path.isfile(xprevin):
                xin = open(xprevin, "r")
                xprevs = xin.read()
                xin.close()

                # separate out the scrape versions
                mpw = re.search('<publicwhip([^>]*)>\n([\s\S]*?)</publicwhip>', xprevs)
                if mpw.group(1):
                    re.match(' scrapeversion="([^"]*)" latest="yes"', mpw.group(1)).group(1) == xprev[1]
                # else it's old style xml files that had no scrapeversion or latest attributes
                if dname == 'wrans':
                    xprevcompress = FactorChangesWrans(majblocks, mpw.group(2))
                else:
                    xprevcompress = FactorChanges(flatb[i], mpw.group(2))

                tempfilenameoldxml = tempfile.mktemp(".xml", "pw-filtertempold-", miscfuncs.tmppath)
                foout = open(tempfilenameoldxml, "w")
                WriteXMLHeader(foout)
                foout.write('<publicwhip scrapeversion="%s" latest="no">\n' % xprev[1])
                foout.writelines(xprevcompress)
                foout.write("</publicwhip>\n\n")
                foout.close()

        # in win32 this function leaves the file open and stops it being renamed
        if sys.platform != "win32":
            xmlvalidate.parse(tempfilename) # validate XML before renaming

        # in case of error, an exception is thrown, so this line would not be reached
        # we rename both files (the old and new xml) at once

        if os.path.isfile(jfout):
            os.remove(jfout)
        if not os.path.isdir(os.path.dirname(jfout)):  # Lords output directories need making here
            os.mkdir(os.path.dirname(jfout))
        os.rename(tempfilename, jfout)

        # copy over onto old xml file
        if tempfilenameoldxml:
            if sys.platform != "win32":
                xmlvalidate.parse(tempfilenameoldxml) # validate XML before renaming
            assert os.path.isfile(xprevin)
            os.remove(xprevin)
            os.rename(tempfilenameoldxml, xprevin)
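
Both output paths in this filter follow the same "validate, then atomically swap" sequence, skipping validation on win32 because (as the comments note) the validator keeps the file open and blocks the rename. A minimal sketch of that sequence; the helper and the validate callable are illustrative, with validate standing in for xmlvalidate.parse:

import os
import sys

def swap_in_validated(tempfilename, outfilename, validate):
    # Validate first so a bad file never replaces a good one; on win32 the
    # validation step is skipped because it would keep the temp file open.
    if sys.platform != "win32":
        validate(tempfilename)
    if os.path.isfile(outfilename):
        os.remove(outfilename)
    os.rename(tempfilename, outfilename)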

Example 12

Project: sd-agent
Source File: unix.py
View license
    def check(self, agentConfig):
        """Return an aggregate of CPU stats across all CPUs
        When figures are not available, False is sent back.
        """
        def format_results(us, sy, wa, idle, st, guest=None):
            data = {'cpuUser': us, 'cpuSystem': sy, 'cpuWait': wa, 'cpuIdle': idle, 'cpuStolen': st, 'cpuGuest': guest}
            return dict((k, v) for k, v in data.iteritems() if v is not None)

        def get_value(legend, data, name, filter_value=None):
            "Using the legend and a metric name, get the value or None from the data line"
            if name in legend:
                value = to_float(data[legend.index(name)])
                if filter_value is not None:
                    if value > filter_value:
                        return None
                return value

            else:
                # FIXME return a float or False, would trigger type error if not python
                self.logger.debug("Cannot extract cpu value %s from %s (%s)" % (name, data, legend))
                return 0.0
        try:
            if Platform.is_linux():
                output, _, _ = get_subprocess_output(['mpstat', '1', '3'], self.logger)
                mpstat = output.splitlines()
                # [email protected]:~$ mpstat 1 3
                # Linux 2.6.32-341-ec2 (ip)   01/19/2012  _x86_64_  (2 CPU)
                #
                # 04:22:41 PM  CPU    %usr   %nice    %sys %iowait    %irq   %soft  %steal  %guest   %idle
                # 04:22:42 PM  all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                # 04:22:43 PM  all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                # 04:22:44 PM  all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                # Average:     all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                #
                # OR
                #
                # Thanks to Mart Visser to spotting this one.
                # blah:/etc/sd-agent# mpstat
                # Linux 2.6.26-2-xen-amd64 (atira)  02/17/2012  _x86_64_
                #
                # 05:27:03 PM  CPU    %user   %nice   %sys %iowait    %irq   %soft  %steal  %idle   intr/s
                # 05:27:03 PM  all    3.59    0.00    0.68    0.69    0.00   0.00    0.01   95.03    43.65
                #
                legend = [l for l in mpstat if "%usr" in l or "%user" in l]
                avg = [l for l in mpstat if "Average" in l]
                if len(legend) == 1 and len(avg) == 1:
                    headers = [h for h in legend[0].split() if h not in ("AM", "PM")]
                    data = avg[0].split()

                    # Userland
                    # Debian lenny says %user so we look for both
                    # One of them will be 0
                    cpu_metrics = {
                        "%usr": None, "%user": None, "%nice": None,
                        "%iowait": None, "%idle": None, "%sys": None,
                        "%irq": None, "%soft": None, "%steal": None,
                        "%guest": None
                    }

                    for cpu_m in cpu_metrics:
                        cpu_metrics[cpu_m] = get_value(headers, data, cpu_m, filter_value=110)

                    if any([v is None for v in cpu_metrics.values()]):
                        self.logger.warning("Invalid mpstat data: %s" % data)

                    cpu_user = cpu_metrics["%usr"] + cpu_metrics["%user"] + cpu_metrics["%nice"]
                    cpu_system = cpu_metrics["%sys"] + cpu_metrics["%irq"] + cpu_metrics["%soft"]
                    cpu_wait = cpu_metrics["%iowait"]
                    cpu_idle = cpu_metrics["%idle"]
                    cpu_stolen = cpu_metrics["%steal"]
                    cpu_guest = cpu_metrics["%guest"]

                    return format_results(cpu_user,
                                          cpu_system,
                                          cpu_wait,
                                          cpu_idle,
                                          cpu_stolen,
                                          cpu_guest)
                else:
                    return False

            elif sys.platform == 'darwin':
                # generate 3 seconds of data
                # ['          disk0           disk1       cpu     load average', '    KB/t tps  MB/s     KB/t tps  MB/s  us sy id   1m   5m   15m', '   21.23  13  0.27    17.85   7  0.13  14  7 79  1.04 1.27 1.31', '    4.00   3  0.01     5.00   8  0.04  12 10 78  1.04 1.27 1.31', '']
                iostats, _, _ = get_subprocess_output(['iostat', '-C', '-w', '3', '-c', '2'], self.logger)
                lines = [l for l in iostats.splitlines() if len(l) > 0]
                legend = [l for l in lines if "us" in l]
                if len(legend) == 1:
                    headers = legend[0].split()
                    data = lines[-1].split()
                    cpu_user = get_value(headers, data, "us")
                    cpu_sys = get_value(headers, data, "sy")
                    cpu_wait = 0
                    cpu_idle = get_value(headers, data, "id")
                    cpu_st = 0
                    return format_results(cpu_user, cpu_sys, cpu_wait, cpu_idle, cpu_st)
                else:
                    self.logger.warn("Expected to get at least 4 lines of data from iostat instead of just " + str(iostats[:max(80, len(iostats))]))
                    return False

            elif sys.platform.startswith("freebsd"):
                # generate 3 seconds of data
                # tty            ada0              cd0            pass0             cpu
                # tin  tout  KB/t tps  MB/s   KB/t tps  MB/s   KB/t tps  MB/s  us ni sy in id
                # 0    69 26.71   0  0.01   0.00   0  0.00   0.00   0  0.00   2  0  0  1 97
                # 0    78  0.00   0  0.00   0.00   0  0.00   0.00   0  0.00   0  0  0  0 100
                iostats, _, _ = get_subprocess_output(['iostat', '-w', '3', '-c', '2'], self.logger)
                lines = [l for l in iostats.splitlines() if len(l) > 0]
                legend = [l for l in lines if "us" in l]
                if len(legend) == 1:
                    headers = legend[0].split()
                    data = lines[-1].split()
                    cpu_user = get_value(headers, data, "us")
                    cpu_nice = get_value(headers, data, "ni")
                    cpu_sys = get_value(headers, data, "sy")
                    cpu_intr = get_value(headers, data, "in")
                    cpu_wait = 0
                    cpu_idle = get_value(headers, data, "id")
                    cpu_stol = 0
                    return format_results(cpu_user + cpu_nice, cpu_sys + cpu_intr, cpu_wait, cpu_idle, cpu_stol)

                else:
                    self.logger.warn("Expected to get at least 4 lines of data from iostat instead of just " + str(iostats[:max(80, len(iostats))]))
                    return False

            elif sys.platform == 'sunos5':
                # mpstat -aq 1 2
                # SET minf mjf xcal  intr ithr  csw icsw migr smtx  srw syscl  usr sys  wt idl sze
                # 0 5239   0 12857 22969 5523 14628   73  546 4055    1 146856    5   6   0  89  24 <-- since boot
                # 1 ...
                # SET minf mjf xcal  intr ithr  csw icsw migr smtx  srw syscl  usr sys  wt idl sze
                # 0 20374   0 45634 57792 5786 26767   80  876 20036    2 724475   13  13   0  75  24 <-- past 1s
                # 1 ...
                # http://docs.oracle.com/cd/E23824_01/html/821-1462/mpstat-1m.html
                #
                # Will aggregate over all processor sets
                output, _, _ = get_subprocess_output(['mpstat', '-aq', '1', '2'], self.logger)
                mpstat = output.splitlines()
                lines = [l for l in mpstat if len(l) > 0]
                # discard the first len(lines)/2 lines
                lines = lines[len(lines)/2:]
                legend = [l for l in lines if "SET" in l]
                assert len(legend) == 1
                if len(legend) == 1:
                    headers = legend[0].split()
                    # collect stats for each processor set
                    # and aggregate them based on the relative set size
                    d_lines = [l for l in lines if "SET" not in l]
                    user = [get_value(headers, l.split(), "usr") for l in d_lines]
                    kern = [get_value(headers, l.split(), "sys") for l in d_lines]
                    wait = [get_value(headers, l.split(), "wt") for l in d_lines]
                    idle = [get_value(headers, l.split(), "idl") for l in d_lines]
                    size = [get_value(headers, l.split(), "sze") for l in d_lines]
                    count = sum(size)
                    rel_size = [s/count for s in size]
                    dot = lambda v1, v2: reduce(operator.add, map(operator.mul, v1, v2))
                    return format_results(dot(user, rel_size),
                                          dot(kern, rel_size),
                                          dot(wait, rel_size),
                                          dot(idle, rel_size),
                                          0.0)
            else:
                self.logger.warn("CPUStats: unsupported platform")
                return False
        except Exception:
            self.logger.exception("Cannot compute CPU stats")
            return False
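
The check() method above is essentially a dispatcher: sys.platform decides which command-line tool gets parsed for CPU figures. A minimal sketch of that dispatch (illustrative only; the real code also uses Platform.is_linux(), which covers more cases than the startswith check shown here):

import sys

def cpu_stats_source():
    # Which tool the example parses on each platform.
    if sys.platform.startswith("linux"):
        return "mpstat 1 3"
    if sys.platform == "darwin":
        return "iostat -C -w 3 -c 2"
    if sys.platform.startswith("freebsd"):
        return "iostat -w 3 -c 2"
    if sys.platform == "sunos5":
        return "mpstat -aq 1 2"
    return None  # unsupported platform; check() returns False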

Example 13

Project: MCEdit-Unified
Source File: options.py
View license
    def initComponents(self):
        """Initilize the window components. Call this after translation hs been loaded."""
        autoBrakeRow = albow.CheckBoxLabel("Autobrake",
                                              ref=config.controls.autobrake,
                                              tooltipText="Apply brake when not pressing movement keys")

        swapAxesRow = albow.CheckBoxLabel("Swap Axes Looking Down",
                                             ref=config.controls.swapAxes,
                                             tooltipText="Change the direction of the Forward and Backward keys when looking down")

        cameraAccelRow = albow.FloatInputRow("Camera Acceleration: ",
                                                ref=config.controls.cameraAccel, width=100, min=5.0)

        cameraDragRow = albow.FloatInputRow("Camera Drag: ",
                                               ref=config.controls.cameraDrag, width=100, min=1.0)

        cameraMaxSpeedRow = albow.FloatInputRow("Camera Max Speed: ",
                                                   ref=config.controls.cameraMaxSpeed, width=100, min=1.0)

        cameraBrakeSpeedRow = albow.FloatInputRow("Camera Braking Speed: ",
                                                     ref=config.controls.cameraBrakingSpeed, width=100,
                                                     min=1.0)

        mouseSpeedRow = albow.FloatInputRow("Mouse Speed: ",
                                               ref=config.controls.mouseSpeed, width=100, min=0.1,
                                               max=20.0)

        undoLimitRow = albow.IntInputRow("Undo Limit: ",
                                            ref=config.settings.undoLimit, width=100, min=0)

        maxCopiesRow = albow.IntInputRow("Copy Stack Size: ",
                                            ref=config.settings.maxCopies, width=100, min=0,
                                            tooltipText="Maximum number of copied objects.")

        compassSizeRow = albow.IntInputRow("Compass Size (%): ",
                                            ref=config.settings.compassSize, width=100, min=0, max=100)

        fontProportion = albow.IntInputRow("Fonts Proportion (%): ",
                                            ref=config.settings.fontProportion, width=100, min=0,
                                            tooltipText="Fonts sizing proportion. The number is a percentage.\nRestart needed!")
        albow.resource.font_proportion = config.settings.fontProportion.get()

        fogIntensityRow = albow.IntInputRow("Fog Intensity (%): ",
                                            ref=config.settings.fogIntensity, width=100, min=0, max=100)

        invertRow = albow.CheckBoxLabel("Invert Mouse",
                                           ref=config.controls.invertMousePitch,
                                           tooltipText="Reverse the up and down motion of the mouse.")

        spaceHeightRow = albow.IntInputRow("Low Detail Height",
                                              ref=config.settings.spaceHeight,
                                              tooltipText="When you are this far above the top of the world, move fast and use low-detail mode.")

        blockBufferRow = albow.IntInputRow("Block Buffer (MB):",
                                              ref=albow.AttrRef(self, 'blockBuffer'), min=1,
                                              tooltipText="Amount of memory used for temporary storage.  When more than this is needed, the disk is used instead.")

        setWindowPlacementRow = albow.CheckBoxLabel("Set Window Placement",
                                                       ref=config.settings.setWindowPlacement,
                                                       tooltipText="Try to save and restore the window position.")

        rotateBlockBrushRow = albow.CheckBoxLabel("Rotate block with brush",
                                                        ref=config.settings.rotateBlockBrush,
                                                        tooltipText="When rotating your brush, also rotate the orientation of the block your brushing with")

        compassToggleRow = albow.CheckBoxLabel("Toggle compass",
                                               ref=config.settings.compassToggle)

        windowSizeRow = albow.CheckBoxLabel("Window Resize Alert",
                                               ref=config.settings.shouldResizeAlert,
                                               tooltipText="Reminds you that the cursor won't work correctly after resizing the window.")

        superSecretSettingsRow = albow.CheckBoxLabel("Super Secret Settings",
                                                ref=config.settings.superSecretSettings,
                                                tooltipText="Weird stuff happen!")

        longDistanceRow = albow.CheckBoxLabel("Long-Distance Mode",
                                                 ref=config.settings.longDistanceMode,
                                                 tooltipText="Always target the farthest block under the cursor, even in mouselook mode.")

        flyModeRow = albow.CheckBoxLabel("Fly Mode",
                                            ref=config.settings.flyMode,
                                            tooltipText="Moving forward and Backward will not change your altitude in Fly Mode.")
        
        showCommandsRow = albow.CheckBoxLabel("Show Block Info when hovering",
                                              ref=config.settings.showQuickBlockInfo,
                                              tooltipText="Shows summarized info of some Blocks when hovering over it.")

        cancelCommandBlockOffset = albow.CheckBoxLabel("Cancel Command Block Offset",
                                                       ref=config.schematicCopying.cancelCommandBlockOffset,
                                                       tooltipText="Cancels the command blocks coords changed when copied.")

        lng = config.settings.langCode.get()

        langs = sorted(self.getLanguageChoices().items())

        langNames = [k for k, v in langs]

        self.languageButton = albow.ChoiceButton(langNames, choose=self.changeLanguage, doNotTranslate=True)
        if self.sgnal[lng] in self.languageButton.choices:
            self.languageButton.selectedChoice = self.sgnal[lng]

        langButtonRow = albow.Row((albow.Label("Language", tooltipText="Choose your language."), self.languageButton))

        portableList = ["Portable", "Fixed"]
        self.goPortableButton = goPortableButton = albow.ChoiceButton(portableList, choose=self.togglePortable)
        goPortableButton.selectedChoice = self.saveOldPortable

        goPortableButton.tooltipText = self.portableButtonTooltip()
        goPortableRow = albow.Row((albow.Label("Install Mode"), goPortableButton))

# Disabled Crash Reporting Option
#       reportRow = albow.CheckBoxLabel("Report Errors",
#                                          ref=config.settings.reportCrashes,
#                                          tooltipText="Automatically report errors to the developer.")

        self.inputs = (
            spaceHeightRow,
            cameraAccelRow,
            cameraDragRow,
            cameraMaxSpeedRow,
            cameraBrakeSpeedRow,
            blockBufferRow,
            mouseSpeedRow,
            undoLimitRow,
            maxCopiesRow,
            compassSizeRow,
            fontProportion,
            fogIntensityRow,
        )

        options = (
                    longDistanceRow,
                    flyModeRow,
                    autoBrakeRow,
                    swapAxesRow,
                    invertRow,
                    superSecretSettingsRow,
                    rotateBlockBrushRow,
                    compassToggleRow,
                    showCommandsRow,
                    cancelCommandBlockOffset,
                    langButtonRow,
                    ) + (
                        ((sys.platform == "win32" and pygame.version.vernum == (1, 9, 1)) and (windowSizeRow,) or ())
                    ) + (
                        (sys.platform == "win32") and (setWindowPlacementRow,) or ()
                    ) + (
                        (not sys.platform == "darwin") and (goPortableRow,) or ()
                    )

        rightcol = albow.Column(options, align='r')
        leftcol = albow.Column(self.inputs, align='r')

        optionsColumn = albow.Column((albow.Label("Options"),
                                      albow.Row((leftcol, rightcol), align="t")))

        settingsRow = albow.Row((optionsColumn,))

        buttonsRow = albow.Row((albow.Button("OK", action=self.dismiss), albow.Button("Cancel", action=self.cancel)))

        resetToDefaultRow = albow.Row((albow.Button("Reset to default", action=self.resetDefault),))

        optionsColumn = albow.Column((settingsRow, buttonsRow, resetToDefaultRow))
        optionsColumn.key_down = self.key_down

        self.add(optionsColumn)
        self.shrink_wrap()
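
The options tuple at the end of initComponents() is assembled with and/or expressions that append rows only on certain platforms. The same selection written out as plain conditionals, as a sketch; the function and its parameters are illustrative, with the row objects being whatever albow widgets the caller built and pygame_vernum standing in for pygame.version.vernum:

import sys

def platform_only_rows(windowSizeRow, setWindowPlacementRow, goPortableRow,
                       pygame_vernum):
    rows = []
    # The window-resize alert only matters on win32 with pygame 1.9.1.
    if sys.platform == "win32" and pygame_vernum == (1, 9, 1):
        rows.append(windowSizeRow)
    # Saving window placement is Windows-only.
    if sys.platform == "win32":
        rows.append(setWindowPlacementRow)
    # The portable/fixed install switch is hidden on OS X.
    if sys.platform != "darwin":
        rows.append(goPortableRow)
    return tuple(rows)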

Example 14

Project: pypar
Source File: compile.py
View license
def compile(FNs=None, CC=None, LD=None, SFLAG=None, verbose=1):
    """compile(FNs=None, CC=None, LD=None, SFLAG=None):

       Compile FN(s) using compiler CC (e.g. mpicc),
       Loader LD and shared flag SFLAG.
       If CC is absent, use the default compiler for the platform;
       if LD is absent, CC is used;
       if SFLAG is absent, the platform default is used.
       FNs can be either one filename or a list of filenames.
       In the latter case, the first will be used to name the .so file.
    """

    # Input check
    assert not FNs is None, 'No filename provided'

    if not type(FNs) == types.ListType:
        FNs = [FNs]

    # Determine platform and compiler
    if sys.platform == 'sunos5':  # Solaris
        if CC:
            compiler = CC
        else:
            compiler = 'gcc'
        if LD:
            loader = LD
        else:
            loader = compiler
        if SFLAG:
            sharedflag = SFLAG
        else:
            sharedflag = 'G'

    elif sys.platform == 'osf1V5':  # Compaq AlphaServer
        if CC:
            compiler = CC
        else:
            compiler = 'cc'
        if LD:
            loader = LD
        else:
            loader = compiler
        if SFLAG:
            sharedflag = SFLAG
        else:
            sharedflag = 'shared'

    elif sys.platform == 'linux2':  # Linux
        if CC:
            compiler = CC
        else:
            compiler = 'gcc'
        if LD:
            loader = LD
        else:
            loader = compiler
        if SFLAG:
            sharedflag = SFLAG
        else:
            sharedflag = 'shared'

    elif sys.platform == 'darwin':  # Mac OS X:
        if CC:
            compiler = CC
        else:
            compiler = 'cc'
        if LD:
            loader = LD
        else:
            loader = compiler
        if SFLAG:
            sharedflag = SFLAG
        else:
            sharedflag = 'bundle -flat_namespace -undefined suppress'

    else:
        if verbose:
            print 'Unrecognised platform %s, revert to default' % sys.platform

        if CC:
            compiler = CC
        else:
            compiler = 'cc'
        if LD:
            loader = LD
        else:
            loader = 'ld'
        if SFLAG:
            sharedflag = SFLAG
        else:
            sharedflag = 'G'

    # Find location of include files
    python_include = os.path.join(os.path.join(sys.exec_prefix, 'include'),
                                  'python' + sys.version[:3])

    # Check existence of Python.h
    headerfile = python_include + '/Python.h'
    try:
        open(headerfile, 'r')
    except:
        raise """Did not find Python header file %s.
        Make sure files for Python C-extensions are installed.
        In debian linux, for example, you need to install a
        package called something like python2.1-dev""" % headerfile

    # Get numpy include
    numpy_include = numpy.get_include()

    # Check filename(s)
    object_files = ''
    for FN in FNs:
        root, ext = os.path.splitext(FN)
        if ext == '':
            FN = FN + '.c'
        elif ext.lower() != '.c':
            raise Exception('Unrecognised extension: ' + FN)

        try:
            open(FN, 'r')
        except:
            raise Exception('Could not open: ' + FN)

        if not object_files:
            root1 = root  # Remember first filename
        object_files += root + '.o '

        # Compile
        s = '%s -c %s -I%s -I%s -o %s.o -Wall' % (compiler, FN,
                                                  python_include,
                                                  numpy_include,
                                                  root)
        if os.name == 'posix' and os.uname()[4] == 'x86_64':
            #Extra flags for 64 bit architectures
            #s += ' -fPIC -m64' #gcc
            #s += ' -fPIC -tp amd64' #pgcc
            s += ' -fPIC'  # Position independent code allows 64 bit archs
        if verbose:
            print s
        else:
            s = s + ' 2> /dev/null'  # Suppress errors

        try:
            os.system(s)
        except:
            raise Exception('Could not compile %s - please try manually' % FN)

    # Make shared library (*.so)
    s = "%s -%s %s -o %s.so" % (loader, sharedflag, object_files, root1)

    if os.name == 'posix' and os.uname()[4] == 'x86_64':
        pass
        #Extra flags for 64 bit architectures using Portland compilers
        s += ' -mcmodel=medium'

    if verbose:
        print s
    else:
        s = s + ' 2> /dev/null'  # Suppress warnings

    try:
        err = os.system(s)
    except:
        raise Exception('Could not link %s - please try manually' % root1)
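
The long if/elif chain above just maps sys.platform onto a default compiler and shared-library flag (with the loader defaulting to the compiler, except on unrecognised platforms where it falls back to plain 'ld'). A table-driven sketch of the same defaults; the dict and function names are illustrative and not part of pypar:

import sys

_TOOLCHAIN_DEFAULTS = {
    'sunos5': ('gcc', 'G'),
    'osf1V5': ('cc', 'shared'),
    'linux2': ('gcc', 'shared'),
    'darwin': ('cc', 'bundle -flat_namespace -undefined suppress'),
}

def default_toolchain():
    # Fall back to ('cc', 'G') for unrecognised platforms, as compile() does.
    return _TOOLCHAIN_DEFAULTS.get(sys.platform, ('cc', 'G'))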

Example 15

Project: flare-ida
Source File: apply_callee_type.py
View license
    def getBuiltinGlobalTypeCtypes(self):
        self.logger.debug('Getting GlobalType the Ctypes way')

        ############################################################
        # Several type-related functions aren't accessible via IDAPython,
        # so we have to do things with ctypes
        idaname = "ida64" if idc.__EA64__ else "ida"
        if sys.platform == "win32":
            g_dll = ctypes.windll[idaname + ".wll"]
        elif sys.platform == "linux2":
            g_dll = ctypes.cdll["lib" + idaname + ".so"]
        elif sys.platform == "darwin":
            g_dll = ctypes.cdll["lib" + idaname + ".dylib"]

        ############################################################
        # Specifying function types for a few IDA SDK functions to keep the 
        # pointer-to-pointer args clear.
        get_named_type = g_dll.get_named_type
        get_named_type.argtypes = [
            ctypes.c_void_p,                                #const til_t *ti,
            ctypes.c_char_p,                                #const char *name,
            ctypes.c_int,                                   #int ntf_flags,
            ctypes.POINTER(ctypes.POINTER(ctypes.c_ubyte)), #const type_t **type=NULL,
            ctypes.POINTER(ctypes.POINTER(ctypes.c_ubyte)), #const p_list **fields=NULL,
            ctypes.POINTER(ctypes.POINTER(ctypes.c_ubyte)), #const char **cmt=NULL,
            ctypes.POINTER(ctypes.POINTER(ctypes.c_ubyte)), #const p_list **fieldcmts=NULL,
            ctypes.POINTER(ctypes.c_ulong),                 #sclass_t *sclass=NULL,
            ctypes.POINTER(ctypes.c_ulong),                 #uint32 *value=NULL);
        ]

        sym = idaapi.til_symbol_t()
        #dang - no predicate func support via idapython :(
        #idaapi.choose_named_type2(idaapi.cvar.idati, 'Choose type to apply', idaapi.NTF_SYMM, predFunc, sym)
        ret = idaapi.choose_named_type2(idaapi.cvar.idati, 'Choose type to apply', idaapi.NTF_SYMM, None, sym)
        if not ret:
            self.logger.debug('User canceled. Bailing out')
            return
        til = sym.til
        funcname = sym.name

        typ_type = ctypes.POINTER(ctypes.c_ubyte)()
        typ_fields = ctypes.POINTER(ctypes.c_ubyte)()
        typ_cmt = ctypes.POINTER(ctypes.c_ubyte)()
        typ_fieldcmts = ctypes.POINTER(ctypes.c_ubyte)()
        typ_sclass = ctypes.c_ulong()
        value = ctypes.c_ulong()
        ret = get_named_type(
                long(til.this),
                funcname, 
                idaapi.NTF_SYMM, 
                ctypes.byref(typ_type),
                ctypes.byref(typ_fields),
                ctypes.byref(typ_cmt),
                ctypes.byref(typ_fieldcmts),
                ctypes.byref(typ_sclass),
                ctypes.byref(value)
        )
        if ret == 0:
            self.logger.debug('Could not find %s', funcname)
            return
        ########################################
        # the following isn't needed, as moved to tinfo_t usage
        #if typ_type[0] != idaapi.BT_FUNC:
        #    #not positive that the first type value has to be BT_FUNC or not...
        #    # and whether it's important to only apply to funcs or not
        #    self.logger.debug('Found named type, but not a function: %s', funcname)
        #    return
        #type_arr = ctypes.create_string_buffer(0x400)
        #type_arr[0] = chr(idaapi.BT_PTR)
        #manualTypeCopy(type_arr, 1, len(type_arr), typ_type)
        #name_buffer = ctypes.create_string_buffer(0x400)
        #print_type_to_one_line(
        #    name_buffer, 
        #    len(name_buffer),
        #    long(til.this),
        #    typ_type,
        #    funcname,
        #    typ_cmt,
        #    typ_fields,
        #    typ_fieldcmts
        #)
        #self.logger.info('Found type: %s', name_buffer.value)
        ########################################
        #this works as well, but it's deprecated
        #self.logger.info('Trying to set type: %s', name_buffer.value)
        #ret = g_dll.apply_callee_type(
        #    ctypes.c_uint(here),
        #    type_arr,
        #    typ_fields
        #)
        tinfo = idaapi.tinfo_t()
        #self.logger.info('Trying to deserialize stuff')
        #self.logger.info('Type of til: %s', type(til))
        #self.logger.info('Type of typ_type: %s', type(typ_type))
        ret = g_dll.deserialize_tinfo(
            long(tinfo.this),
            long(til.this), 
            ctypes.byref(typ_type), 
            ctypes.byref(typ_fields),
            ctypes.byref(typ_fieldcmts)
        )
        return tinfo
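
The only platform-dependent part of the ctypes setup above is which IDA library file gets opened. A minimal sketch of that naming scheme (illustrative; it only builds the filename and loads nothing, and note that newer Python versions report 'linux' rather than 'linux2'):

import sys

def ida_library_name(idaname):
    # idaname is "ida" or "ida64", as chosen from idc.__EA64__ above.
    if sys.platform == "win32":
        return idaname + ".wll"
    elif sys.platform == "linux2":
        return "lib" + idaname + ".so"
    elif sys.platform == "darwin":
        return "lib" + idaname + ".dylib"
    raise RuntimeError("unsupported platform: " + sys.platform)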

Example 16

Project: attention-lvcsr
Source File: nvcc_compiler.py
View license
    @staticmethod
    def compile_str(
            module_name, src_code,
            location=None, include_dirs=[], lib_dirs=[], libs=[], preargs=[],
            rpaths=rpath_defaults, py_module=True, hide_symbols=True):
        """

        Parameters
        ----------
        module_name: str
             This has been embedded in the src_code.
        src_code
            A complete c or c++ source listing for the module.
        location
            A pre-existing filesystem directory where the
            cpp file and .so will be written.
        include_dirs
            A list of include directory names (each gets prefixed with -I).
        lib_dirs
            A list of library search path directory names (each gets
            prefixed with -L).
        libs
            A list of libraries to link with (each gets prefixed with -l).
        preargs
            A list of extra compiler arguments.
        rpaths
            List of rpaths to use with Xlinker. Defaults to `rpath_defaults`.
        py_module
            If False, compile to a shared library, but
            do not import as a Python module.
        hide_symbols
            If True (the default), hide all symbols from the library symbol
            table unless explicitly exported.

        Returns
        -------
        module
            Dynamically-imported python module of the compiled code.
            (unless py_module is False, in that case returns None.)

        Notes
        -----
        On Windows 7 with nvcc 3.1 we need to compile in the real directory.
        Otherwise nvcc never finishes.

        """
        # Remove empty string directory
        include_dirs = [d for d in include_dirs if d]
        lib_dirs = [d for d in lib_dirs if d]

        rpaths = list(rpaths)

        if sys.platform == "win32":
            # Remove some compilation args that cl.exe does not understand.
            # cl.exe is the compiler used by nvcc on Windows.
            for a in ["-Wno-write-strings", "-Wno-unused-label",
                      "-Wno-unused-variable", "-fno-math-errno"]:
                if a in preargs:
                    preargs.remove(a)
        if preargs is None:
            preargs = []
        else:
            preargs = list(preargs)
        if sys.platform != 'win32':
            preargs.append('-fPIC')
        if config.cmodule.remove_gxx_opt:
            preargs = [p for p in preargs if not p.startswith('-O')]

        cuda_root = config.cuda.root

        # The include dirs given by the user should have precedence over
        # the standard ones.
        include_dirs = include_dirs + std_include_dirs()
        if os.path.abspath(os.path.split(__file__)[0]) not in include_dirs:
            include_dirs.append(os.path.abspath(os.path.split(__file__)[0]))

        libs = libs + std_libs()
        if 'cudart' not in libs:
            libs.append('cudart')

        lib_dirs = lib_dirs + std_lib_dirs()

        # config.dnn.include_path adds this by default for cudnn in the
        # new back-end. This should not be used in this back-end, so
        # just remove them.
        lib_dirs = [ld for ld in lib_dirs if
                    not(ld == os.path.join(cuda_root, 'lib') or
                        ld == os.path.join(cuda_root, 'lib64'))]

        if sys.platform != 'darwin':
            # sometimes, the linker cannot find -lpython so we need to tell it
            # explicitly where it is located
            # this returns somepath/lib/python2.x
            python_lib = distutils.sysconfig.get_python_lib(plat_specific=1,
                                                            standard_lib=1)
            python_lib = os.path.dirname(python_lib)
            if python_lib not in lib_dirs:
                lib_dirs.append(python_lib)

        cppfilename = os.path.join(location, 'mod.cu')
        with open(cppfilename, 'w') as cppfile:

            _logger.debug('Writing module C++ code to %s', cppfilename)
            cppfile.write(src_code)

        lib_filename = os.path.join(location, '%s.%s' %
                (module_name, get_lib_extension()))

        _logger.debug('Generating shared lib %s', lib_filename)
        # TODO: Why do these args cause failure on gtx285 that has 1.3
        # compute capability? '--gpu-architecture=compute_13',
        # '--gpu-code=compute_13',
        # nvcc argument
        preargs1 = []
        preargs2 = []
        for pa in preargs:
            if pa.startswith('-Wl,'):
                preargs1.append('-Xlinker')
                preargs1.append(pa[4:])
                continue
            for pattern in ['-O', '-arch=', '-ccbin=', '-G', '-g', '-I',
                            '-L', '--fmad', '--ftz', '--maxrregcount',
                            '--prec-div', '--prec-sqrt',  '--use_fast_math',
                            '-fmad', '-ftz', '-maxrregcount',
                            '-prec-div', '-prec-sqrt', '-use_fast_math',
                            '--use-local-env', '--cl-version=']:

                if pa.startswith(pattern):
                    preargs1.append(pa)
                    break
            else:
                preargs2.append(pa)

        # Don't pass -G by default, as it slows things down.
        # We aren't sure whether -g slows things down, so we don't pass it by default.
        cmd = [nvcc_path, '-shared'] + preargs1
        if config.nvcc.compiler_bindir:
            cmd.extend(['--compiler-bindir', config.nvcc.compiler_bindir])

        if sys.platform == 'win32':
            # add flags for Microsoft compiler to create .pdb files
            preargs2.extend(['/Zi', '/MD'])
            cmd.extend(['-Xlinker', '/DEBUG'])
            # remove the complaints for the duplication of `double round(double)`
            # in both math_functions.h and pymath.h,
            # by not including the one in pymath.h
            cmd.extend(['-D HAVE_ROUND'])
        else:
            if hide_symbols:
                preargs2.append('-fvisibility=hidden')

        if local_bitwidth() == 64:
            cmd.append('-m64')
        else:
            cmd.append('-m32')

        if len(preargs2) > 0:
            cmd.extend(['-Xcompiler', ','.join(preargs2)])

        # We should avoid using rpath when possible. If the user provided
        # a cuda.root flag, we need to add one, but otherwise we don't
        # add it. See gh-1540 and
        # https://wiki.debian.org/RpathIssue for details.

        if (not type(config.cuda).root.is_default and
            os.path.exists(os.path.join(config.cuda.root, 'lib'))):

            rpaths.append(os.path.join(config.cuda.root, 'lib'))
            if sys.platform != 'darwin':
                # the CUDA libs are universal (contain both 32-bit and 64-bit)
                rpaths.append(os.path.join(config.cuda.root, 'lib64'))
        if sys.platform != 'win32':
            # the -rpath option is not understood by the Microsoft linker
            for rpath in rpaths:
                cmd.extend(['-Xlinker', ','.join(['-rpath', rpath])])
        cmd.extend('-I%s' % idir for idir in include_dirs)
        cmd.extend(['-o', lib_filename])
        cmd.append(os.path.split(cppfilename)[-1])
        cmd.extend(['-L%s' % ldir for ldir in lib_dirs])
        cmd.extend(['-l%s' % l for l in libs])
        if sys.platform == 'darwin':
            # This tells the compiler to use the already-loaded python
            # symbols (which should always be the right ones).
            cmd.extend(['-Xcompiler', '-undefined,dynamic_lookup'])

        # Remove "-u Symbol" arguments, since they are usually not
        # relevant for the new compilation, even if they were used for
        # compiling python.  If they are necessary, the nvcc syntax is
        # "-U Symbol" with a capital U.
        done = False
        while not done:
            try:
                indexof = cmd.index('-u')
                cmd.pop(indexof)  # Remove -u
                cmd.pop(indexof)  # Remove argument to -u
            except ValueError as e:
                done = True

        # CUDA Toolkit v4.1 Known Issues:
        # The host linker on Mac OS 10.7 (and 10.6 for me) passes the -no_pie
        # option to nvcc; this option is not recognized and generates an error:
        # http://stackoverflow.com/questions/9327265/nvcc-unknown-option-no-pie
        # Passing -Xlinker -pie stops -no_pie from getting passed
        if sys.platform == 'darwin' and nvcc_version >= '4.1':
            cmd.extend(['-Xlinker', '-pie'])

        # cmd.append("--ptxas-options=-v") #uncomment this to see
        # register and shared-mem requirements
        _logger.debug('Running cmd %s', ' '.join(cmd))
        orig_dir = os.getcwd()
        try:
            os.chdir(location)
            p = subprocess.Popen(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            nvcc_stdout, nvcc_stderr = decode_iter(p.communicate()[:2])
        finally:
            os.chdir(orig_dir)

        for eline in nvcc_stderr.split('\n'):
            if not eline:
                continue
            if 'skipping incompatible' in eline:
                # ld is skipping an incompatible library
                continue
            if 'declared but never referenced' in eline:
                continue
            if 'statement is unreachable' in eline:
                continue
            _logger.info("NVCC: %s", eline)

        if p.returncode:
            for i, l in enumerate(src_code.split('\n')):
                print(i + 1, l, file=sys.stderr)
            print('===============================', file=sys.stderr)
            # filter the output from the compiler
            for l in nvcc_stderr.split('\n'):
                if not l:
                    continue
                # filter out the annoying declaration warnings

                try:
                    if l[l.index(':'):].startswith(': warning: variable'):
                        continue
                    if l[l.index(':'):].startswith(': warning: label'):
                        continue
                except Exception:
                    pass
                print(l, file=sys.stderr)
            print(nvcc_stdout)
            print(cmd)
            raise Exception('nvcc return status', p.returncode,
                            'for cmd', ' '.join(cmd))
        elif config.cmodule.compilation_warning and nvcc_stdout:
            print(nvcc_stdout)

        if nvcc_stdout:
            # this doesn't happen to my knowledge
            print("DEBUG: nvcc STDOUT", nvcc_stdout, file=sys.stderr)

        if py_module:
            # touch the __init__ file
            open(os.path.join(location, "__init__.py"), 'w').close()
            return dlimport(lib_filename)
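
A minimal, hypothetical sketch of the platform-conditional flag handling this example relies on (the helper name and flag list are illustrative, not Theano's API): nvcc delegates host compilation to cl.exe on win32 and to gcc/clang elsewhere, so the flags have to be filtered per sys.platform.

import sys

def adjust_preargs(preargs=None):
    # Hypothetical helper: drop GCC-only switches when nvcc will use
    # cl.exe (win32), and request position-independent code elsewhere.
    preargs = list(preargs or [])
    if sys.platform == "win32":
        for flag in ("-Wno-write-strings", "-fno-math-errno"):
            if flag in preargs:
                preargs.remove(flag)
    else:
        preargs.append("-fPIC")
    return preargs

print(adjust_preargs(["-O3", "-fno-math-errno"]))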

Example 17

Project: MCEdit-Unified
Source File: options.py
View license
    def initComponents(self):
        """Initialize the window components. Call this after the translation has been loaded."""
        autoBrakeRow = albow.CheckBoxLabel("Autobrake",
                                              ref=config.controls.autobrake,
                                              tooltipText="Apply brake when not pressing movement keys")

        swapAxesRow = albow.CheckBoxLabel("Swap Axes Looking Down",
                                             ref=config.controls.swapAxes,
                                             tooltipText="Change the direction of the Forward and Backward keys when looking down")

        cameraAccelRow = albow.FloatInputRow("Camera Acceleration: ",
                                                ref=config.controls.cameraAccel, width=100, min=5.0)

        cameraDragRow = albow.FloatInputRow("Camera Drag: ",
                                               ref=config.controls.cameraDrag, width=100, min=1.0)

        cameraMaxSpeedRow = albow.FloatInputRow("Camera Max Speed: ",
                                                   ref=config.controls.cameraMaxSpeed, width=100, min=1.0)

        cameraBrakeSpeedRow = albow.FloatInputRow("Camera Braking Speed: ",
                                                     ref=config.controls.cameraBrakingSpeed, width=100,
                                                     min=1.0)

        mouseSpeedRow = albow.FloatInputRow("Mouse Speed: ",
                                               ref=config.controls.mouseSpeed, width=100, min=0.1,
                                               max=20.0)

        undoLimitRow = albow.IntInputRow("Undo Limit: ",
                                            ref=config.settings.undoLimit, width=100, min=0)

        maxCopiesRow = albow.IntInputRow("Copy Stack Size: ",
                                            ref=config.settings.maxCopies, width=100, min=0,
                                            tooltipText="Maximum number of copied objects.")

        compassSizeRow = albow.IntInputRow("Compass Size (%): ",
                                            ref=config.settings.compassSize, width=100, min=0, max=100)

        fontProportion = albow.IntInputRow("Fonts Proportion (%): ",
                                            ref=config.settings.fontProportion, width=100, min=0,
                                            tooltipText="Fonts sizing proportion. The number is a percentage.\nRestart needed!")
        albow.resource.font_proportion = config.settings.fontProportion.get()

        fogIntensityRow = albow.IntInputRow("Fog Intensity (%): ",
                                            ref=config.settings.fogIntensity, width=100, min=0, max=100)

        invertRow = albow.CheckBoxLabel("Invert Mouse",
                                           ref=config.controls.invertMousePitch,
                                           tooltipText="Reverse the up and down motion of the mouse.")

        spaceHeightRow = albow.IntInputRow("Low Detail Height",
                                              ref=config.settings.spaceHeight,
                                              tooltipText="When you are this far above the top of the world, move fast and use low-detail mode.")

        blockBufferRow = albow.IntInputRow("Block Buffer (MB):",
                                              ref=albow.AttrRef(self, 'blockBuffer'), min=1,
                                              tooltipText="Amount of memory used for temporary storage.  When more than this is needed, the disk is used instead.")

        setWindowPlacementRow = albow.CheckBoxLabel("Set Window Placement",
                                                       ref=config.settings.setWindowPlacement,
                                                       tooltipText="Try to save and restore the window position.")

        rotateBlockBrushRow = albow.CheckBoxLabel("Rotate block with brush",
                                                        ref=config.settings.rotateBlockBrush,
                                                        tooltipText="When rotating your brush, also rotate the orientation of the block you're brushing with")

        compassToggleRow =albow.CheckBoxLabel("Toggle compass",
                                                        ref=config.settings.compassToggle)

        windowSizeRow = albow.CheckBoxLabel("Window Resize Alert",
                                               ref=config.settings.shouldResizeAlert,
                                               tooltipText="Reminds you that the cursor won't work correctly after resizing the window.")

        superSecretSettingsRow = albow.CheckBoxLabel("Super Secret Settings",
                                                ref=config.settings.superSecretSettings,
                                                tooltipText="Weird stuff happen!")

        longDistanceRow = albow.CheckBoxLabel("Long-Distance Mode",
                                                 ref=config.settings.longDistanceMode,
                                                 tooltipText="Always target the farthest block under the cursor, even in mouselook mode.")

        flyModeRow = albow.CheckBoxLabel("Fly Mode",
                                            ref=config.settings.flyMode,
                                            tooltipText="Moving forward and Backward will not change your altitude in Fly Mode.")
        
        showCommandsRow = albow.CheckBoxLabel("Show Block Info when hovering",
                                              ref=config.settings.showQuickBlockInfo,
                                              tooltipText="Shows summarized info of some Blocks when hovering over it.")

        cancelCommandBlockOffset = albow.CheckBoxLabel("Cancel Command Block Offset",
                                                       ref=config.schematicCopying.cancelCommandBlockOffset,
                                                       tooltipText="Cancels the command blocks coords changed when copied.")

        lng = config.settings.langCode.get()

        langs = sorted(self.getLanguageChoices().items())

        langNames = [k for k, v in langs]

        self.languageButton = albow.ChoiceButton(langNames, choose=self.changeLanguage, doNotTranslate=True)
        if self.sgnal[lng] in self.languageButton.choices:
            self.languageButton.selectedChoice = self.sgnal[lng]

        langButtonRow = albow.Row((albow.Label("Language", tooltipText="Choose your language."), self.languageButton))

        portableList = ["Portable", "Fixed"]
        self.goPortableButton = goPortableButton = albow.ChoiceButton(portableList, choose=self.togglePortable)
        goPortableButton.selectedChoice = self.saveOldPortable

        goPortableButton.tooltipText = self.portableButtonTooltip()
        goPortableRow = albow.Row((albow.Label("Install Mode"), goPortableButton))

# Disabled Crash Reporting Option
#       reportRow = albow.CheckBoxLabel("Report Errors",
#                                          ref=config.settings.reportCrashes,
#                                          tooltipText="Automatically report errors to the developer.")

        self.inputs = (
            spaceHeightRow,
            cameraAccelRow,
            cameraDragRow,
            cameraMaxSpeedRow,
            cameraBrakeSpeedRow,
            blockBufferRow,
            mouseSpeedRow,
            undoLimitRow,
            maxCopiesRow,
            compassSizeRow,
            fontProportion,
            fogIntensityRow,
        )

        options = (
                    longDistanceRow,
                    flyModeRow,
                    autoBrakeRow,
                    swapAxesRow,
                    invertRow,
                    superSecretSettingsRow,
                    rotateBlockBrushRow,
                    compassToggleRow,
                    showCommandsRow,
                    cancelCommandBlockOffset,
                    langButtonRow,
                    ) + (
                        ((sys.platform == "win32" and pygame.version.vernum == (1, 9, 1)) and (windowSizeRow,) or ())
                    ) + (
                        (sys.platform == "win32") and (setWindowPlacementRow,) or ()
                    ) + (
                        (not sys.platform == "darwin") and (goPortableRow,) or ()
                    )

        rightcol = albow.Column(options, align='r')
        leftcol = albow.Column(self.inputs, align='r')

        optionsColumn = albow.Column((albow.Label("Options"),
                                      albow.Row((leftcol, rightcol), align="t")))

        settingsRow = albow.Row((optionsColumn,))

        buttonsRow = albow.Row((albow.Button("OK", action=self.dismiss), albow.Button("Cancel", action=self.cancel)))

        resetToDefaultRow = albow.Row((albow.Button("Reset to default", action=self.resetDefault),))

        optionsColumn = albow.Column((settingsRow, buttonsRow, resetToDefaultRow))
        optionsColumn.key_down = self.key_down

        self.add(optionsColumn)
        self.shrink_wrap()
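
The options tuple above is assembled with chained and/or expressions keyed on sys.platform and the pygame version. Written as plain conditionals, the same selection might look like the hedged sketch below (the row values are placeholders, not MCEdit widgets).

import sys

def platform_option_rows(base_rows, window_size_row, window_placement_row,
                         portable_row, pygame_vernum=(1, 9, 1)):
    # Placeholder strings stand in for the albow rows used above.
    rows = list(base_rows)
    if sys.platform == "win32" and pygame_vernum == (1, 9, 1):
        rows.append(window_size_row)        # resize alert, Windows only
    if sys.platform == "win32":
        rows.append(window_placement_row)   # window placement, Windows only
    if sys.platform != "darwin":
        rows.append(portable_row)           # install mode, not on macOS
    return rows

print(platform_option_rows(["autobrake"], "resize alert", "placement", "portable"))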

Example 18

Project: alignak
Source File: install_hooks.py
View license
def fix_alignak_cfg(config):
    """
    Fix paths, user and group in alignak.cfg and daemons/*.ini
    Called once all files are copied.

    :param config:
    :return:
    """
    default_paths = {
        'workdir': '/var/run/alignak',
        'logdir': '/var/log/alignak',
        # TODO: confirm this is not useful...
        'modules_dir': '/var/lib/alignak/modules',
        'plugins_dir': '/var/libexec/alignak',

        'lock_file': '/var/run/alignak/arbiterd.pid',
        'local_log': '/var/log/alignak/arbiterd.log',
        'pidfile': '/var/run/alignak/arbiterd.pid',

        'pack_distribution_file': '/var/lib/alignak/pack_distribution.dat'
    }

    default_macros = {
        'LOGSDIR': '/var/log/alignak',
        'PLUGINSDIR': '/var/libexec/alignak',
    }

    default_ssl = {
        'ca_cert': '/etc/alignak/certs/ca.pem',
        'server_cert': '/etc/alignak/certs/server.cert',
        'server_key': '/etc/alignak/certs/server.key',
    }

    # Changing default user/group if root
    default_users = {}
    if getpass.getuser() == 'root':
        default_users['alignak_user'] = 'alignak'
        default_users['alignak_group'] = 'alignak'
        default_users['user'] = 'alignak'
        default_users['group'] = 'alignak'
        default_users['ALIGNAKUSER'] = 'alignak'
        default_users['ALIGNAKGROUP'] = 'alignak'
        default_users['HOME'] = '`grep ^$ALIGNAKUSER: /etc/passwd | cut -d: -f 6`'

    # Prepare pattern for alignak.cfg
    pattern = "|".join(default_paths.keys())
    changing_path = re.compile("^(%s) *= *" % pattern)
    pattern = "|".join(default_users.keys())
    changing_user = re.compile("^#(%s) *= *" % pattern)
    pattern = "|".join(default_ssl.keys())
    changing_ssl = re.compile("^#(%s) *= *" % pattern)
    pattern = "|".join(default_macros.keys())
    changing_mac = re.compile("^\$(%s)\$ *= *" % pattern)

    # Fix resource paths
    alignak_file = os.path.join(
        config.install_dir, "etc", "alignak", "arbiter", "resource.d", "paths.cfg"
    )
    if not os.path.exists(alignak_file):
        print(
            "\n"
            "================================================================================\n"
            "==  The configuration file '%s' is missing.                                   ==\n"
            "================================================================================\n"
            % alignak_file
        )

    for line in fileinput.input(alignak_file, inplace=True):
        line = line.strip()
        mac_attr_name = changing_mac.match(line)
        if mac_attr_name:
            new_path = os.path.join(config.install_dir,
                                    default_macros[mac_attr_name.group(1)].strip("/"))
            print("$%s$=%s" % (mac_attr_name.group(1),
                             new_path))
        else:
            print(line)

    # Fix alignak.cfg
    alignak_file = os.path.join(config.install_dir, "etc", "alignak", "alignak.cfg")
    if not os.path.exists(alignak_file):
        print(
            "\n"
            "================================================================================\n"
            "==  The configuration file '%s' is missing.                                   ==\n"
            "================================================================================\n"
            % alignak_file
        )

    for line in fileinput.input(alignak_file, inplace=True):
        line = line.strip()
        path_attr_name = changing_path.match(line)
        user_attr_name = changing_user.match(line)
        ssl_attr_name = changing_ssl.match(line)
        if path_attr_name:
            new_path = os.path.join(config.install_dir,
                                    default_paths[path_attr_name.group(1)].strip("/"))
            print("%s=%s" % (path_attr_name.group(1),
                             new_path))
        elif user_attr_name:
            print("#%s=%s" % (user_attr_name.group(1),
                             default_users[user_attr_name.group(1)]))
        elif ssl_attr_name:
            new_path = os.path.join(config.install_dir,
                                    default_ssl[ssl_attr_name.group(1)].strip("/"))
            print("#%s=%s" % (ssl_attr_name.group(1),
                             new_path))
        else:
            print(line)

    # Handle daemons ini files
    for ini_file in ["arbiterd.ini", "brokerd.ini", "schedulerd.ini",
                     "pollerd.ini", "reactionnerd.ini", "receiverd.ini"]:
        # Prepare pattern for ini files
        daemon_name = ini_file.strip(".ini")
        default_paths['lock_file'] = '/var/run/alignak/%s.pid' % daemon_name
        default_paths['local_log'] = '/var/log/alignak/%s.log' % daemon_name
        default_paths['pidfile'] = '/var/run/alignak/%s.pid' % daemon_name
        pattern = "|".join(default_paths.keys())
        changing_path = re.compile("^(%s) *= *" % pattern)

        # Fix ini file
        alignak_file = os.path.join(config.install_dir, "etc", "alignak", "daemons", ini_file)
        if not os.path.exists(alignak_file):
            print(
                "\n"
                "================================================================================\n"
                "==  The configuration file '%s' is missing.                                   ==\n"
                "================================================================================\n"
                % alignak_file
            )

        for line in fileinput.input(alignak_file, inplace=True):
            line = line.strip()
            path_attr_name = changing_path.match(line)
            user_attr_name = changing_user.match(line)
            ssl_attr_name = changing_ssl.match(line)
            if path_attr_name:
                new_path = os.path.join(config.install_dir,
                                        default_paths[path_attr_name.group(1)].strip("/"))
                print("%s=%s" % (path_attr_name.group(1),
                                 new_path))
            elif user_attr_name:
                print("#%s=%s" % (user_attr_name.group(1),
                                 default_users[user_attr_name.group(1)]))
            elif ssl_attr_name:
                new_path = os.path.join(config.install_dir,
                                        default_ssl[ssl_attr_name.group(1)].strip("/"))
                print("#%s=%s" % (ssl_attr_name.group(1),
                                 new_path))
            else:
                print(line)

    # Handle default/alignak
    if 'linux' in sys.platform or 'sunos5' in sys.platform:
        old_name = os.path.join(config.install_dir, "etc", "default", "alignak.in")
        if not os.path.exists(old_name):
            print("\n"
                  "=======================================================================================================\n"
                  "==  The configuration file '%s' is missing.\n"
                  "=======================================================================================================\n"
                  % alignak_file)

        new_name = os.path.join(config.install_dir, "etc", "default", "alignak")
        try:
            os.rename(old_name, new_name)
        except OSError as e:
            print("\n"
                  "=======================================================================================================\n"
                  "==  The configuration file '%s' could not be renamed to '%s'.\n"
                  "==  The newly installed configuration will not be up-to-date.\n"
                  "=======================================================================================================\n"
                  % (old_name, new_name))

        default_paths = {
            'ETC': '/etc/alignak',
            'VAR': '/var/lib/alignak',
            'BIN': '/bin',
            'RUN': '/var/run/alignak',
            'LOG': '/var/log/alignak',
            'LIB': '/var/libexec/alignak',
        }
        pattern = "|".join(default_paths.keys())
        changing_path = re.compile("^(%s) *= *" % pattern)
        for line in fileinput.input(new_name,  inplace=True):
            line = line.strip()
            path_attr_name = changing_path.match(line)
            user_attr_name = changing_user.match(line)
            if path_attr_name:
                new_path = os.path.join(config.install_dir,
                                        default_paths[path_attr_name.group(1)].strip("/"))
                print("%s=%s" % (path_attr_name.group(1),
                                 new_path))
            elif user_attr_name:
                print("#%s=%s" % (user_attr_name.group(1),
                                 default_users[user_attr_name.group(1)]))

            else:
                print(line)

    # Alignak run script
    alignak_run = ''
    if 'win' in sys.platform:
        pass
    elif 'linux' in sys.platform or 'sunos5' in sys.platform:
        alignak_run = os.path.join(config.install_dir, "etc", "init.d", "alignak start")
    elif 'bsd' in sys.platform or 'dragonfly' in sys.platform:
        alignak_run = os.path.join(config.install_dir, "etc", "rc.d", "alignak start")

    # Alignak configuration root directory
    alignak_etc = os.path.join(config.install_dir, "etc", "alignak")

    # Add ENV vars only if we are in virtualenv
    # in order to get init scripts working
    if 'VIRTUAL_ENV' in os.environ:
        activate_file = os.path.join(os.environ.get("VIRTUAL_ENV"), 'bin', 'activate')
        try:
            afd = open(activate_file, 'r+')
        except Exception as exp:
            print(exp)
            raise Exception("Virtual environment error")

        env_config = ("""export PYTHON_EGG_CACHE=.\n"""
                      """export ALIGNAK_DEFAULT_FILE=%s/etc/default/alignak\n"""
                      % os.environ.get("VIRTUAL_ENV"))
        alignak_etc = "%s/etc/alignak" % os.environ.get("VIRTUAL_ENV")
        alignak_run = "%s/etc/init.d alignak start" % os.environ.get("VIRTUAL_ENV")

        if afd.read().find(env_config) == -1:
            afd.write(env_config)
            print(
                "\n"
                "================================================================================\n"
                "==                                                                            ==\n"
                "==  You need to REsource env/bin/activate in order to set appropriate         ==\n"
                "== variables to use init scripts                                              ==\n"
                "==                                                                            ==\n"
                "================================================================================\n"
            )

    print("\n"
          "================================================================================\n"
          "==                                                                            ==\n"
          "==  The installation succeeded.                                               ==\n"
          "==                                                                            ==\n"
          "== -------------------------------------------------------------------------- ==\n"
          "==                                                                            ==\n"
          "== You can run Alignak with:                                                  ==\n"
          "==   %s\n"
          "==                                                                            ==\n"
          "== The default installed configuration is located here:                       ==\n"
          "==   %s\n"
          "==                                                                            ==\n"
          "== You will find more information about Alignak configuration here:           ==\n"
          "==   http://alignak-doc.readthedocs.io/en/latest/04_configuration/index.html  ==\n"
          "==                                                                            ==\n"
          "== -------------------------------------------------------------------------- ==\n"
          "==                                                                            ==\n"
          "== You should grant the write permissions on the configuration directory to   ==\n"
          "== the user alignak:                                                          ==\n"
          "==   find %s -type f -exec chmod 664 {} +\n"
          "==   find %s -type d -exec chmod 775 {} +\n"
          "== -------------------------------------------------------------------------- ==\n"
          "==                                                                            ==\n"
          "== You should also grant ownership on those directories to the user alignak:  ==\n"
          "==   chown -R alignak:alignak /usr/local/var/run/alignak                      ==\n"
          "==   chown -R alignak:alignak /usr/local/var/log/alignak                      ==\n"
          "==   chown -R alignak:alignak /usr/local/var/libexec/alignak                  ==\n"
          "==                                                                            ==\n"
          "== -------------------------------------------------------------------------- ==\n"
          "==                                                                            ==\n"
          "== Please note that installing Alignak with the setup.py script is not the    ==\n"
          "== recommended way. You'd rather use the packaging built for your OS          ==\n"
          "== distribution that you can find here:                                       ==\n"
          "==   http://alignak-monitoring.github.io/download/                            ==\n"
          "==                                                                            ==\n"
          "================================================================================\n"
          % (alignak_run, alignak_etc, alignak_etc, alignak_etc)
          )

    # Check Alignak recommended user existence
    if not user_exists('alignak'):
        print(
            "\n"
            "================================================================================\n"
            "==                                                                            ==\n"
            "== The user account 'alignak' does not exist on your system.                  ==\n"
            "==                                                                            ==\n"
            "================================================================================\n"
        )

    if not group_exists('alignak'):
        print(
            "\n"
            "================================================================================\n"
            "==                                                                            ==\n"
            "== The user group 'alignak' does not exist on your system.                    ==\n"
            "==                                                                            ==\n"
            "================================================================================\n"
        )
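
One caveat about the dispatch above: the test 'win' in sys.platform is also true on 'darwin' and 'cygwin', so prefix checks are usually the safer spelling. A minimal sketch of the same classification (the helper and return values are mine, not alignak's):

import sys

def init_script_dir():
    # Rough platform classification mirroring the hook above, but with
    # prefix checks; 'win' in sys.platform would also match 'darwin'.
    if sys.platform.startswith("win"):
        return None                      # no init script on Windows
    if sys.platform.startswith(("linux", "sunos")):
        return "etc/init.d"
    if "bsd" in sys.platform or sys.platform.startswith("dragonfly"):
        return "etc/rc.d"
    return None

print(init_script_dir())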

Example 19

Project: nuxeo-drive
Source File: setup.py
View license
    def __init__(self, driveAttributes):

        from distutils.core import setup

        attribs = driveAttributes
        freeze_options = {}
        ext_modules = []

        script = attribs.get_script()
        scripts = attribs.get_scripts()
        name = attribs.get_name()
        packages = Packages(attribs.get_package_dirs()).load()

        # special handling for data files, except for Linux
        if ((sys.platform == "win32" or sys.platform == 'darwin')
                and 'nxdrive.data' in packages):
            packages.remove('nxdrive.data')
        package_data = attribs.get_package_data()
        icons_home = attribs.get_icons_home()
        ui5_home = attribs.get_ui5_home()

        win_icon = os.path.join(icons_home, attribs.get_win_icon())
        png_icon = os.path.join(icons_home, attribs.get_png_icon())
        osx_icon = os.path.join(icons_home, attribs.get_osx_icon())

        if sys.platform == 'win32':
            icon = win_icon
        elif sys.platform == 'darwin':
            icon = osx_icon
        else:
            icon = png_icon

        # Files to include in frozen app
        # build_exe freeze with cx_Freeze (Windows)
        include_files = attribs.get_includes()
        # bdist_esky freeze with cx_Freeze (Windows) and py2app (OS X)
        # In fact this is a global setup option
        # TODO NXP-13810: check removed data_files from py2app and added to
        # global setup
        icon_files = data_file_dir(icons_home, 'icons', include_files).load()
        ui5_files = data_file_dir(ui5_home, 'ui5', include_files).load_recursive()
        data_files = [('icons', icon_files)]
        data_files.extend(ui5_files)
        data_files.extend(attribs.get_data_files())
        old_version = None
        init_file = attribs.get_init_file()
        version = read_version(init_file)

        if '-dev' in version:
            # timestamp the dev artifacts as distutils only accepts "b" + digit
            timestamp = datetime.utcnow().isoformat()
            timestamp = timestamp.replace(":", "")
            timestamp = timestamp.replace(".", "")
            timestamp = timestamp.replace("T", "")
            timestamp = timestamp.replace("-", "")
            old_version = version
            # distutils imposes a max 3 levels integer version
            # (+ prerelease markers which are not allowed in a
            # msi package version). On the other hand,
            # msi imposes the a.b.c.0 or a.b.c.d format where
            # a, b, c and d are all 16 bits integers
            # TODO: align on latest distutils versioning
            month_day = timestamp[4:8]
            if month_day.startswith('0'):
                month_day = month_day[1:]
            version = version.replace('-dev', ".%s" % (
                month_day))
            update_version(init_file, version)
            print "Updated version to " + version

        # Create JSON metadata file for the frozen application
        json_file = create_json_metadata(version, SERVER_MIN_VERSION)
        print "Created JSON metadata file for frozen app: " + json_file

        includes = [
            "PyQt4",
            "PyQt4.QtCore",
            "PyQt4.QtNetwork",
            "PyQt4.QtGui",
            "atexit"  # implicitly required by PyQt4
        ]
        if attribs.include_xattr_binaries():
            includes.append('cffi')
            includes.append('xattr')

        attribs.append_includes(includes)
        excludes = [
            "ipdb",
            "clf",
            "IronPython",
            "pydoc",
            "tkinter",
        ]
        if not attribs.include_xattr_binaries():
            excludes.append('cffi')
            excludes.append('xattr')

        if '--freeze' in sys.argv:
            print "Building standalone executable..."
            sys.argv.remove('--freeze')
            from nx_cx_Freeze import setup
            from cx_Freeze import Executable as cx_Executable
            from esky.util import get_platform

            # build_exe does not seem to take the package_dir info into account
            sys.path.append(attribs.get_path_append())

            executables = [cx_Executable(script)]
            freeze_options = dict()
            if sys.platform == "win32":
                # Windows GUI program that can be launched without a cmd
                # console
                script_w = attribs.get_win_script()
                if script_w is not None:
                    scripts.append(
                        es_Executable(script_w, icon=icon,
                                      shortcutDir="ProgramMenuFolder",
                                      shortcutName=attribs.shortcutName()))

                    executables.append(
                        cx_Executable(script_w,
                                      targetName=attribs.get_win_targetName(),
                                      base="Win32GUI", icon=icon,
                                      shortcutDir="ProgramMenuFolder",
                                      shortcutName=attribs.shortcutName()))
                freeze_options.update({'attribs': attribs})

            package_data = {}
            esky_app_name = (attribs.get_name()
                             + '-' + version + '.' + get_platform())
            esky_dist_dir = os.path.join(OUTPUT_DIR, esky_app_name)
            freeze_options.update({
                'executables': executables,
                'options': {
                    "build": {
                        "exe_command": "bdist_esky",
                    },
                    "build_exe": {
                        "includes": includes,
                        "packages": packages + [
                            "nose",
                        ],
                        "excludes": excludes,
                        "include_files": include_files,
                    },
                    "bdist_esky": {
                        "excludes": excludes,
                        "enable_appdata_dir": True,
                        "freezer_options": {
                            "packages": packages + [
                                "nose",
                            ],
                        },
                        "rm_freeze_dir_after_zipping": False,
                    },
                    "install": {
                        "skip_sub_commands":
                            "install_lib,install_scripts,install_data",
                    },
                    "install_exe": {
                        "skip_build": True,
                        "build_dir": esky_dist_dir,
                    },
                    "bdist_msi": {
                        "add_to_path": True,
                        "upgrade_code":
                            attribs.get_uid(),
                    },
                },
            })

            # Include cffi compiled C extension under Linux
            if sys.platform.startswith('linux') and attribs.include_xattr_binaries():
                import xattr
                includeFiles = [(os.path.join(os.path.dirname(xattr.__file__), '_cffi__x7c9e2f59xb862c7dd.so'),
                                 '_cffi__x7c9e2f59xb862c7dd.so')]
                freeze_options['options']['bdist_esky']['freezer_options'].update({
                    "includeFiles": includeFiles
                })

        if sys.platform == 'darwin':
            # Under OSX we use py2app instead of cx_Freeze because we need:
            # - argv_emulation=True for nxdrive:// URL scheme handling
            # - easy Info.plist customization
            import py2app  # install the py2app command
            if attribs.include_xattr_binaries():
                import xattr
                ext_modules = [xattr.lib.ffi.verifier.get_extension()]
                includes.append("_cffi__x7c9e2f59xb862c7dd")
            name = attribs.get_CFBundleName()
            py2app_options = dict(
                iconfile=icon,
                qt_plugins='imageformats',
                argv_emulation=False,  # We use QT for URL scheme handling
                plist=dict(
                    CFBundleDisplayName=attribs.get_CFBundleDisplayName(),
                    CFBundleName=attribs.get_CFBundleName(),
                    CFBundleIdentifier=attribs.get_CFBundleIdentifier(),
                    LSUIElement=True,  # Do not launch as a Dock application
                    CFBundleURLTypes=[
                        dict(
                            CFBundleURLName=attribs.get_CFBundleURLName(),
                            CFBundleURLSchemes=(attribs
                                                .get_CFBundleURLSchemes()),
                        )
                    ],
                    NSServices=[
                        dict(
                            NSMenuItem=dict(
                                default=attribs.get_CFBundleDisplayName()
                            ),
                            NSMessage=u"macRightClick",
                            NSPortName=attribs.get_CFBundleDisplayName(),
                            NSRequiredContext=dict(),
                            NSSendTypes=[
                                u'NSStringPboardType',
                            ],
                            NSSendFileTypes=[
                                u"public.item"
                            ]
                        )
                    ]
                ),
                includes=includes,
                excludes=excludes,
            )
            freeze_options = dict(
                app=attribs.get_app(),
                options=dict(
                    py2app=py2app_options,
                    bdist_esky=dict(
                        enable_appdata_dir=True,
                        create_zipfile=False,
                        freezer_options=py2app_options,
                    )
                )
            )
        setup(
            name=name,
            version=version,
            description=attribs.get_description(),
            author=attribs.get_author(),
            author_email=attribs.get_author_email(),
            url=attribs.get_url(),
            packages=packages,
            package_dir=attribs.get_package_dir(),
            package_data=package_data,
            scripts=scripts,
            long_description=attribs.get_long_description(),
            data_files=data_files,
            ext_modules=ext_modules,
            **freeze_options
        )

        if old_version is not None:
            update_version(init_file, old_version)
            print "Restored version to " + old_version
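
Reduced to its platform logic, the setup script above selects a different freezing toolchain for each sys.platform value; a hedged summary of that mapping (illustrative only, not part of the project's API):

import sys

def freezer_for_platform():
    # Illustrative mapping of the choices made in the setup script above.
    if sys.platform == "darwin":
        return "py2app"      # Info.plist customization, URL scheme handling
    if sys.platform == "win32":
        return "cx_Freeze"   # Win32GUI executable and MSI packaging
    if sys.platform.startswith("linux"):
        return "cx_Freeze"   # plus bundled cffi/xattr extension files
    return "cx_Freeze"

print(freezer_for_platform())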

Example 20

Project: pydgin
Source File: build.py
View license
def build_target( name, pypy_dir, build_dir, extra_rpython_flags ):

  # use the name to determine the arch, jit, softfloat requirement, and debug

  arch = None
  require_softfloat = False
  if "parc" in name:
    arch = "parc"
  if "arm" in name:
    assert arch is None, "conflicting arch definitions {} and {}" \
                         .format( arch, "arm" )
    arch = "arm"
  if "riscv" in name:
    assert arch is None, "conflicting arch definitions {} and {}" \
                         .format( arch, "riscv" )
    arch = "riscv"
    # risc-v is the only architecture that requires softfloat for now
    require_softfloat = True
  assert arch is not None, "could not determine arch from name"

  # check if we have already built softfloat and if not, build it
  if require_softfloat:
    # check os to find which extension to check for (we only support mac
    # or linux)
    assert sys.platform == "linux" or sys.platform == "linux2" \
          or sys.platform == "darwin"

    softfloat_file = "libsoftfloat.dylib" if sys.platform == "darwin" \
          else "libsoftfloat.so"

    print "softfloat is required, checking if {} exists..." \
          .format( softfloat_file ),
    found_softfloat = os.path.isfile( softfloat_file )

    if not found_softfloat:
      print "no"
      print "calling build-softfloat.py to build it"
      cmd = "../scripts/build-softfloat.py"
      print cmd
      ret = subprocess.call( cmd, shell=True )

      # check for success and if the file exists

      if ret != 0:
        print "softfloat library could not be built, aborting!"
        sys.exit( ret )

      if not os.path.isfile( softfloat_file ):
        print "{} could not be found, aborting!".format( softfloat_file )
        sys.exit( ret )

    else:
      print "yes"

  if "jit" in name and "nojit" not in name:
    jit = True
  elif "nojit" in name:
    jit = False
  else:
    # default behavior if neither jit nor nojit is in the name
    jit = True

  if "debug" in name and "nodebug" not in name:
    debug = True
  elif "nodebug" in name:
    debug = False
  else:
    # default behavior if neither debug nor nodebug is in the name
    debug = False

  print "Building {}\n  arch: {}\n  jit: {}\n  debug: {}\n" \
        .format( name, arch, jit, debug )

  # check for the pypy executable, if it doesn't exist warn

  python_bin = distutils.spawn.find_executable('pypy')
  if not python_bin:
    print ('WARNING: Cannot find a pypy executable!\n'
           '  Proceeding to translate with CPython.\n'
           '  Note that this will be *much* slower than using pypy.\n'
           '  Please install pypy for faster translation times!\n')
    python_bin = 'python'

  # create the translation command and execute it

  os.chdir('../{}'.format( arch ) )
  cmd = ( '{python_bin} {pypy_dir}/rpython/bin/rpython {rpython_opts} '
          '{arch}-sim.py {pydgin_opts}' ) \
          .format( arch=arch, pypy_dir=pypy_dir,
                   rpython_opts=( extra_rpython_flags +
                                  ("--opt=jit" if jit   else "") ),
                   pydgin_opts =( "--debug"   if debug else "" ),
                   python_bin=python_bin )

  print cmd
  ret = subprocess.call( cmd, shell=True )

  # check for success and cleanup

  if ret != 0:
    print "{} failed building, aborting!".format( name )
    sys.exit( ret )

  # for some reason, -rpath to the linker doesn't seem to work on macs?
  # we patch the binary generated to add the exact dir of libsoftfloat.so

  if require_softfloat and sys.platform == "darwin":
    cmd = "install_name_tool -change {short_so} {full_so} {pydgin}" \
          .format( short_so="libsoftfloat.so",
                   full_so="{}/../../../libsoftfloat.so".format( build_dir ),
                   pydgin=name )
    print cmd
    subprocess.call( cmd, shell=True )

  shutil.copy( name, '{}'.format( build_dir ) )
  symlink_name = '{}/../{}'.format( build_dir, name )
  if os.path.lexists( symlink_name ):
    os.remove( symlink_name )
  os.symlink( '{}/{}'.format( build_dir, name ), symlink_name )
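
The assertion above accepts both "linux" and "linux2" because Python 2 reports 'linux2' while Python 3.3+ reports plain 'linux'; a prefix check covers both, as in this small sketch (the helper name is hypothetical):

import sys

def softfloat_library_name():
    # Only Linux and macOS are supported, matching the build script above.
    if sys.platform.startswith("linux"):    # 'linux' (py3) and 'linux2' (py2)
        return "libsoftfloat.so"
    if sys.platform == "darwin":
        return "libsoftfloat.dylib"
    raise RuntimeError("unsupported platform: %s" % sys.platform)

print(softfloat_library_name())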

Example 21

Project: dd-agent
Source File: unix.py
View license
    def check(self, agentConfig):
        """Return an aggregate of CPU stats across all CPUs
        When figures are not available, False is sent back.
        """
        def format_results(us, sy, wa, idle, st, guest=None):
            data = {'cpuUser': us, 'cpuSystem': sy, 'cpuWait': wa, 'cpuIdle': idle, 'cpuStolen': st, 'cpuGuest': guest}
            return dict((k, v) for k, v in data.iteritems() if v is not None)

        def get_value(legend, data, name, filter_value=None):
            "Using the legend and a metric name, get the value or None from the data line"
            if name in legend:
                value = to_float(data[legend.index(name)])
                if filter_value is not None:
                    if value > filter_value:
                        return None
                return value

            else:
                # FIXME return a float or False, would trigger type error if not python
                self.logger.debug("Cannot extract cpu value %s from %s (%s)" % (name, data, legend))
                return 0.0
        try:
            if Platform.is_linux():
                output, _, _ = get_subprocess_output(['mpstat', '1', '3'], self.logger)
                mpstat = output.splitlines()
                # [email protected]:~$ mpstat 1 3
                # Linux 2.6.32-341-ec2 (ip)   01/19/2012  _x86_64_  (2 CPU)
                #
                # 04:22:41 PM  CPU    %usr   %nice    %sys %iowait    %irq   %soft  %steal  %guest   %idle
                # 04:22:42 PM  all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                # 04:22:43 PM  all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                # 04:22:44 PM  all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                # Average:     all    0.00    0.00    0.00    0.00    0.00    0.00    0.00    0.00  100.00
                #
                # OR
                #
                # Thanks to Mart Visser for spotting this one.
                # blah:/etc/dd-agent# mpstat
                # Linux 2.6.26-2-xen-amd64 (atira)  02/17/2012  _x86_64_
                #
                # 05:27:03 PM  CPU    %user   %nice   %sys %iowait    %irq   %soft  %steal  %idle   intr/s
                # 05:27:03 PM  all    3.59    0.00    0.68    0.69    0.00   0.00    0.01   95.03    43.65
                #
                legend = [l for l in mpstat if "%usr" in l or "%user" in l]
                avg = [l for l in mpstat if "Average" in l]
                if len(legend) == 1 and len(avg) == 1:
                    headers = [h for h in legend[0].split() if h not in ("AM", "PM")]
                    data = avg[0].split()

                    # Userland
                    # Debian lenny says %user so we look for both
                    # One of them will be 0
                    cpu_metrics = {
                        "%usr": None, "%user": None, "%nice": None,
                        "%iowait": None, "%idle": None, "%sys": None,
                        "%irq": None, "%soft": None, "%steal": None,
                        "%guest": None
                    }

                    for cpu_m in cpu_metrics:
                        cpu_metrics[cpu_m] = get_value(headers, data, cpu_m, filter_value=110)

                    if any([v is None for v in cpu_metrics.values()]):
                        self.logger.warning("Invalid mpstat data: %s" % data)

                    cpu_user = cpu_metrics["%usr"] + cpu_metrics["%user"] + cpu_metrics["%nice"]
                    cpu_system = cpu_metrics["%sys"] + cpu_metrics["%irq"] + cpu_metrics["%soft"]
                    cpu_wait = cpu_metrics["%iowait"]
                    cpu_idle = cpu_metrics["%idle"]
                    cpu_stolen = cpu_metrics["%steal"]
                    cpu_guest = cpu_metrics["%guest"]

                    return format_results(cpu_user,
                                          cpu_system,
                                          cpu_wait,
                                          cpu_idle,
                                          cpu_stolen,
                                          cpu_guest)
                else:
                    return False

            elif sys.platform == 'darwin':
                # generate 3 seconds of data
                # ['          disk0           disk1       cpu     load average', '    KB/t tps  MB/s     KB/t tps  MB/s  us sy id   1m   5m   15m', '   21.23  13  0.27    17.85   7  0.13  14  7 79  1.04 1.27 1.31', '    4.00   3  0.01     5.00   8  0.04  12 10 78  1.04 1.27 1.31', '']
                iostats, _, _ = get_subprocess_output(['iostat', '-C', '-w', '3', '-c', '2'], self.logger)
                lines = [l for l in iostats.splitlines() if len(l) > 0]
                legend = [l for l in lines if "us" in l]
                if len(legend) == 1:
                    headers = legend[0].split()
                    data = lines[-1].split()
                    cpu_user = get_value(headers, data, "us")
                    cpu_sys = get_value(headers, data, "sy")
                    cpu_wait = 0
                    cpu_idle = get_value(headers, data, "id")
                    cpu_st = 0
                    return format_results(cpu_user, cpu_sys, cpu_wait, cpu_idle, cpu_st)
                else:
                    self.logger.warn("Expected to get at least 4 lines of data from iostat instead of just " + str(iostats[:max(80, len(iostats))]))
                    return False

            elif sys.platform.startswith("freebsd"):
                # generate 3 seconds of data
                # tty            ada0              cd0            pass0             cpu
                # tin  tout  KB/t tps  MB/s   KB/t tps  MB/s   KB/t tps  MB/s  us ni sy in id
                # 0    69 26.71   0  0.01   0.00   0  0.00   0.00   0  0.00   2  0  0  1 97
                # 0    78  0.00   0  0.00   0.00   0  0.00   0.00   0  0.00   0  0  0  0 100
                iostats, _, _ = get_subprocess_output(['iostat', '-w', '3', '-c', '2'], self.logger)
                lines = [l for l in iostats.splitlines() if len(l) > 0]
                legend = [l for l in lines if "us" in l]
                if len(legend) == 1:
                    headers = legend[0].split()
                    data = lines[-1].split()
                    cpu_user = get_value(headers, data, "us")
                    cpu_nice = get_value(headers, data, "ni")
                    cpu_sys = get_value(headers, data, "sy")
                    cpu_intr = get_value(headers, data, "in")
                    cpu_wait = 0
                    cpu_idle = get_value(headers, data, "id")
                    cpu_stol = 0
                    return format_results(cpu_user + cpu_nice, cpu_sys + cpu_intr, cpu_wait, cpu_idle, cpu_stol)

                else:
                    self.logger.warn("Expected to get at least 4 lines of data from iostat instead of just " + str(iostats[:max(80, len(iostats))]))
                    return False

            elif sys.platform == 'sunos5':
                # mpstat -aq 1 2
                # SET minf mjf xcal  intr ithr  csw icsw migr smtx  srw syscl  usr sys  wt idl sze
                # 0 5239   0 12857 22969 5523 14628   73  546 4055    1 146856    5   6   0  89  24 <-- since boot
                # 1 ...
                # SET minf mjf xcal  intr ithr  csw icsw migr smtx  srw syscl  usr sys  wt idl sze
                # 0 20374   0 45634 57792 5786 26767   80  876 20036    2 724475   13  13   0  75  24 <-- past 1s
                # 1 ...
                # http://docs.oracle.com/cd/E23824_01/html/821-1462/mpstat-1m.html
                #
                # Will aggregate over all processor sets
                    output, _, _ = get_subprocess_output(['mpstat', '-aq', '1', '2'], self.logger)
                    mpstat = output.splitlines()
                    lines = [l for l in mpstat if len(l) > 0]
                    # discard the first len(lines)/2 lines
                    lines = lines[len(lines)/2:]
                    legend = [l for l in lines if "SET" in l]
                    assert len(legend) == 1
                    if len(legend) == 1:
                        headers = legend[0].split()
                        # collect stats for each processor set
                        # and aggregate them based on the relative set size
                        d_lines = [l for l in lines if "SET" not in l]
                        user = [get_value(headers, l.split(), "usr") for l in d_lines]
                        kern = [get_value(headers, l.split(), "sys") for l in d_lines]
                        wait = [get_value(headers, l.split(), "wt") for l in d_lines]
                        idle = [get_value(headers, l.split(), "idl") for l in d_lines]
                        size = [get_value(headers, l.split(), "sze") for l in d_lines]
                        count = sum(size)
                        rel_size = [s/count for s in size]
                        dot = lambda v1, v2: reduce(operator.add, map(operator.mul, v1, v2))
                        return format_results(dot(user, rel_size),
                                              dot(kern, rel_size),
                                              dot(wait, rel_size),
                                              dot(idle, rel_size),
                                              0.0)
            else:
                self.logger.warn("CPUStats: unsupported platform")
                return False
        except Exception:
            self.logger.exception("Cannot compute CPU stats")
            return False
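
At its core the check above is a per-platform dispatch on which sampling command to run and how to parse its output. The command selection alone, as a hedged sketch (argument lists copied from the example; the Linux branch in the original actually uses a Platform.is_linux() helper, approximated here with a prefix check):

import sys

def cpu_stats_command():
    # Sampling command per platform, mirroring the branches above.
    if sys.platform.startswith("linux"):
        return ["mpstat", "1", "3"]
    if sys.platform == "darwin":
        return ["iostat", "-C", "-w", "3", "-c", "2"]
    if sys.platform.startswith("freebsd"):
        return ["iostat", "-w", "3", "-c", "2"]
    if sys.platform == "sunos5":
        return ["mpstat", "-aq", "1", "2"]
    return None  # unsupported platform

print(cpu_stats_command())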

Example 22

Project: esky
Source File: f_bbfreeze.py
View license
def freeze(dist):
    """Freeze the given distribution data using bbfreeze."""
    includes = dist.includes
    excludes = dist.excludes
    options = dist.freezer_options
    #  Merge in any includes/excludes given in freezer_options
    for inc in options.pop("includes",()):
        includes.append(inc)
    for exc in options.pop("excludes",()):
        excludes.append(exc)
    if "pypy" not in includes and "pypy" not in excludes:
        excludes.append("pypy")
    #  Freeze up the given scripts
    f = bbfreeze.Freezer(dist.freeze_dir,includes=includes,excludes=excludes)
    for (nm,val) in options.iteritems():
        setattr(f,nm,val)
    f.addModule("esky")
    tdir = tempfile.mkdtemp()
    try:
        for exe in dist.get_executables():
            f.addScript(exe.script,gui_only=exe.gui_only)
        if "include_py" not in options:
            f.include_py = False
        if "linkmethod" not in options:
            #  Since we're going to zip it up, the benefits of hard-
            #  or sym-linking the loader exe will mostly be lost.
            f.linkmethod = "loader"
        f()
    finally:
        shutil.rmtree(tdir)
    #  Copy data files into the freeze dir
    for (src,dst) in dist.get_data_files():
        dst = os.path.join(dist.freeze_dir,dst)
        dstdir = os.path.dirname(dst)
        if not os.path.isdir(dstdir):
            dist.mkpath(dstdir)
        dist.copy_file(src,dst)
    #  Copy package data into the library.zip
    lib = zipfile.ZipFile(os.path.join(dist.freeze_dir,"library.zip"),"a")
    for (src,arcnm) in dist.get_package_data():
        lib.write(src,arcnm)
    lib.close()
    #  Create the bootstrap code, using custom code if specified.
    #  For win32 we include a special chainloader that can suck the selected
    #  version into the running process rather than spawn a new proc.
    code_source = ["__name__ = '__main__'"]
    esky_name = dist.distribution.get_name()
    code_source.append("__esky_name__ = %r" % (esky_name,))
    code_source.append(inspect.getsource(esky.bootstrap))
    if dist.compile_bootstrap_exes:
        if sys.platform == "win32":
            #  The pypy-compiled bootstrap exe will try to load a python env
            #  into its own process and run this "take2" code to bootstrap.
            take2_code = code_source[1:]
            take2_code.append(_CUSTOM_WIN32_CHAINLOADER)
            take2_code.append(dist.get_bootstrap_code())
            take2_code = compile("\n".join(take2_code),"<string>","exec")
            take2_code = marshal.dumps(take2_code)
            clscript = "import marshal; "
            clscript += "exec marshal.loads(%r); " % (take2_code,)
            clscript = clscript.replace("%","%%")
            clscript += "chainload(\"%s\")"
            #  Here's the actual source for the compiled bootstrap exe.
            from esky.bdist_esky import pypy_libpython
            code_source.append(inspect.getsource(pypy_libpython))
            code_source.append("_PYPY_CHAINLOADER_SCRIPT = %r" % (clscript,))
            code_source.append(_CUSTOM_PYPY_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source = "\n".join(code_source)
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            bsexe = dist.compile_to_bootstrap_exe(exe,code_source)
            if sys.platform == "win32":
                fexe = os.path.join(dist.freeze_dir,exe.name)
                winres.copy_safe_resources(fexe,bsexe)
        #  We may also need the bundled MSVCRT libs
        if sys.platform == "win32":
            for nm in os.listdir(dist.freeze_dir):
                if is_core_dependency(nm) and nm.startswith("Microsoft"):
                    dist.copy_to_bootstrap_env(nm)
    else:
        if sys.platform == "win32":
            code_source.append(_CUSTOM_WIN32_CHAINLOADER)
        code_source.append(dist.get_bootstrap_code())
        code_source.append("bootstrap()")
        code_source = "\n".join(code_source)
        #  For non-compiled bootstrap exe, store the bootstrapping code
        #  into the library.zip as __main__.
        maincode = imp.get_magic() + struct.pack("<i",0)
        maincode += marshal.dumps(compile(code_source,"__main__.py","exec"))
        #  Create code for a fake esky.bootstrap module
        eskycode = imp.get_magic() + struct.pack("<i",0)
        eskycode += marshal.dumps(compile("","esky/__init__.py","exec"))
        eskybscode = imp.get_magic() + struct.pack("<i",0)
        eskybscode += marshal.dumps(compile("","esky/bootstrap.py","exec"))
        #  Store bootstrap code as __main__ in the bootstrap library.zip.
        #  The frozen library.zip might have the loader prepended to it, but
        #  that gets overwritten here.
        bslib_path = dist.copy_to_bootstrap_env("library.zip")
        bslib = zipfile.PyZipFile(bslib_path,"w",zipfile.ZIP_STORED)
        cdate = (2000,1,1,0,0,0)
        bslib.writestr(zipfile.ZipInfo("__main__.pyc",cdate),maincode)
        bslib.writestr(zipfile.ZipInfo("esky/__init__.pyc",cdate),eskycode)
        bslib.writestr(zipfile.ZipInfo("esky/bootstrap.pyc",cdate),eskybscode)
        bslib.close()
        #  Copy any core dependencies
        if "fcntl" not in sys.builtin_module_names:
            for nm in os.listdir(dist.freeze_dir):
                if nm.startswith("fcntl"):
                    dist.copy_to_bootstrap_env(nm)
        for nm in os.listdir(dist.freeze_dir):
            if is_core_dependency(nm):
                dist.copy_to_bootstrap_env(nm)
        #  Copy the bbfreeze interpreter if necessary
        if f.include_py:
            if sys.platform == "win32":
                dist.copy_to_bootstrap_env("py.exe")
            else:
                dist.copy_to_bootstrap_env("py")
        #  Copy the loader program for each script.
        #  We explicitly strip the loader binaries, in case they were made
        #  by linking to the library.zip.
        for exe in dist.get_executables(normalise=False):
            if not exe.include_in_bootstrap_env:
                continue
            exepath = dist.copy_to_bootstrap_env(exe.name)
            f.stripBinary(exepath)
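
Several of the win32-only steps above repeat one pattern: scan the freeze directory and copy platform-specific runtime files into the bootstrap environment only when sys.platform says they are needed. A rough sketch of that idea, with illustrative file-name checks (the "python"/"Microsoft.VC" prefix tests are assumptions, not esky's exact rules):

    import os
    import shutil
    import sys

    def copy_core_dependencies(freeze_dir, bootstrap_dir):
        """Copy loader-level runtime files from the freeze dir into the bootstrap env."""
        if not os.path.isdir(bootstrap_dir):
            os.makedirs(bootstrap_dir)
        for name in os.listdir(freeze_dir):
            needed = name.startswith("python") and name.endswith((".dll", ".so"))
            if sys.platform == "win32":
                # the bundled C runtime only matters for the win32 loader exes
                needed = needed or name.startswith("Microsoft.VC")
            if needed:
                shutil.copy2(os.path.join(freeze_dir, name),
                             os.path.join(bootstrap_dir, name))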

Example 23

Project: dopey
Source File: mypaint.py
View license
def get_paths():
    join = os.path.join

    # Convert sys.argv to a list of unicode objects
    # (actually converting sys.argv confuses gtk, thus we add a new variable)
    if sys.platform == 'win32':
        sys.argv_unicode = win32_unicode_argv()
    else:
        sys.argv_unicode = [s.decode(sys.getfilesystemencoding())
                            for s in sys.argv]

    # Script and its location, in canonical absolute form
    scriptfile = os.path.abspath(os.path.normpath(sys.argv_unicode[0]))
    scriptdir = os.path.dirname(scriptfile)
    assert isinstance(scriptfile, unicode)
    assert isinstance(scriptdir, unicode)

    # Determine $prefix
    dir_install = scriptdir
    if os.path.basename(dir_install) == 'bin':
        # This is a normal POSIX installation.
        prefix = os.path.dirname(dir_install)
        assert isinstance(prefix, unicode)
        libpath = join(prefix, 'share', 'mypaint')
        libpath_compiled = join(prefix, 'lib', 'mypaint') # or lib64?
        sys.path.insert(0, libpath)
        sys.path.insert(0, libpath_compiled)
        localepath = join(prefix, 'share', 'locale')
        localepath_brushlib = localepath
        extradata = join(prefix, 'share')
    elif sys.platform == 'win32':
        prefix=None
        # this is the py2exe point of view: all executables live in the root of the installdir
        libpath = os.path.realpath(scriptdir)
        sys.path.insert(0, libpath)
        localepath = join(libpath, 'share', 'locale')
        localepath_brushlib = localepath
        extradata = join(libpath, 'share')
    else:
        # Not installed: run out of the source tree.
        prefix = None
        libpath = u'.'
        extradata = u'desktop'
        localepath = 'po'
        localepath_brushlib = 'brushlib/po'

    assert isinstance(libpath, unicode)

    try: # just for a nice error message
        from lib import mypaintlib
    except ImportError:
        logger.critical("We are not correctly installed or compiled!")
        logger.critical('script: %r', sys.argv[0])
        if prefix:
            logger.critical('deduced prefix: %r', prefix)
            logger.critical('lib_shared: %r', libpath)
            logger.critical('lib_compiled: %r', libpath_compiled)
        raise

    # Ensure that pyGTK compatibility is setup before anything else
    from gui import gtk2compat

    datapath = libpath
    if not os.path.isdir(join(datapath, 'brushes')):
        logger.critical('Default brush collection not found!')
        logger.critical('It should have been here: %r', datapath)
        sys.exit(1)

    # Old style config file and user data locations.
    # Return None if using XDG will be correct.
    if sys.platform == 'win32':
        old_confpath = None
    else:
        from lib import helpers
        homepath =  helpers.expanduser_unicode(u'~')
        old_confpath = join(homepath, '.mypaint/')

    if old_confpath:
        if not os.path.isdir(old_confpath):
            old_confpath = None
        else:
            logger.info("Found old-style configuration in %r", old_confpath)
            logger.info("This can be migrated to $XDG_CONFIG_HOME and "
                        "$XDG_DATA_HOME if you wish.")
            logger.info("See the XDG Base Directory Specification for info.")

    assert isinstance(old_confpath, unicode) or old_confpath is None
    assert isinstance(datapath, unicode)
    assert isinstance(extradata, unicode)

    return datapath, extradata, old_confpath, localepath, localepath_brushlib
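
The path resolution above reduces to three cases: a POSIX prefix install (the script lives in .../bin), a frozen py2exe-style layout on win32, and a plain source checkout. A stripped-down sketch of that dispatch (directory names are illustrative, not MyPaint's real layout):

    import os
    import sys

    def get_data_paths(script_path):
        """Return (datadir, localedir) for the three layouts handled above."""
        scriptdir = os.path.dirname(os.path.abspath(script_path))
        if os.path.basename(scriptdir) == "bin":
            # POSIX install: <prefix>/bin/app -> data under <prefix>/share
            prefix = os.path.dirname(scriptdir)
            return (os.path.join(prefix, "share", "myapp"),
                    os.path.join(prefix, "share", "locale"))
        if sys.platform == "win32":
            # frozen layout: everything sits next to the executable
            return scriptdir, os.path.join(scriptdir, "share", "locale")
        # plain source checkout
        return ".", "po"

    print(get_data_paths(sys.argv[0]))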

Example 24

Project: sd-agent
Source File: unix.py
View license
    def check(self, agentConfig):
        if Platform.is_linux():
            try:
                with open('/proc/meminfo', 'r') as mem_info:
                    lines = mem_info.readlines()
            except Exception:
                self.logger.exception('Cannot get memory metrics from /proc/meminfo')
                return False

            # NOTE: not all of the stats below are present on all systems as
            # not all kernel versions report all of them.
            #
            # $ cat /proc/meminfo
            # MemTotal:        7995360 kB
            # MemFree:         1045120 kB
            # MemAvailable:    1253920 kB
            # Buffers:          226284 kB
            # Cached:           775516 kB
            # SwapCached:       248868 kB
            # Active:          1004816 kB
            # Inactive:        1011948 kB
            # Active(anon):     455152 kB
            # Inactive(anon):   584664 kB
            # Active(file):     549664 kB
            # Inactive(file):   427284 kB
            # Unevictable:     4392476 kB
            # Mlocked:         4392476 kB
            # SwapTotal:      11120632 kB
            # SwapFree:       10555044 kB
            # Dirty:              2948 kB
            # Writeback:             0 kB
            # AnonPages:       5203560 kB
            # Mapped:            50520 kB
            # Shmem:             10108 kB
            # Slab:             161300 kB
            # SReclaimable:     136108 kB
            # SUnreclaim:        25192 kB
            # KernelStack:        3160 kB
            # PageTables:        26776 kB
            # NFS_Unstable:          0 kB
            # Bounce:                0 kB
            # WritebackTmp:          0 kB
            # CommitLimit:    15118312 kB
            # Committed_AS:    6703508 kB
            # VmallocTotal:   34359738367 kB
            # VmallocUsed:      400668 kB
            # VmallocChunk:   34359329524 kB
            # HardwareCorrupted:     0 kB
            # HugePages_Total:       0
            # HugePages_Free:        0
            # HugePages_Rsvd:        0
            # HugePages_Surp:        0
            # Hugepagesize:       2048 kB
            # DirectMap4k:       10112 kB
            # DirectMap2M:     8243200 kB

            regexp = re.compile(r'^(\w+):\s+([0-9]+)')  # We run this several times so one-time compile now
            meminfo = {}

            for line in lines:
                try:
                    match = re.search(regexp, line)
                    if match is not None:
                        meminfo[match.group(1)] = match.group(2)
                except Exception:
                    self.logger.exception("Cannot parse /proc/meminfo")

            memData = {}

            # Physical memory
            # FIXME units are in MB, we should use bytes instead
            try:
                memData['physTotal'] = int(meminfo.get('MemTotal', 0)) / 1024
                memData['physFree'] = int(meminfo.get('MemFree', 0)) / 1024
                memData['physBuffers'] = int(meminfo.get('Buffers', 0)) / 1024
                memData['physCached'] = int(meminfo.get('Cached', 0)) / 1024
                memData['physShared'] = int(meminfo.get('Shmem', 0)) / 1024
                memData['physSlab'] = int(meminfo.get('Slab', 0)) / 1024
                memData['physPageTables'] = int(meminfo.get('PageTables', 0)) / 1024
                memData['physUsed'] = memData['physTotal'] - memData['physFree']

                if 'MemAvailable' in meminfo:
                    memData['physUsable'] = int(meminfo.get('MemAvailable', 0)) / 1024
                else:
                    # Usable is relative since cached and buffers are actually used to speed things up.
                    memData['physUsable'] = memData['physFree'] + memData['physBuffers'] + memData['physCached']

                if memData['physTotal'] > 0:
                    memData['physPctUsable'] = float(memData['physUsable']) / float(memData['physTotal'])
            except Exception:
                self.logger.exception('Cannot compute stats from /proc/meminfo')

            # Swap
            # FIXME units are in MB, we should use bytes instead
            try:
                memData['swapTotal'] = int(meminfo.get('SwapTotal', 0)) / 1024
                memData['swapFree'] = int(meminfo.get('SwapFree', 0)) / 1024
                memData['swapCached'] = int(meminfo.get('SwapCached', 0)) / 1024

                memData['swapUsed'] = memData['swapTotal'] - memData['swapFree']

                if memData['swapTotal'] > 0:
                    memData['swapPctFree'] = float(memData['swapFree']) / float(memData['swapTotal'])
            except Exception:
                self.logger.exception('Cannot compute swap stats')

            return memData

        elif sys.platform == 'darwin':
            if psutil is None:
                self.logger.error("psutil must be installed on MacOS to collect memory metrics")
                return False

            phys_memory = psutil.virtual_memory()
            swap = psutil.swap_memory()
            return {'physUsed': phys_memory.used / float(1024**2),
                'physFree': phys_memory.free / float(1024**2),
                'physUsable': phys_memory.available / float(1024**2),
                'physPctUsable': (100 - phys_memory.percent) / 100.0,
                'swapUsed': swap.used / float(1024**2),
                'swapFree': swap.free / float(1024**2)}

        elif sys.platform.startswith("freebsd"):
            try:
                phys_total, _, _ = get_subprocess_output(['sysctl', '-n', 'hw.physmem'], self.logger)
                output, _, _ = get_subprocess_output(['sysctl', 'vm.stats.vm'], self.logger)
                sysctl = output.splitlines()
            except Exception:
                self.logger.exception('getMemoryUsage')
                return False

            # ...
            # vm.stats.vm.v_page_size: 4096
            # vm.stats.vm.v_page_count: 759884
            # vm.stats.vm.v_wire_count: 122726
            # vm.stats.vm.v_active_count: 109350
            # vm.stats.vm.v_cache_count: 17437
            # vm.stats.vm.v_inactive_count: 479673
            # vm.stats.vm.v_free_count: 30542
            # ...

            # We run this several times so one-time compile now
            regexp = re.compile(r'^vm\.stats\.vm\.(\w+):\s+([0-9]+)')
            meminfo = {}

            for line in sysctl:
                try:
                    match = re.search(regexp, line)
                    if match is not None:
                        meminfo[match.group(1)] = match.group(2)
                except Exception:
                    self.logger.exception("Cannot parse sysctl vm.stats.vm output")

            memData = {}

            # Physical memory
            try:
                pageSize = int(meminfo.get('v_page_size'))

                memData['physTotal'] = int(phys_total.strip()) / 1048576
                memData['physFree'] = (int(meminfo.get('v_free_count', 0))
                                       * pageSize) / 1048576
                memData['physCached'] = (int(meminfo.get('v_cache_count', 0))
                                         * pageSize) / 1048576
                memData['physUsable'] = ((int(meminfo.get('v_free_count', 0)) +
                                          int(meminfo.get('v_cache_count', 0)) +
                                          int(meminfo.get('v_inactive_count', 0))) *
                                         pageSize) / 1048576
                memData['physUsed'] = memData['physTotal'] - memData['physUsable']

                if memData['physTotal'] > 0:
                    memData['physPctUsable'] = float(memData['physUsable']) / float(memData['physTotal'])
            except Exception:
                self.logger.exception('Cannot compute stats from /proc/meminfo')

            # Swap
            try:
                output, _, _ = get_subprocess_output(['swapinfo', '-m'], self.logger)
                sysctl = output.splitlines()
            except Exception:
                self.logger.exception('getMemoryUsage')
                return False

            # ...
            # Device          1M-blocks     Used    Avail Capacity
            # /dev/ad0s1b           570        0      570     0%
            # ...

            assert "Device" in sysctl[0]

            try:
                memData['swapTotal'] = 0
                memData['swapFree'] = 0
                memData['swapUsed'] = 0
                for line in sysctl[1:]:
                    if len(line) > 0:
                        line = line.split()
                        memData['swapTotal'] += int(line[1])
                        memData['swapFree'] += int(line[3])
                        memData['swapUsed'] += int(line[2])
            except Exception:
                self.logger.exception('Cannot compute stats from swapinfo')

            return memData
        elif sys.platform == 'sunos5':
            try:
                memData = {}
                cmd = ["kstat", "-m", "memory_cap", "-c", "zone_memory_cap", "-p"]
                output, _, _ = get_subprocess_output(cmd, self.logger)
                kmem = output.splitlines()

                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:anon_alloc_fail   0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:anonpgin  0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:class     zone_memory_cap
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:crtime    16359935.0680834
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:execpgin  185
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:fspgin    2556
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:n_pf_throttle     0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:n_pf_throttle_usec        0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:nover     0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:pagedout  0
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:pgpgin    2741
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:physcap   536870912  <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:rss       115544064  <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:snaptime  16787393.9439095
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:swap      91828224   <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:swapcap   1073741824 <--
                # memory_cap:360:53aa9b7e-48ba-4152-a52b-a6368c:zonename  53aa9b7e-48ba-4152-a52b-a6368c3d9e7c

                # turn memory_cap:360:zone_name:key value
                # into { "key": value, ...}
                kv = [l.strip().split() for l in kmem if len(l) > 0]
                entries = dict([(k.split(":")[-1], v) for (k, v) in kv])
                # extract rss, physcap, swap, swapcap, turn into MB
                convert = lambda v: int(long(v))/2**20
                memData["physTotal"] = convert(entries["physcap"])
                memData["physUsed"] = convert(entries["rss"])
                memData["physFree"] = memData["physTotal"] - memData["physUsed"]
                memData["swapTotal"] = convert(entries["swapcap"])
                memData["swapUsed"] = convert(entries["swap"])
                memData["swapFree"] = memData["swapTotal"] - memData["swapUsed"]

                if memData['swapTotal'] > 0:
                    memData['swapPctFree'] = float(memData['swapFree']) / float(memData['swapTotal'])
                return memData
            except Exception:
                self.logger.exception("Cannot compute mem stats from kstat -c zone_memory_cap")
                return False
        else:
            return False
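
The darwin branch already relies on psutil; where psutil is available, the same MB-based dictionary can be assembled on any platform without parsing /proc/meminfo or sysctl output. A rough cross-platform sketch (requires psutil; keys mirror the example, values in MB):

    import psutil

    def memory_stats_mb():
        """Physical and swap memory figures in MB, like the example's memData dict."""
        mem = psutil.virtual_memory()
        swap = psutil.swap_memory()
        mb = float(1024 ** 2)
        return {
            "physTotal": mem.total / mb,
            "physFree": mem.free / mb,
            "physUsable": mem.available / mb,
            "physPctUsable": (100 - mem.percent) / 100.0,
            "swapTotal": swap.total / mb,
            "swapFree": swap.free / mb,
            "swapUsed": swap.used / mb,
        }

    print(memory_stats_mb())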

Example 25

Project: Devede
Source File: devede_subtitles.py
View license
	def __init__(self,videofile,filename,filefolder,progresbar,proglabel,disctype,title,chapter,stream):

		""" This class adds the subtitles to an already converted file

		VIDEOFILE contains the parameters to convert the video
		FILENAME is the generic file name given by the user
		FILEFOLDER is the path where all the temporary and final files will be created
		PROGRESBAR is the progress bar where the class will show the progress
		PROGLABEL is the label where the class will show what it is doing
		DISCTYPE can be dvd, vcd, svcd, cvd or divx
		TITLE and CHAPTER identify the title and chapter numbers for this file
		STREAM is the stream number (allowing several subtitle tracks to be added)
		"""

		devede_executor.executor.__init__(self,filename,filefolder,progresbar)
		progresbar.pulse()
		proglabel.set_text(_("Adding subtitles to")+"\n"+videofile["filename"])
		self.currentfile=self.create_filename(filefolder+filename,title,chapter,disctype=="divx")

		subtitle_list=videofile["sub_list"][stream]

		# generate the XML file

		self.error=""

		try:
			print "Trying to create "+filefolder+filename+"_sub.xml"
			fichero=open(filefolder+filename+"_sub.xml","w")
		except IOError:
			print "IOError en subtitulos"
			self.print_error=_("Failed to write to the destination directory.\nCheck that you have privileges and free space there.")
			self.initerror=True
			return
		
		fichero.write('<subpictures format="')
		if (videofile["fps"]==25) :
			fichero.write('PAL')
		else:
			fichero.write('NTSC')
		fichero.write('">\n\t<stream>')
		if (subtitle_list["sub_codepage"]!="UTF-8"):
			final_type="UTF-8"
			subfilename=os.path.join(filefolder,filename+"_sub_tmp.sub")
			self.deletesub=subfilename
			if 0!=devede_other.check_utf().convert_to_UTF8(subtitle_list["subtitles"],subfilename,subtitle_list["sub_codepage"]):
				#except IOError:
				print "IOError al convertir a UTF8"
				self.print_error=_("Failed to write to the destination directory.\nCheck that you have privileges and free space there.")
				self.initerror=True
				return
		else:
			self.deletesub=""
			final_type=subtitle_list["sub_codepage"]
			subfilename=subtitle_list["subtitles"]
		fichero.write('\n\t\t<textsub filename="'+self.expand_xml(subfilename)+'"')

		if (sys.platform=="win32") or (sys.platform=="win64"):
			if os.path.isfile(os.path.join(os.environ["WINDIR"],"Fonts","devedesans.ttf")):
				fichero.write('\n\t\tfont="devedesans.ttf"')
			else:
				fichero.write('\n\t\tfont="ARIAL.ttf"')
		else:
			fichero.write('\n\t\tfont="devedesans.ttf"')
		if ((subtitle_list["sub_codepage"]!="") and (subtitle_list["sub_codepage"]!="ASCII")):
			fichero.write('\n\t\tcharacterset="'+final_type+'"')
		fichero.write('\n\t\thorizontal-alignment="center"')

		if (videofile["fps"]==25):
			ancho=716
			alto=572
			tamanofont=videofile["subfont_size"]
		else:
			ancho=716
			alto=476
			tamanofont=videofile["subfont_size"]

		margin_hor=int((58*ancho)/720)
		margin_vert=int((28*alto)/576)
		bottom_margin=margin_vert

		fichero.write('\n\t\tmovie-width="'+str(ancho-4)+'"')
		fichero.write('\n\t\tmovie-height="'+str(alto-4)+'"')
		fichero.write('\n\t\tleft-margin="'+str(margin_hor)+'"')
		fichero.write('\n\t\tright-margin="'+str(margin_hor)+'"')

		if subtitle_list["subtitles_up"]:
			tamanofont-=1
			bottom_margin=4+(alto/8) # put it in the border of 16:9 aspect ratio

		fichero.write('\n\t\tbottom-margin="'+str(bottom_margin)+'"')
		fichero.write('\n\t\ttop-margin="'+str(margin_vert)+'"')

		fichero.write('\n\t\tfontsize="'+str(tamanofont)+'.0"')

		fill_color = 'rgba(%.0f%%, %.0f%%, %.0f%%, %.0f%%)' % tuple(
			float(item)/655.35 for item in videofile["sub_fill_color"]
		)
		fichero.write('\n\t\tfill-color="%s"' % fill_color)

		outline_color = 'rgba(%.0f%%, %.0f%%, %.0f%%, %.0f%%)' % tuple(
			float(item)/655.35 for item in videofile["sub_outline_color"]
		)
		fichero.write('\n\t\toutline-color="%s"' % outline_color)

		if (videofile["fps"]==30):
			if (videofile["ofps"]==24) and ((disctype=="dvd") or (disctype=="divx")):
				fps_out="24000/1001"
			else:
				fps_out="30000/1001"
		else:
			fps_out="25"

		if videofile["ismpeg"]:
			fps_out=videofile["ofps2"]
			print "FPS sub 1 original"
		else:
			print "FPS sub 1 final"

		fichero.write('\n\t\tmovie-fps="'+str(fps_out)+'"')
		speed1,speed2=devede_other.get_speedup(videofile)
		if speed1==speed2:
			fps_out_subs=fps_out
			print "FPS sub 2 final"
		else:
			if speed2==24:
				fps_out_subs="24"
			else:
				fps_out_subs=videofile["ofps2"]
			print "FPS sub 2 original"

		fichero.write('\n\t\tsubtitle-fps="'+fps_out_subs+'"')
		fichero.write('\n\t\tvertical-alignment="bottom" />')
		fichero.write("\n\t</stream>\n</subpictures>")
		fichero.close()
		
		comando=""
		if (sys.platform=="win32") or (sys.platform=="win64"):
			comando=["spumux.exe"]
			comando.append("-m")
			if disctype=="vcd":
				comando.append("svcd")
			else:
				comando.append(disctype)
			comando.append("-s")
			comando.append(str(stream))
			
			comando.append(filefolder+filename+"_sub.xml")
			comando.append("-i")
			comando.append(self.currentfile)
			comando.append("-o")
			comando.append(self.currentfile+".sub")
			self.print_error=_("Conversion failed.\nIt seems a bug of SPUMUX.")
			self.launch_program(comando,output=True)
		else:
			comando="spumux -m "
			if disctype=="vcd":
				comando+="svcd"
			else:
				comando+=disctype
		
			comando+=' -s '+str(stream)+' "'+filefolder+filename+'_sub.xml"'
		
			self.print_error=_("Conversion failed.\nIt seems a bug of SPUMUX.")
			self.launch_shell(comando,output=True,stdinout=[self.currentfile,self.currentfile+".sub"])
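
The final block dispatches on sys.platform to decide whether spumux is invoked as an argument list ("spumux.exe" with -i/-o flags) or as a quoted shell command fed through stdin/stdout. Note that CPython never actually reports "win64"; a 64-bit Windows interpreter still reports "win32". A compact sketch of the same dispatch using subprocess directly, simplifying the example's launch_program/launch_shell helpers:

    import subprocess
    import sys

    def run_spumux(xml_path, stream, infile, outfile, disctype="dvd"):
        """Mux subtitles with spumux, dispatching on sys.platform like the example."""
        mode = "svcd" if disctype == "vcd" else disctype
        if sys.platform in ("win32", "win64"):
            cmd = ["spumux.exe", "-m", mode, "-s", str(stream),
                   xml_path, "-i", infile, "-o", outfile]
            return subprocess.call(cmd)
        # POSIX: spumux reads the MPEG stream on stdin and writes the result to stdout
        cmd = 'spumux -m %s -s %s "%s"' % (mode, stream, xml_path)
        with open(infile, "rb") as fin, open(outfile, "wb") as fout:
            return subprocess.call(cmd, shell=True, stdin=fin, stdout=fout)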

Example 26

Project: peach
Source File: agent.py
View license
    def __init__(self, agentUri, password, pythonPaths=None, imports=None, configs=None):
        """
        Creates and Agent instance and attempts to connect to the AgentMaster.
        If connection works the Client Hello message is sent.

        @type	agentUri: string
        @param	agentUri: Url of agent
        @type	password: string
        @param	password: [optional] Password to authenticate to agent.  Warning: CLEAR-TEXT!!
        @type	pythonPaths: list
        @param	pythonPaths: List of paths we should configure on the remote agent
        @type	imports: list
        @param	imports: list of imports that should be performed on the remote agent
        """

        self._pythonPaths = pythonPaths
        self._imports = imports
        self._password = password
        self._monitors = []
        self._id = None
        self._agent = None
        self._agentUri = agentUri

        agentUrl = urlparse(agentUri)
        agentPort = agentUrl.port
        agentHostname = agentUrl.hostname

        # This is nicer, but does not work on darwin:
        #if socket.getfqdn(agentHostname) in ('localhost', socket.gethostname()):
        if agentHostname in ("127.0.0.1", "0.0.0.0", "localhost", socket.gethostname()):
            peachPath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            macros = []
            if configs:
                macros.append("-macros")
                for kv in configs.iteritems():
                    macros.append("=".join(kv))
            if sys.platform == "win32":
                agentProcess = subprocess.call(['start',
                                                "Peach Agent",
                                                sys.executable,
                                                "%s\peach.py" % peachPath,
                                                "-agent", str(agentPort), password] + macros)
                agentProcesses.append(agentProcess)
            elif sys.platform == "darwin":
                if not self.isPeachRunning(agentPort):
                    peachAgentCommand = [sys.executable, 'peach.py', '-agent', str(agentPort), password] + macros
                    if configs and getBooleanAttribute(configs, "HideAgentWindow"):
                        logging.warning("Agent window will not be visible!")
                        agentProcess = subprocess.Popen(peachAgentCommand,
                                                        cwd=peachPath,
                                                        stdout=open('/dev/null', 'w'))
                        agentProcesses.append(agentProcess)
                    else:
                        logging.info("Opening agent in new terminal.")
                        osxTerminalCommand = \
                            """osascript -e 'tell application "Terminal" to do script "cd %s; %s; exit"'""" % \
                            (peachPath, re.sub(r"""(['"])""", r"\\\1", subprocess.list2cmdline(peachAgentCommand)))
                        agentProcess = subprocess.Popen(osxTerminalCommand,
                                                        stdout=subprocess.PIPE,
                                                        shell=True)
                        agentProcesses.append(agentProcess)
            elif sys.platform == "linux2":
                if not self.isPeachRunning(agentPort):
                    logging.info("Opening agent in new terminal.")
                    peachAgentCommand = [sys.executable, "peach.py", "-agent", str(agentPort), password] + macros
                    if "COLORTERM" in os.environ and 'gnome-terminal' in os.environ["COLORTERM"]:
                        linuxTerminalCommands = ['gnome-terminal', '-x'] + peachAgentCommand
                    else:
                        linuxTerminalCommands = ['xterm', '-hold']
                        if configs and getBooleanAttribute(configs, "AgentTerminalLogging"):
                            linuxTerminalCommands += ["-l"]
                        linuxTerminalCommands += ['-e'] + peachAgentCommand
                    agentProcess = subprocess.Popen(linuxTerminalCommands,
                                                    cwd=peachPath,
                                                    stdout=subprocess.PIPE)
                    agentProcesses.append(agentProcess)
            else:
                raise PeachException("We only support auto starting agents on Windows, OSX and Linux. "
                                     "Please configure all agents with location URIs and launch any Agent manually.")

        # Connect to remote agent
        try:
            m = re.search(r"://([^/]*)", agentUri)
            self._name = m.group(1)
        except:
            raise PeachException("Please make sure your agent location string is a valid http URL.")
        self.Connect()
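
The launcher above picks a different terminal front-end per platform: cmd.exe's start on Windows, osascript driving Terminal.app on OS X, and gnome-terminal or xterm on Linux (note that 'linux2' only matches Python 2; Python 3 reports plain 'linux'). A trimmed-down sketch of that dispatch, with illustrative commands:

    import subprocess
    import sys

    def open_in_terminal(command):
        """Run `command` (a list of argv strings) in a new terminal window."""
        if sys.platform == "win32":
            # 'start' is a cmd.exe builtin; the empty string is the window title
            return subprocess.Popen(["cmd", "/c", "start", ""] + command)
        if sys.platform == "darwin":
            script = 'tell application "Terminal" to do script "%s"' % " ".join(command)
            return subprocess.Popen(["osascript", "-e", script])
        if sys.platform.startswith("linux"):
            return subprocess.Popen(["xterm", "-e"] + command)
        raise RuntimeError("unsupported platform: %s" % sys.platform)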

Example 27

Project: sd-agent
Source File: sd_cpu_stats.py
View license
    def check(self, instance):
        #self.log.debug('hello')
        ##self.gauge('serverdensity.disk.free', 1)
        #self.gauge('serverdensity.disk.free', 1, device_name="/")
        #self.gauge('serverdensity.disk.free', 2, device_name="/var")
        #self.gauge('serverdensity.disk.free', 3, device_name="/home")
        #self.log.debug('hello2')

        def get_value(legend, data, name, filter_value=None):
            "Using the legend and a metric name, get the value or None from the data line"
            if name in legend:
                value = data[legend.index(name)]
                if filter_value is not None:
                    if value > filter_value:
                        return None
                return value

            else:
                # FIXME return a float or False, would trigger type error if not python
                self.log.debug("Cannot extract cpu value %s from %s (%s)" % (name, data, legend))
                return 0.0

        self.log.debug('getCPUStats: start')

        cpu_stats = {}

        if sys.platform == 'linux2':
            self.log.debug('getCPUStats: linux2')

            headerRegexp = re.compile(r'.*?([%][a-zA-Z0-9]+)[\s+]?')
            itemRegexp = re.compile(r'.*?\s+(\d+)[\s+]?')
            valueRegexp = re.compile(r'\d+\.\d+')
            proc = None
            try:
                proc = subprocess.Popen(['mpstat', '-P', 'ALL', '1', '1'], stdout=subprocess.PIPE, close_fds=True)
                stats = proc.communicate()[0]

                if int(pythonVersion[1]) >= 6:
                    try:
                        proc.kill()
                    except Exception:
                        self.log.debug('Process already terminated')

                stats = stats.split('\n')
                header = stats[2]
                headerNames = re.findall(headerRegexp, header)
                device = None

                for statsIndex in range(3, len(stats)):
                    row = stats[statsIndex]

                    if not row:  # skip the averages
                        break

                    deviceMatch = re.match(itemRegexp, row)

                    if string.find(row, 'all') != -1:
                        device = 'ALL'
                    elif deviceMatch is not None:
                        device = 'CPU%s' % deviceMatch.groups()[0]

                    values = re.findall(valueRegexp, row.replace(',', '.'))

                    cpu_stats[device] = {}
                    for headerIndex in range(0, len(headerNames)):
                        headerName = headerNames[headerIndex]
                        cpu_stats[device][headerName] = values[headerIndex]
                        key = headerName.replace('%', '')
                        self.gauge('serverdensity.cpu.{0}'.format(key), float(values[headerIndex]), device_name=device)

            except OSError:
                # we don't have it installed, return nothing
                return False

            except Exception:
                import traceback
                self.log.error("getCPUStats: exception = %s", traceback.format_exc())

                if int(pythonVersion[1]) >= 6:
                    try:
                        if proc is not None:
                            proc.kill()
                    except UnboundLocalError:
                        self.log.debug('Process already terminated')
                    except Exception:
                        self.log.debug('Process already terminated')

                return False

        elif sys.platform == 'darwin':
            self.log.debug('getCPUStats: darwin')

            try:
                proc = subprocess.Popen(['sar', '-u', '1', '2'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
                stats = proc.communicate()[0]

                itemRegexp = re.compile(r'\s+(\d+)[\s+]?')
                titleRegexp = re.compile(r'.*?([%][a-zA-Z0-9]+)[\s+]?')
                titles = []
                values = []
                for line in stats.split('\n'):
                    # top line with the titles in
                    if '%' in line:
                        titles = re.findall(titleRegexp, line)
                    if line and line.startswith('Average:'):
                        values = re.findall(itemRegexp, line)

                if values and titles:
                    cpu_stats['ALL'] = dict(zip(titles, values))
                    for headerIndex in range(0, len(titles)):
                        key = titles[headerIndex].replace('%', '')
                        self.gauge('serverdensity.cpu.{0}'.format(key), float(values[headerIndex]), device_name='ALL')

            except Exception:
                import traceback
                self.log.error('getCPUStats: exception = %s', traceback.format_exc())
                return False

        elif sys.platform.startswith("freebsd"):
            # generate 3 seconds of data
            # tty            ada0              cd0            pass0             cpu
            # tin  tout  KB/t tps  MB/s   KB/t tps  MB/s   KB/t tps  MB/s  us ni sy in id
            # 0    69 26.71   0  0.01   0.00   0  0.00   0.00   0  0.00   2  0  0  1 97
            # 0    78  0.00   0  0.00   0.00   0  0.00   0.00   0  0.00   0  0  0  0 100
            iostats, _, _ = get_subprocess_output(['iostat', '-w', '3', '-c', '2'], self.log)
            lines = [l for l in iostats.splitlines() if len(l) > 0]
            legend = [l for l in lines if "us" in l]
            if len(legend) == 1:
                headers = legend[0].split()
                data = lines[-1].split()
                cpu_user = get_value(headers, data, "us")
                cpu_nice = get_value(headers, data, "ni")
                cpu_sys = get_value(headers, data, "sy")
                cpu_intr = get_value(headers, data, "in")
                cpu_idle = get_value(headers, data, "id")
                self.gauge('serverdensity.cpu.usr', float(cpu_user), device_name='ALL')
                self.gauge('serverdensity.cpu.nice', float(cpu_nice), device_name='ALL')
                self.gauge('serverdensity.cpu.sys', float(cpu_sys), device_name='ALL')
                self.gauge('serverdensity.cpu.irq', float(cpu_intr), device_name='ALL')
                self.gauge('serverdensity.cpu.idle', float(cpu_idle), device_name='ALL')
                cpu_stats['ALL'] = {
                    'usr': cpu_user,
                    'nice': cpu_nice,
                    'sys': cpu_sys,
                    'irq': cpu_intr,
                    'idle': cpu_idle,
                }

            else:
                self.logger.warn("Expected to get at least 4 lines of data from iostat instead of just " + str(iostats[:max(80, len(iostats))]))
                return False

        else:
            self.log.debug('getCPUStats: unsupported platform')
            return False

        self.log.debug('getCPUStats: completed, returning')
        return {'cpuStats': cpu_stats}
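
Each branch above shells out to a different sampling tool. One small caveat: sys.platform == 'linux2' only matches Python 2, since Python 3 reports plain 'linux'; sys.platform.startswith('linux') covers both. A minimal sketch that just selects the command used in each branch:

    import sys

    def cpu_stat_command():
        """Pick the sampling command used by the check, per platform."""
        if sys.platform.startswith("linux"):      # 'linux2' on Python 2, 'linux' on Python 3
            return ["mpstat", "-P", "ALL", "1", "1"]
        if sys.platform == "darwin":
            return ["sar", "-u", "1", "2"]
        if sys.platform.startswith("freebsd"):
            return ["iostat", "-w", "3", "-c", "2"]
        return None                               # unsupported platform

    print(cpu_stat_command())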

Example 28

Project: pybfd
Source File: setup.py
View license
    def build_extensions(self):
        """Compile the python extension module for further installation."""
        global final_supported_archs

        ext_extra_objects = []
        ext_libs = []
        ext_libs_dir = []
        ext_includes = []

        self.platform = CustomBuildExtension.PLATFORMS.get( sys.platform, None )
        if self.platform == None:
            raise Exception("unsupported platform: %s" % sys.platform)

        if self.with_static_binutils: # the user has specified a custom binutils distro.
            print "[+] Using specific binutils static distribution"
            print "[+]   %s" % self.with_static_binutils
            self.platform["libs"] = [os.path.join( self.with_static_binutils, "lib"),]
            self.platform["includes"] = [os.path.join( self.with_static_binutils, "include"),]
            self.platform["possible-lib-ext"] = [".a",] # for all unix platforms.

        # check for known includes
        for inc in self.platform["includes"]:
            if self.check_includes(inc):
                self.includes = inc # found a valid include dir with binutils
                break
        if self.includes == None:
            raise Exception("unable to determine correct include path for bfd.h / dis-asm.h")

        print "[+] Using binutils headers at:"
        print "[+]   %s" % self.includes

        # we'll use this include path for building.
        ext_includes = [self.includes, ]

        # Try to guess libopcodes / libbfd libs.
        libs_dirs = self.platform["libs"]
        print "[+] Searching binutils libraries..."
        for libdir in libs_dirs:
            for possible_lib_ext in self.platform["possible-lib-ext"]:
                libs = self.find_binutils_libs(libdir, possible_lib_ext)
                if libs:
                    if self.libs:
                        self.libs = self.libs + libs
                    else:
                        self.libs = libs
                    break

        if self.libs == None:
            raise Exception("unable to find binutils libraries.")

        for lib in self.libs:
            print "[+]   %s" % lib
        #
        # check for libopcodes / libbfd
        #
        libnames = [os.path.basename(lib) for lib in self.libs]
        libraries_paths = [os.path.dirname(lib) for lib in self.libs]
        libraries_paths = list(set(libraries_paths))  # removing duplicates
        if not all( [lib.startswith("libopcodes") or lib.startswith("libbfd") for lib in libnames] ):
            raise Exception("missing expected library (libopcodes / libbfd) in %s." % "\n".join(libraries_paths))

        ext_libs_dir += libraries_paths

        if self.with_static_binutils:
            # use libs as extra objects...
            ext_extra_objects.extend( self.libs )
        else:
            ext_libs = [self.prepare_libs_for_cc(os.path.basename(lib)) for lib in self.libs]

        # add dependency on libiberty
        if self.with_static_binutils or sys.platform == "darwin": # on OSX we always need a static libiberty.

            lib_liberty_partialpath = libraries_paths
            if sys.platform == "darwin": # in osx the lib-iberty is prefixe by "machine" ppc/i386/x86_64
                lib_liberty_partialpath.append( self._darwin_current_arch() )
            lib_liberty_partialpath.append( "libiberty.a" )

            lib_liberty_fullpath = os.path.join(*lib_liberty_partialpath ) # merge the prefix and the path
            if not os.path.isfile(lib_liberty_fullpath):
                raise Exception("missing expected library (libiberty) in %s." % "\n".join(libraries_paths))
            ext_extra_objects.append(lib_liberty_fullpath)

        # generate .py / .h files that depend on the libopcodes / libbfd currently selected
        final_supported_archs = self.generate_source_files()

        # final hacks for OSX
        if sys.platform == "darwin":
            # fix arch value.
            os.environ["ARCHFLAGS"] = "-arch %s" % self._darwin_current_arch()
            # In OSX we've to link against libintl.
            ext_libs.append("intl")

            # TODO: we have to improve the detection of gettext/libintl in OSX.. this is a quick fix.
            dirs = [
                "/usr/local/opt/gettext/lib", # homebrew
                "/opt/local/lib" # macports
            ]
            for d in dirs:
                if os.path.exists(d):
                    ext_libs_dir.append(d)

        # fix extensions.
        for extension in self.extensions:
            extension.include_dirs.extend( ext_includes )
            extension.extra_objects.extend( ext_extra_objects )
            extension.libraries.extend( ext_libs )
            extension.library_dirs.extend( ext_libs_dir )

        return build_ext.build_extensions(self)
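
The build class keeps its per-platform settings in a PLATFORMS dict keyed by sys.platform and fetches them with .get(), failing loudly for unknown platforms. A small self-contained sketch of that table-driven style (the paths are purely illustrative):

    import sys

    # Per-platform build settings, looked up by sys.platform.
    PLATFORMS = {
        "linux":  {"libs": ["/usr/lib"], "includes": ["/usr/include"], "lib_ext": [".so", ".a"]},
        "linux2": {"libs": ["/usr/lib"], "includes": ["/usr/include"], "lib_ext": [".so", ".a"]},
        "darwin": {"libs": ["/usr/local/opt/binutils/lib"],
                   "includes": ["/usr/local/opt/binutils/include"],
                   "lib_ext": [".a"]},
    }

    platform = PLATFORMS.get(sys.platform, None)
    if platform is None:
        raise Exception("unsupported platform: %s" % sys.platform)
    print(platform["includes"])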

Example 29

Project: parlparse
Source File: runfilters.py
View license
def RunFilterFile(FILTERfunction, xprev, sdate, sdatever, dname, jfin, patchfile, jfout, bquietc):
    # now apply patches and parse
    patchtempfilename = tempfile.mktemp("", "pw-applypatchtemp-", miscfuncs.tmppath)

    if not bquietc:
        print "reading " + jfin

    # apply patch filter
    kfin = jfin
    if os.path.isfile(patchfile) and ApplyPatches(jfin, patchtempfilename, patchfile):
        kfin = patchtempfilename

    # read the text of the file
    ofin = open(kfin)
    text = ofin.read()
    ofin.close()

    # do the filtering according to the type.  Some stuff is being inlined here
    if dname == 'regmem' or dname == 'votes' or dname == 'ni':
        regmemout = open(tempfilename, 'w')
        try:
            FILTERfunction(regmemout, text, sdate, sdatever)  # totally different filter function format
        finally:
            regmemout.close()
        # in win32 this function leaves the file open and stops it being renamed
        if sys.platform != "win32":
            xmlvalidate.parse(tempfilename) # validate XML before renaming
        if os.path.isfile(jfout):
            os.remove(jfout)
        os.rename(tempfilename, jfout)
        return

    safejfout = jfout
    assert dname in ('wrans', 'debates', 'wms', 'westminhall', 'lordspages')

    decode_from_utf8 = False
    if sdate > '2014-01-01' or (sdate > '2006-05-07' and re.search('<notus-date', text)):
        decode_from_utf8 = True
        text = re.sub("\n", ' ', text)
        text = re.sub("\s{2,}", ' ', text) # No need for multiple spaces anywhere
        text = re.sub("</?notus-date[^>]*>", "", text)
        text = re.sub("\s*<meta[^>]*>\s*", "", text)
        text = re.sub('(<h5 align="left">)((?:<a name="(.*?)">)*)', r"\2\1", text) # If you can't beat them, ...
        text = re.sub("(<br><b>[^:<]*:\s*column\s*\d+(?:WH)?\s*</b>)(\s+)(?i)", r"\1<br>\2", text)
        text = re.sub("(\s+)(<b>[^:<]*:\s*column\s*\d+(?:WH)?\s*</b><br>)(?i)", r"\1<br>\2", text)

        # Make sure correction is before written answer question number
        text = re.sub('(<a href="[^"]*corrtext[^"]*")\s*shape="rect">\s*(.*?)\s*(</a>)', r'\1>\2\3', text)
        text = re.sub('(\[\d+\])\s*((?:</p>)?)\s*(<a href="[^"]*corrtext[^"]*">.*?</a>)', r'\3 \1\2', text)

        # Fix new thing where they sometimes put (a), (b) of wrans, or "Official Report", in separate paragraphs
        # Two regular expressions, so as not to lose needed end </p> of a column heading.
        italic_para = '\s*<p>\s*(<i>\s*(?:\(.\)|Official Report,?)\s*</i>)\s*</p>\s*'
        text = re.sub('(?<!</b>)</p>' + italic_para + '<p[^>]*>', r' \1 ', text)
        text = re.sub('(?<=</b></p>)' + italic_para + '<p[^>]*>', r' \1 ', text)

        # May also need the same thing with a space, and look behind requires a fixed width pattern.
        text = re.sub('(?<!</b>) </p>' + italic_para + '<p[^>]*>', r' \1 ', text)
        text = re.sub('(?<=</b> </p>)' + italic_para + '<p[^>]*>', r' \1 ', text)
                
        # Don't want bad XHTML self closed table cells.
        text = re.sub('<td([^>]*) ?/>', r'<td\1></td>', text)
        # Or pointless empty headings
        text = re.sub('<h[45] align="[^"]*" ?/>', '', text)

        # Lords, big overall replacements
        text = text.replace('<br></br>', '<br>')
        text = text.replace('<br/>', '<br>')
        if dname == 'lordspages':
            text = re.sub(' shape="rect">', '>', text)
            text = re.sub(' class="anchor"', '', text)
            text = re.sub(' class="anchor noCont"', '', text)
            text = re.sub(' class="anchor-column"', '', text)
            text = re.sub(' class="columnNum"', '', text)
            text = re.sub('(<a[^>]*>) (</a>)', r'\1\2', text)
            text = re.sub('(<h5>)((?:<a name="(.*?)">(?:</a>)?)*)', r"\2\1", text) # If you can't beat them, ...
            text = re.sub('<columnNum><br />( |\xc2\xa0)<br />', '<br>&nbsp;<br>', text)
            text = re.sub('<br />( |\xc2\xa0)<br /></columnNum>', '<br>&nbsp;<br>', text)
            text = text.replace('<b align="center">', '<b>')
            text = text.replace('<br />', '<br>')
            text = text.replace('CONTENTS', 'CONTENTS\n')
            text = re.sub('</?small>', '', text)
            text = re.sub('<div class="amendment(?:_heading)?">', '', text)
            text = re.sub('</?div>', '', text)
            # Double bolding sometimes has some <a> tags in between
            text = re.sub(r'<b>((?:</?a[^>]*>|\s)*)<b>', r'\1<b>', text)
            text = re.sub('</b></b>', '</b>', text)
            text = re.sub('</b><b>', '', text)
            text = re.sub('<I></I>', '', text)

    # Changes in 2008-09 session
    if sdate>'2008-12-01' and dname=='lordspages':
        text = re.sub('(?i)Asked By (<b>.*?)</b>', r'\1:</b>', text)
        text = re.sub('(?i)((?:Moved|Tabled) By) ?((?:<a name="[^"]*"></a>)*)<b>(.*?)</b>', r'\1 \2\3', text)
        text = re.sub('(?i)(Moved on .*? by) ?<b>(.*?)</b>', r'\1 \2', text)

    if decode_from_utf8:
        # Some UTF-8 gets post-processed into nonsense
        # XXX - should probably be in miscfuncs.py/StraightenHTMLrecurse with other character set evil
        text = text.replace("\xe2\x22\xa2", "&trade;")
        text = text.replace("\xc2(c)", "&copy;")
        text = text.replace("\xc2(r)", "&reg;")
        text = text.replace("\xc21/4", "&frac14;")
        text = text.replace("\xc21/2", "&frac12;")
        text = text.replace("\xc23/4", "&frac34;")
        text = text.replace("\xc3\"", "&#279;")
        text = text.replace("\xc3 ", "&agrave;")
        text = text.replace("\xc3(c)", "&eacute;")
        text = text.replace("\xc3(r)", "&icirc;")
        text = text.replace("\xc31/4", "&uuml;")
        # And it's true UTF-8 since the start of the 2009 session, let's pretend it isn't.
        try:
            text = text.decode('utf-8').encode('ascii', 'xmlcharrefreplace')
        except:
            print "Failed to decode text from utf-8"
            pass

    # They've started double bolding names, parts of names, splitting names up, and having a "[" on its own
    if sdate >= '2013-01-01':
        text = re.sub(r'</b>(\s*)<b>', r'\1', text)
        # <b> <b>Name</b> (Constituency) (Party):</b>
        text = re.sub('<b>\s*<b>([^<]*)</b>([^<]*)</b>', r'<b>\1\2</b>', text)
        # <b><b>Name bits:</b></b>
        text = re.sub('<b>\s*(<b>([^<]|<i>\s*\(Urgent Question\)\s*</i>)*</b>\s*)</b>', r'\1', text)
        # <p> <b>[</b> </p> <p> <b>TIME</b> </p>
        text = re.sub('<p>\s*<b>\[</b>\s*</p>\s*<p>\s*<b>([^<]*)</b>\s*</p>', r'<p> <b>[\1</b> </p>', text)
        # And have changed <center> to <span class="centred">
        text = re.sub('<span class="centred">(.*?)</span>', r'<center>\1</center>', text)

    if sdate >= '2015-10-12':
        # annoying double <b> round members rose text
        text = re.sub(r'<b><b>Several hon. Members </b>', '<b>Several hon. Members ', text)

    if sdate >= '2016-01-01':
        # Deal with big heading spotting aname appearing AFTER heading
        text = re.sub('(<h3(?:(?!<h3).)*?)(<a name="ordayhd_\d">)', r'\2\1', text)

    (flatb, gidname) = FILTERfunction(text, sdate)
    for i in range(len(gidname)):
        tempfilenameoldxml = None

        gidnam = gidname[i]
        if gidname[i] == 'lordswms':
            gidnam = 'wms'
        if gidname[i] == 'lordswrans':
            gidnam = 'wrans'
        CreateGIDs(gidnam, sdate, sdatever, flatb[i])
        jfout = safejfout
        if gidname[i] != 'lords':
            jfout = re.sub('(daylord|lordspages)', gidname[i], jfout)

        # wrans case is special, with its question-id numbered gids
        if dname == 'wrans':
            majblocks = CreateWransGIDs(flatb[i], (sdate + sdatever)) # combine the date and datever.  the old style gids stand on the paragraphs still
            bMakeOldWransGidsToNew = (sdate < "2005")

        fout = open(tempfilename, "w")
        WriteXMLHeader(fout);
        fout.write('<publicwhip scrapeversion="%s" latest="yes">\n' % sdatever)

        # go through and output all the records into the file
        if dname == 'wrans':
            for majblock in majblocks:
                WriteXMLspeechrecord(fout, majblock[0], bMakeOldWransGidsToNew, True)
                for qblock in majblock[1]:
                    qblock.WriteXMLrecords(fout, bMakeOldWransGidsToNew)
        else:
            for qb in flatb[i]:
                WriteXMLspeechrecord(fout, qb, False, False)
        fout.write("</publicwhip>\n\n")
        fout.close()

        # load in a previous file and over-write it if necessary
        if xprev:
            xprevin = xprev[0]
            if gidname[i] != 'lords':
                xprevin = re.sub('(daylord|lordspages)', gidname[i], xprevin)
            if os.path.isfile(xprevin):
                xin = open(xprevin, "r")
                xprevs = xin.read()
                xin.close()

                # separate out the scrape versions
                mpw = re.search('<publicwhip([^>]*)>\n([\s\S]*?)</publicwhip>', xprevs)
                if mpw.group(1):
                    assert re.match(' scrapeversion="([^"]*)" latest="yes"', mpw.group(1)).group(1) == xprev[1]
                # else it's old style xml files that had no scrapeversion or latest attributes
                if dname == 'wrans':
                    xprevcompress = FactorChangesWrans(majblocks, mpw.group(2))
                else:
                    xprevcompress = FactorChanges(flatb[i], mpw.group(2))

                tempfilenameoldxml = tempfile.mktemp(".xml", "pw-filtertempold-", miscfuncs.tmppath)
                foout = open(tempfilenameoldxml, "w")
                WriteXMLHeader(foout)
                foout.write('<publicwhip scrapeversion="%s" latest="no">\n' % xprev[1])
                foout.writelines(xprevcompress)
                foout.write("</publicwhip>\n\n")
                foout.close()

        # in win32 this function leaves the file open and stops it being renamed
        if sys.platform != "win32":
            xmlvalidate.parse(tempfilename) # validate XML before renaming

        # in case of error, an exception is thrown, so this line would not be reached
        # we rename both files (the old and new xml) at once

        if os.path.isfile(jfout):
            os.remove(jfout)
        if not os.path.isdir(os.path.dirname(jfout)):  # Lords output directories need making here
            os.mkdir(os.path.dirname(jfout))
        os.rename(tempfilename, jfout)

        # copy over onto old xml file
        if tempfilenameoldxml:
            if sys.platform != "win32":
                xmlvalidate.parse(tempfilenameoldxml) # validate XML before renaming
            assert os.path.isfile(xprevin)
            os.remove(xprevin)
            os.rename(tempfilenameoldxml, xprevin)
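
The pattern repeated twice above — validate the temporary XML (skipped on win32, where the validator keeps the file open and blocks the rename), remove any existing destination, then os.rename — fits in a small helper. A sketch, with the validate callable standing in for xmlvalidate.parse:

    import os
    import sys

    def replace_with_validation(tempname, destname, validate):
        """Validate a freshly written XML file, then move it over the old one."""
        if sys.platform != "win32":
            validate(tempname)        # raises if the XML is invalid
        if os.path.isfile(destname):
            os.remove(destname)       # plain os.rename will not overwrite on Windows
        os.rename(tempname, destname)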

Example 30

Project: OpenCobolIDE
Source File: preferences.py
View license
    def reset(self, all_tabs=False):
        settings = Settings()
        # Editor
        if self.tabWidget.currentIndex() == 0 or all_tabs:
            self.cb_cursor_pos_in_bytes.setChecked(
                settings.show_cursor_pos_in_bytes)
            self.checkBoxShowErrors.setChecked(settings.show_errors)
            self.checkBoxViewLineNumber.setChecked(settings.display_lines)
            self.checkBoxHighlightCurrentLine.setChecked(
                settings.highlight_caret)
            self.checkBoxHighlightWhitespaces.setChecked(
                settings.show_whitespaces)
            self.spinBoxEditorTabLen.setValue(settings.tab_len)
            self.checkBoxEditorAutoIndent.setChecked(
                settings.enable_autoindent)
            self.spinBoxEditorCCTriggerLen.setValue(
                settings.code_completion_trigger_len)
            self.rbLowerCaseKwds.setChecked(settings.lower_case_keywords)
            self.rbUpperCaseKwds.setChecked(not settings.lower_case_keywords)
            self.lineEditCommentIndicator.setText(settings.comment_indicator)
            self.checkBoxSmartBackspace.setChecked(
                settings.enable_smart_backspace)
            self.checkBoxAutodetectEOL.setChecked(settings.autodetect_eol)
            self.comboBoxPreferredEOL.setCurrentIndex(settings.preferred_eol)
            self.comboCcFilterMode.setCurrentIndex(
                settings.completion_filter_mode)
            for pos, spin_box, color, picker in zip(
                    settings.margin_positions, self._margin_spin_boxes,
                    settings.margin_colors, self._margin_color_pickers):
                spin_box.setValue(pos + 1)
                picker.color = QtGui.QColor(color)
        # Style
        if self.tabWidget.currentIndex() == 1 or all_tabs:
            rb = (self.radioButtonColorDark if settings.dark_style else
                  self.radioButtonColorWhite)
            rb.setChecked(True)
            index = self.comboBoxIconTheme.findText(QtGui.QIcon.themeName())
            if index != -1:
                self.comboBoxIconTheme.setCurrentIndex(index)
            self.fontComboBox.setCurrentFont(QtGui.QFont(settings.font))
            self.spinBoxFontSize.setValue(settings.font_size)
            self.listWidgetColorSchemes.clear()
            current_index = None
            self.listWidgetColorSchemes.clear()
            for style in PYGMENTS_STYLES:
                self.listWidgetColorSchemes.addItem(style)
                if style == settings.color_scheme:
                    current_index = self.listWidgetColorSchemes.count() - 1
            if current_index:
                self.listWidgetColorSchemes.setCurrentRow(current_index)
        # Run
        if self.tabWidget.currentIndex() == 3 or all_tabs:
            self.checkBoxRunExtTerm.setChecked(settings.external_terminal)
            self.lineEditRunTerm.setVisible(sys.platform != 'win32')
            self.lbl_external_terminal_command.setVisible(
                sys.platform != 'win32')
            self.lineEditRunTerm.setEnabled(settings.external_terminal)
            self.lineEditRunTerm.setText(settings.external_terminal_command)
            self.tw_run_env.clearContents()
            self.tw_run_env.setRowCount(0)
            for key, value in Settings().run_environemnt.items():
                index = self.tw_run_env.rowCount()
                self.tw_run_env.insertRow(index)
                self.tw_run_env.setItem(
                    index, 0, QtWidgets.QTableWidgetItem(key))
                self.tw_run_env.setItem(
                    index, 1, QtWidgets.QTableWidgetItem(value))
            self.edit_working_dir.setText(settings.working_dir)
        # compiler
        if self.tabWidget.currentIndex() == 2 or all_tabs:
            self.cbAutoDetectSublmodules.setChecked(
                Settings().autodetect_submodules)
            self.cb_copy_runtime_dlls.setVisible(sys.platform == 'win32')
            self.cb_copy_runtime_dlls.setChecked(Settings().copy_runtime_dlls)
            self.lineEditOutputDirectory.setText(Settings().output_directory)
            self.lineEditCobcExts.setText(';'.join(Settings().cobc_extensions))
            self.checkBoxFreeFormat.setChecked(settings.free_format)
            self.comboBoxStandard.setCurrentIndex(
                int(settings.cobol_standard))
            self.lineEditCompilerPath.setText(settings.compiler_path)
            flags = Settings().compiler_flags
            self.cb_debugging_line.setChecked(
                self.cb_debugging_line.text() in flags)
            self.cb_ftrace.setChecked(self.cb_ftrace.text().replace('&', '') in flags)
            self.cb_ftraceall.setChecked(self.cb_ftraceall.text().replace('&', '') in flags)
            self.cb_g.setChecked(self.cb_g.text().replace('&', '') in flags)
            self.cb_static.setChecked(self.cb_static.text().replace('&', '') in flags)
            self.cb_debug.setChecked(self.cb_debug.text().replace('&', '') in flags)
            self.cb_w.setChecked(self.cb_w.text().replace('&', '') in flags)
            self.cb_wall.setChecked(self.cb_wall.text().replace('&', '') in flags)
            for v in self.flags_in_checkbox:
                try:
                    flags.remove(v)
                except ValueError:
                    pass
            self.lineEditLibs.setText(settings.libraries)
            self.listWidgetLibPaths.addItems(
                [pth for pth in settings.library_search_path.split(';')
                 if pth])
            self.listWidgetCopyPaths.addItems(
                [pth for pth in settings.copybook_paths.split(';')
                 if pth])
            self.le_compiler_flags.setText(' '.join(flags))
            if system.windows:
                self.lineEditVCVARS.setText(settings.vcvarsall)
                self.combo_arch.setCurrentIndex(
                    1 if settings.vcvarsall_arch == 'x64' else 0)
            self.PATH.setText(settings.path)
            self.cbPATH.setChecked(settings.path_enabled)
            self.COB_CONFIG_DIR.setText(settings.cob_config_dir)
            self.cbCOB_CONFIG_DIR.setChecked(settings.cob_config_dir_enabled)
            self.COB_COPY_DIR.setText(settings.cob_copy_dir)
            self.cbCOB_COPY_DIR.setChecked(settings.cob_copy_dir_enabled)
            self.COB_INCLUDE_PATH.setText(settings.cob_include_path)
            self.cbCOB_INCLUDE_PATH.setChecked(settings.cob_include_path_enabled)
            self.COB_LIB_PATH.setText(settings.cob_lib_path)
            self.cbCOB_LIB_PATH.setChecked(settings.cob_lib_path_enabled)

        # SQL Cobol
        if self.tabWidget.currentIndex() == 4 or all_tabs:
            self.lineEditDbpreExts.setText(';'.join(Settings().dbpre_extensions))
            self.lineEditDbpre.setText(settings.dbpre)
            self.lineEditDbpreFramework.setText(settings.dbpre_framework)
            self.lineEditCobmysqlapi.setText(settings.cobmysqlapi)
            self.lineEditDBHOST.setText(settings.dbhost)
            self.lineEditDBUSER.setText(settings.dbuser)
            self.lineEditDBPASSWD.setText(settings.dbpasswd)
            self.lineEditDBNAME.setText(settings.dbname)
            self.lineEditDBPORT.setText(settings.dbport)
            self.lineEditDBSOCKET.setText(settings.dbsocket)
            self.labelDbpreVersion.setText(
                compilers.DbpreCompiler().get_version()
                if Settings().dbpre != '' else '')
            self.lineEditESQLOC.setText(settings.esqloc)
            self.lineEditesqlOcExts.setText(';'.join(Settings().esqloc_extensions))
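
The dialog above uses sys.platform twice: the external-terminal command widgets are hidden on Windows (sys.platform != 'win32'), while the "copy runtime DLLs" option is shown only there (sys.platform == 'win32'). A minimal sketch of the same visibility toggle, using hypothetical widget names rather than the attributes in the dialog above:

import sys

def apply_platform_visibility(dialog):
    # Illustrative widget names, not the ones used in the dialog above.
    on_windows = sys.platform == 'win32'
    dialog.lineEditTerminalCommand.setVisible(not on_windows)  # no external terminal command on Windows
    dialog.checkBoxCopyRuntimeDlls.setVisible(on_windows)      # runtime DLLs only matter on Windows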

Example 31

Project: C-PAC
Source File: main_window.py
View license
    def __init__(self, parent, id, title):
        wx.Frame.__init__(self, parent, id, title, size=(700, 650),  style= wx.SYSTEM_MENU | wx.CAPTION | wx.CLOSE_BOX)

        # Import packages
        import CPAC
        
        self.CreateStatusBar()
        self.SetStatusText("The Configurable Pipeline for the Analysis of Connectomes (C-PAC) v" + CPAC.__version__)
    
        self.pipeline_map = {}
        self.sublist_map= {}
        
        mainPanel = wx.Panel(self)
        mainPanel.SetBackgroundColour('#E9E3DB')
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        
        outerPanel1 = wx.Panel(mainPanel)
        outerSizer1 = wx.BoxSizer(wx.HORIZONTAL)
        
        outerPanel2 = wx.Panel(mainPanel)
        outerSizer2 = wx.BoxSizer(wx.HORIZONTAL)
              
        innerPanel1 = wx.Panel(outerPanel1)
        innerSizer1 = wx.BoxSizer(wx.HORIZONTAL)
         
        innerPanel2 = wx.Panel(outerPanel1, )
        innerSizer2 = wx.BoxSizer(wx.HORIZONTAL)
                
        lboxPanel1 = wx.Panel(innerPanel1)
        lboxSizer1 = wx.BoxSizer(wx.VERTICAL)
        btnPanel1 = wx.Panel(innerPanel1, -1)
        btnSizer1 = wx.BoxSizer(wx.VERTICAL)
        
        label = wx.StaticText(lboxPanel1, -1, "Pipelines")
        
        if 'linux' in sys.platform:
            label.SetFont(wx.Font(12, wx.SWISS, wx.NORMAL, wx.BOLD))
        else:
            label.SetFont(wx.Font(16, wx.SWISS, wx.NORMAL, wx.BOLD))
            
        self.listbox = wx.CheckListBox(lboxPanel1, -1, size = (160,400))
        
        lboxSizer1.Add(label, 0, wx.ALIGN_CENTER)
        lboxSizer1.Add(self.listbox, 1, wx.EXPAND | wx.ALL, 10)
        lboxPanel1.SetSizer(lboxSizer1)
        
        lboxPanel1.SetBackgroundColour('#E9E3DB')
        
        new = wx.Button(btnPanel1, ID_NEW, 'New', size=(90, 30))
        ren = wx.Button(btnPanel1, ID_RENAME, 'Rename', size=(90, 30))
        dlt = wx.Button(btnPanel1, ID_DELETE, 'Delete', size=(90, 30))
        load = wx.Button(btnPanel1, ID_LOAD, 'Load', size=(90,30))
        edit = wx.Button(btnPanel1, ID_EDIT, 'Edit', size=(90,30))
        shw = wx.Button(btnPanel1, ID_DISPLAY, 'View', size=(90,30))
        clr = wx.Button(btnPanel1, ID_CLEAR, 'Clear', size=(90, 30))
    
        self.Bind(wx.EVT_BUTTON, self.NewItem, id=ID_NEW)
        self.Bind(wx.EVT_BUTTON, self.OnRename, id=ID_RENAME)
        self.Bind(wx.EVT_BUTTON, self.OnDelete, id=ID_DELETE)
        self.Bind(wx.EVT_BUTTON, self.AddConfig, id=ID_LOAD)
        self.Bind(wx.EVT_BUTTON, self.OnEdit, id=ID_EDIT)
        self.Bind(wx.EVT_BUTTON, self.OnDisplay, id= ID_DISPLAY)
        self.Bind(wx.EVT_BUTTON, lambda event: self.OnClear(event, 1), id=ID_CLEAR)
        self.Bind(wx.EVT_LISTBOX_DCLICK, self.OnDisplay)        

        if 'linux' in sys.platform:
            btnSizer1.Add((-1,30))
        else:
            btnSizer1.Add((-1, 27))
        
        btnSizer1.Add(new, 0, wx.TOP)
        btnSizer1.Add(load, 0, wx.TOP)
        btnSizer1.Add(edit, 0, wx.TOP)
        btnSizer1.Add(shw, 0, wx.TOP)
        btnSizer1.Add(ren, 0, wx.TOP)
        btnSizer1.Add(dlt, 0, wx.TOP)
        btnSizer1.Add(clr, 0, wx.TOP)
        btnPanel1.SetSizer(btnSizer1)
        
        btnPanel1.SetBackgroundColour('#E9E3DB')
                
        innerSizer1.Add(lboxPanel1, 1, wx.EXPAND | wx.ALL)
        
        if 'linux' in sys.platform:
            innerSizer1.Add(btnPanel1, 1, wx.EXPAND | wx.ALL, 5)
        else:
            innerSizer1.Add(btnPanel1, 1, wx.EXPAND | wx.ALL)
        
        innerPanel1.SetSizer(innerSizer1)
        innerPanel1.SetBackgroundColour('#E9E3DB')
        
        lboxPanel2 = wx.Panel(innerPanel2)
        lboxSizer2 = wx.BoxSizer(wx.VERTICAL)
        btnPanel2 = wx.Panel(innerPanel2, -1)
        btnSizer2 = wx.BoxSizer(wx.VERTICAL)

        label2 = wx.StaticText(lboxPanel2, -1, "Subject Lists")
        
        if 'linux' in sys.platform:
            label2.SetFont(wx.Font(12, wx.SWISS, wx.NORMAL, wx.BOLD))
        else:
            label2.SetFont(wx.Font(16, wx.SWISS, wx.NORMAL, wx.BOLD))
            
        self.listbox2 = wx.CheckListBox(lboxPanel2, -1, size= (160,400)) 
        self.listbox2.Bind(wx.EVT_LISTBOX_DCLICK, self.OnShow)
        lboxSizer2.Add(label2, 0,wx.ALIGN_CENTER)
        lboxSizer2.Add(self.listbox2, 1, wx.EXPAND | wx.ALL, 10)
        lboxPanel2.SetSizer(lboxSizer2)
        
        lboxPanel2.SetBackgroundColour('#E9E3DB')
                
        create = wx.Button(btnPanel2, ID_CREATE, 'New', size=(90, 30))
        add = wx.Button(btnPanel2, ID_ADD, 'Load', size= (90,30))
        show = wx.Button(btnPanel2, ID_SHOW, 'View', size= (90,30))
        clr2 = wx.Button(btnPanel2, ID_CLEARALL, 'Clear', size=(90, 30))
        self.Bind(wx.EVT_BUTTON, self.CreateItem, id=ID_CREATE)
        self.Bind(wx.EVT_BUTTON, self.AddItem, id=ID_ADD)
        self.Bind(wx.EVT_BUTTON, self.OnShow, id= ID_SHOW)
        self.Bind(wx.EVT_BUTTON, lambda event: self.OnClear(event, 2), id=ID_CLEARALL)
        
        if 'linux' in sys.platform:
            btnSizer2.Add((-1,30))
        else:
            btnSizer2.Add((-1, 27))

        # Add buttons to button sizer
        btnSizer2.Add(create, 0, wx.TOP)
        btnSizer2.Add(add, 0, wx.TOP)
        btnSizer2.Add(show, 0, wx.TOP)
        btnSizer2.Add(clr2, 0, wx.TOP)
        btnPanel2.SetSizer(btnSizer2)
        btnPanel2.SetBackgroundColour('#E9E3DB')
        
        innerSizer2.Add(lboxPanel2, 1, wx.EXPAND | wx.ALL)
        
        if 'linux' in sys.platform:
            innerSizer2.Add(btnPanel2, 1, wx.EXPAND | wx.ALL, 5)
        else:
            innerSizer2.Add(btnPanel2, 1, wx.EXPAND | wx.ALL)
        
        innerPanel2.SetSizer(innerSizer2)
        innerPanel2.SetBackgroundColour('#E9E3DB')
        
        outerSizer1.Add(innerPanel2, 1, wx.EXPAND | wx.ALL)
        outerSizer1.Add(innerPanel1, 1, wx.EXPAND | wx.ALL)
        
        outerPanel1.SetSizer(outerSizer1)
        outerPanel1.SetBackgroundColour('#E9E3DB')
        

        self.runCPAC1 = wx.Button(outerPanel2, -1, 'Run Individual Level Analysis')
        self.runCPAC1.Bind(wx.EVT_BUTTON, self.runIndividualAnalysis)
        

        self.runCPAC2 =  wx.Button(outerPanel2, -1, 'Run Group Level Analysis')
        self.runCPAC2.Bind(wx.EVT_BUTTON, self.runGroupLevelAnalysis)

        outerSizer2.Add(self.runCPAC1, 1, wx.RIGHT, 20)
        outerSizer2.Add(self.runCPAC2, 1, wx.LEFT, 20)

        outerPanel2.SetSizer(outerSizer2)
        outerPanel2.SetBackgroundColour('#E9E3DB')
        
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        text1 = wx.StaticText(mainPanel, -1, "Configure CPAC")
        
        if 'linux' in sys.platform:
            text1.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
        else:
            text1.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.BOLD))
            
        img = wx.Image(p.resource_filename('CPAC', 'GUI/resources/images/cpac_new_logo.png'), wx.BITMAP_TYPE_PNG).ConvertToBitmap()
        logo = wx.StaticBitmap(mainPanel, -1, img)
        hbox.Add(text1, 1, wx.TOP | wx.EXPAND, 15)
        hbox.Add(logo, 0,wx.ALIGN_RIGHT | wx.RIGHT)
        
        text2 = wx.StaticText(mainPanel, -1, "Run CPAC")
        
        if 'linux' in sys.platform:
            text2.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
        else:
            text2.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.BOLD))
        
        mainSizer.Add(hbox, 0, wx.EXPAND | wx.ALL,10)
        mainSizer.Add(outerPanel1, 1, wx.EXPAND | wx.ALL, 20)
        mainSizer.Add(wx.StaticLine(mainPanel), 0, wx.EXPAND|wx.TOP|wx.BOTTOM, 10)
        mainSizer.Add(text2, 0, wx.EXPAND| wx.ALL, 5)
        mainSizer.Add(outerPanel2, 0 ,wx.EXPAND | wx.ALL, 20)
        
        mainPanel.SetSizer(mainSizer)

        self.Centre()
        self.Show(True)
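
Every platform branch above uses a substring test, 'linux' in sys.platform, instead of an equality check. That matters on Python 2, where sys.platform is 'linux2' on Linux; from Python 3.3 onwards it is plain 'linux', and the substring test covers both. A minimal sketch of the same idea (the point sizes are just the values used above):

import sys

def heading_font_size():
    # 'linux' in sys.platform matches 'linux2' (Python 2) as well as 'linux' (Python 3.3+)
    return 12 if 'linux' in sys.platform else 16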

Example 32

Project: visionegg
Source File: PlatformDependent.py
View license
def set_priority(*args,**kw):
    """Set the priority of the Vision Egg application.

    Defaults to maximum priority, but can be changed via keyword
    arguments.

    Raises an exception on failure.
    """

    # potential keywords
    parse_me = ["darwin_realtime_period_denom",
                "darwin_realtime_computation_denom",
                "darwin_realtime_constraint_denom",
                "darwin_realtime_preemptible",
                "darwin_maxpriority_conventional_not_realtime",
                "darwin_conventional_priority",
                "darwin_pthread_priority"]

    logger = logging.getLogger('VisionEgg.PlatformDependent')
    params = {}

    # set variable in local namespace
    for word in parse_me:
        # set the value from VisionEgg.config
        config_name = "VISIONEGG_"+word.upper()
        if hasattr(VisionEgg.config,config_name):
            value = getattr(VisionEgg.config,config_name)
        else:
            value = None
        # override default value if present in keyword arguments
        if word in kw.keys():
            value = kw[word]
        if value is not None:
            params[word] = value

    if sys.platform == 'darwin':

        # Everything to support realtime in Apple Mac OS X is based on
        # the following two things:
        #
        # 1) http://developer.apple.com/techpubs/macosx/Darwin/General/KernelProgramming/scheduler/Using_Mach__pplications.html
        #
        # 2) The Mac OS X port of the Esound daemon.

        import darwin_maxpriority

        if params['darwin_maxpriority_conventional_not_realtime']:
            process = darwin_maxpriority.PRIO_PROCESS
            policy = darwin_maxpriority.SCHED_RR

            logger.info("Setting max priority mode for darwin platform "
                        "using conventional priority %d."%(
                        params['darwin_conventional_priority'],))

            # set the priority of the current process
            darwin_maxpriority.setpriority(process,0,params['darwin_conventional_priority'])

            # This sets the pthread priority, which only prioritizes
            # threads in the process.  Might as well do it, but it
            # shouldn't matter unless we're running multi-threaded.
            darwin_pthread_priority = params['darwin_pthread_priority']
            if darwin_pthread_priority == "max": # should otherwise be an int
                darwin_pthread_priority = darwin_maxpriority.sched_get_priority_max(policy)

            if darwin_maxpriority.set_self_pthread_priority(policy,
                                                            darwin_pthread_priority) == -1:
                raise RuntimeError("set_self_pthread failed.")

        else:
            bus_speed = darwin_maxpriority.get_bus_speed()
            logger.info("Setting max priority mode for darwin platform "
                        "using realtime threads. ( period = %d / %d, "
                        "computation = %d / %d, constraint = %d / %d, "
                        "preemptible = %d )" % (
                        bus_speed, params['darwin_realtime_period_denom'],
                        bus_speed, params['darwin_realtime_computation_denom'],
                        bus_speed, params['darwin_realtime_constraint_denom'],
                        params['darwin_realtime_preemptible'] ))
            period = bus_speed / params['darwin_realtime_period_denom']
            computation = bus_speed / params['darwin_realtime_computation_denom']
            constraint = bus_speed / params['darwin_realtime_constraint_denom']
            preemptible = params['darwin_realtime_preemptible']

            darwin_maxpriority.set_self_thread_time_constraint_policy( period, computation, constraint, preemptible )
    elif sys.platform == 'win32':
        import win32_maxpriority
        logger.info("Setting priority for win32 platform to "
                    "HIGH_PRIORITY_CLASS, THREAD_PRIORITY_HIGHEST. "
                    "(This is Microsoft's maximum recommended priority, "
                    "but you could still raise it higher.)")
        win32_maxpriority.set_self_process_priority_class(
            win32_maxpriority.HIGH_PRIORITY_CLASS )
        win32_maxpriority.set_self_thread_priority(
            win32_maxpriority.THREAD_PRIORITY_HIGHEST)

    elif sys.platform.startswith('irix') or sys.platform.startswith('linux') or sys.platform.startswith('posix'):
        import posix_maxpriority
        policy = posix_maxpriority.SCHED_FIFO
        max_priority = posix_maxpriority.sched_get_priority_max( policy )
        logger.info("Setting priority for POSIX-compatible platform to "
                    "policy SCHED_FIFO and priority to "
                    "%d"%max_priority)
        posix_maxpriority.set_self_policy_priority( policy, max_priority ) # Fails if you don't have permission (try running as root)
        posix_maxpriority.stop_memory_paging()
    else:
        raise RuntimeError("Cannot change priority.  Unknown platform '%s'"%sys.platform)
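
set_priority() above is a straight platform dispatch: an exact match for 'darwin' and 'win32', startswith() for the POSIX family ('irix', 'linux', 'posix'), and a RuntimeError for anything else. A small sketch of the same dispatch shape, independent of the Vision Egg modules; the handler names are illustrative only:

import sys

def dispatch_by_platform(handlers):
    """handlers maps a sys.platform prefix (e.g. 'darwin', 'win32', 'linux') to a callable."""
    for prefix, handler in handlers.items():
        if sys.platform.startswith(prefix):
            return handler()
    raise RuntimeError("Cannot dispatch. Unknown platform %r" % sys.platform)

Calling dispatch_by_platform({'darwin': set_mac_priority, 'win32': set_win_priority, 'linux': set_posix_priority}) would mirror the if/elif chain above.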

Example 33

View license
	def __init__(self, data, filename, view, parent):
		super(TerminalView, self).__init__(parent)

		self.view = view
		view.setTabName("Terminal")
		self.setFrameStyle(QFrame.NoFrame)

		if data is None:
			self.proc = TerminalProcess(None)
			self.auto_close = False
		elif hasattr(data, "raw_debug"):
			self.proc = TerminalProcess(data.cmd, data.raw_debug)
			self.auto_close = data.auto_close
		else:
			self.proc = TerminalProcess(data.cmd)
			self.auto_close = data.auto_close

		self.proc.term.update_callback = self.updateLines
		self.proc.term.title_callback = self.updateWindowTitle

		self.proc.exit_callback = self.processExit
		self.proc.start_monitoring()

		self.setCursor(Qt.IBeamCursor)
		self.verticalScrollBar().setCursor(Qt.ArrowCursor)

		# Get font and compute character sizes
		self.initFont()
		self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)

		# Initialize scroll bars
		self.historySize = 0
		self.resizeDisabled = False
		areaSize = self.viewport().size()
		self.adjustSize(areaSize.width(), areaSize.height())

		# Give a small black border around the terminal
		self.setViewportMargins(2, 2, 2, 2)
		pal = QPalette(self.palette())
		pal.setColor(QPalette.Background, Qt.black)
		self.setPalette(pal)
		self.setAutoFillBackground(True)

		self.cursorTimer = QTimer()
		self.cursorTimer.setInterval(500)
		self.cursorTimer.setSingleShot(False)
		self.cursorTimer.timeout.connect(self.cursorTimerEvent)
		self.cursorTimer.start()

		self.cursorY = 0
		self.caretVisible = False
		self.caretBlink = True

		self.selection = False
		self.selectionStartX = 0
		self.selectionStartY = 0
		self.selectionEndX = 0
		self.selectionEndY = 0

		# Control means control
		if sys.platform == 'darwin':
			self.ctrl = Qt.MetaModifier
			self.command = Qt.ControlModifier
			self.ctrl_hotkey = Qt.META
		else:
			self.ctrl = Qt.ControlModifier
			self.command = Qt.ControlModifier
			self.ctrl_hotkey = Qt.CTRL

		self.setFocusPolicy(Qt.StrongFocus)

		if (sys.platform.find('linux') != -1) or (sys.platform.find('freebsd') != -1):
			self.x11 = True
		else:
			self.x11 = False

		# System colors
		dim_colors = [Qt.black, QColor(135, 0, 0), QColor(0, 135, 0), QColor(135, 135, 0),
			QColor(0, 0, 135), QColor(135, 0, 135), QColor(0, 135, 135), QColor(135, 135, 135)]
		normal_colors = [QColor(46, 52, 54), QColor(204, 0, 0), QColor(78, 154, 6), QColor(196, 160, 0),
			QColor(52, 101, 164), QColor(117, 80, 123), QColor(6, 152, 154), QColor(211, 215, 207)]
		bright_colors = [QColor(85, 87, 83), QColor(239, 41, 41), QColor(138, 226, 52), QColor(252, 233, 79),
			QColor(114, 159, 207), QColor(173, 127, 168), QColor(52, 226, 226), Qt.white]

		# Create color arrays for normal mode
		self.fore_colors = [Qt.white] + dim_colors + normal_colors + bright_colors + bright_colors
		self.back_colors = [Qt.black] + normal_colors + bright_colors

		# Create color array for 256-color mode
		self.colors = normal_colors + bright_colors

		values = [0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff]
		for red in values:
			for green in values:
				for blue in values:
					color = QColor(red, green, blue)
					self.colors.append(color)

		values = [0x08, 0x12, 0x1c, 0x26, 0x30, 0x3a, 0x44, 0x4e, 0x58, 0x62, 0x6c, 0x76, 0x80, 0x8a, 0x94, 0x9e,
			0xa8, 0xb2, 0xbc, 0xc6, 0xd0, 0xda, 0xe4, 0xee]
		for gray in values:
			color = QColor(gray, gray, gray)
			self.colors.append(color)
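
Two platform decisions are made above: on macOS the physical Control key arrives as Qt's MetaModifier, so the Control/Command roles are swapped, and X11-style behaviour is enabled when sys.platform contains 'linux' or 'freebsd' (the find() != -1 calls). The second test can be written more directly with the in operator; a small sketch:

import sys

def is_x11_platform():
    # Equivalent to sys.platform.find('linux') != -1 or sys.platform.find('freebsd') != -1
    return 'linux' in sys.platform or 'freebsd' in sys.platform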

Example 34

Project: Devede
Source File: devede_video_convert.py
View license
	def __init__(self,global_vars,videofile,filename,filefolder,progresbar,proglabel,disctype,title,chapter,threads,seconds,encpass,fix_ac3):

		""" This class converts a video file to MPEG-1 or MPEG-2 format

		VIDEOFILE contains the parameters to convert the video
		FILENAME is the generic file name given by the user
		FILEFOLDER is the path where all the temporary and final files will be created
		PROGRESBAR is the progress bar where the class will show the progress
		PROGLABEL is the label where the class will show what it is doing
		DISCTYPE can be dvd, vcd, svcd, cvd or divx
		TITLE and CHAPTER are the numbers used to identify the TITLE and CHAPTER number for this file
		THREADS is the number of threads to use
		SECONDS is the number of seconds we want to convert (for previews) 
		ENCPASS is the number of the encoding pass"""
		
		devede_executor.executor.__init__(self,filename,filefolder,progresbar)
		self.printout=False

		self.percent2=120
		if seconds==0:
			self.divide=float(videofile["olength"])
			if (videofile["cutting"]==1) or (videofile["cutting"]==2): # if we want only one half of the file
				self.divide/=2
		else:
			self.divide=float(seconds)

		if self.divide==0:
			self.divide=1

		self.error=""
		progresbar.set_fraction(0)
		progresbar.set_text("")
		
		if videofile["ismpeg"]: # if the file hasn't to be converted, we simply copy or link it
			self.pulse=True
			self.print_error=_("File copy failed\nMaybe you ran out of disk space?")
			if seconds==0:
				texto=_("Copying the file")+"\n"
			else:
				texto=_("Creating preview")+"\n"
			proglabel.set_text(texto+videofile["filename"])
			currentfile=self.create_filename(filefolder+filename,title,chapter,disctype=="divx")
		
			print "\ncurrentfile is: ", currentfile , "\n" 

			try:
				os.remove(currentfile)
			except:
				pass

			if (sys.platform=="win32") or (sys.platform=="win64"):
				# links do not work on windows, so just copy the file
				# self.launch_shell('copy "'+videofile["path"].replace('"','""')+'" "'+currentfile+'"',output=False)
				# Only hardlinks are available on 2000 and XP, reparse points are available from vista onwards.
				win32file.CreateHardLink(currentfile, videofile["path"].replace('"','""'))
			else:
				if len(videofile["sub_list"])==0:
					self.launch_shell('ln -s "'+videofile["path"].replace('"','\\"')+'" "'+currentfile+'"',output=False)
				else:
					self.launch_shell('cp "'+videofile["path"].replace('"','\\"')+'" "'+currentfile+'"',output=False)
			return

		isvob=videofile["isvob"]

		self.pulse=False
		if seconds==0:
			texto=(_("Converting files from title %(title_number)s (pass %(pass_number)s)\n\n%(file_name)s") % {"title_number":str(title),"pass_number":str(encpass),"file_name":videofile["filename"]} )
			proglabel.set_text(texto) #+" "+str(title)+" Pass: "+ str(encpass) +"\n\n"+videofile["filename"] )
		else:
			texto=_("Creating preview")
			proglabel.set_text(texto+"\n"+videofile["filename"])

		addbars=False
		framerate=int(videofile["ofps"])
		videorate=int(videofile["vrate"])
		audiorate=self.adjust_audiorate(int(videofile["arate"]),disctype=="dvd")
		
		audio_final_rate=int(videofile["arateunc"])
		audiodelay=float(videofile["adelay"])
		final_framerate=float(videofile["fps"])
		aspect_ratio_original=videofile["oaspect"]
		aspect_ratio_final=videofile["aspect"]
		resx_final=videofile["width"]
		resy_final=videofile["height"]
		resx_original=videofile["owidth"]
		resy_original=videofile["oheight"]
		copy_audio=videofile["copy_audio"]
		sound51=videofile["sound51"]
		gop12=videofile["gop12"]
		audiostream=videofile["audio_stream"]
		swap_fields=videofile["swap_fields"]
		volume=videofile["volume"]
		if (videofile["resolution"]==0) and (disctype=="divx"):
			default_res=True
		else:
			default_res=False
		
		speed1,speed2=devede_other.get_speedup(videofile)
		if speed1==speed2:
			speedup=None
		else:
			speedup=str(speed1)+":"+str(speed2)
	
		if aspect_ratio_original<1.3:
			aspect_ratio_original=float(videofile["owidth"])/(float(videofile["oheight"]))
		if aspect_ratio_original<1.33333333:
			aspect_ratio_original=1.33333333
	
		max_videorate=int(videorate*2)
		min_videorate=int(videorate*0.75)
		
		dsize,minvid,maxvid=devede_other.get_dvd_size(None,disctype)
		
		if max_videorate>maxvid:
			max_videorate=maxvid
		if min_videorate<minvid:
			min_videorate=minvid
			
		if videofile["blackbars"]==0: # check if has to add black bars
			addbars=True
			if (videofile["rotate"]==90) or (videofile["rotate"]==270):
				resx_original2=resy_original
				resy_original2=resx_original
			else:
				resx_original2=resx_original
				resy_original2=resy_original

			if (resx_original2%2)==1:
				resx_original2+=1
			if (resy_original2%2)==1:
				resy_original2+=1
			resx_inter=resx_original2
			resy_inter=int((resy_original2*aspect_ratio_original)/aspect_ratio_final)
			if (resy_inter%2)==1:
				resy_inter+=1
			
			# due to a bug in MENCODER, we put bars only up and down, never left and right,
			# and we don't scale it if we have to add only 4 or less lines, because it is
			# too much work for so little profit
			
			if ((resy_inter<resy_original) or (resy_original+5>resy_inter)):
				addbars=False

		if addbars==False:
			resx_inter=resx_original
			resy_inter=resy_original
		else:
			addx=0
			addy=int((resy_inter-resy_original)/2)
			if(addy%2)==1:
				addy+=1

		command_var=[]
		if (sys.platform!="win32") and (sys.platform!="win64"):
			command_var=["mencoder"]
		else:
			command_var=["mencoder.exe"]

		if (disctype=="dvd") or (disctype=="divx"):
			audio_desired_final_rate=48000
		else:
			audio_desired_final_rate=44100

		afvalues=""

		if isvob==False:
			if ((audio_final_rate!=audio_desired_final_rate) and (copy_audio==False)) or (speedup!=None):
				command_var.append("-srate")
				command_var.append(str(audio_desired_final_rate))
				afvalues+="lavcresample="+str(audio_desired_final_rate)
			
			if (copy_audio==False) and volume!=100:
				if afvalues!="":
					afvalues+=":"
				afvalues+="volume="+str(10*math.log(volume/10,10))

			# Add the speedup code

			if speedup!=None:
				command_var.append("-speed")
				command_var.append(speedup)

		if afvalues!="":
			command_var.append("-af")
			command_var.append(afvalues)
			
		command_var.append("-noautosub")
		
		command_var.append("-oac")
		if copy_audio or isvob:
			command_var.append("copy")
		else:
			if (disctype=="divx"):
				command_var.append("mp3lame")
			else:
				command_var.append("lavc")

		if (audiostream!=10000):
			command_var.append("-aid")
			command_var.append(str(audiostream))


		
		telecine=False
		if final_framerate==30:
			if (framerate==24) and ((disctype=="dvd") or (disctype=="divx")):
				str_final_framerate="24000/1001"
				keyintv=15
				telecine=True
			else:
				str_final_framerate="30000/1001"
				keyintv=18
		else:
			str_final_framerate=str(int(final_framerate))
			keyintv=15
		
		if gop12:
			keyintv=12
		
		command_var.append("-ovc")
		if isvob:
			command_var.append("copy")
		else:
			command_var.append("lavc")
		
		if (disctype!="divx"):
			command_var.append("-of")
			command_var.append("mpeg")
			command_var.append("-mpegopts")
			if disctype=="dvd":
				if telecine and isvob==False:
					command_var.append("format=dvd:tsaf:telecine")
				else:
					command_var.append("format=dvd:tsaf")
			elif disctype=="vcd":
				command_var.append("format=xvcd")
			elif (disctype=="svcd") or (disctype=="cvd"):
				command_var.append("format=xsvcd")
			else:
				print "Error, disc format incorrect. Talk with the creator."
				sys.exit(1)

		if seconds!=0:
			command_var.append("-endpos")
			command_var.append(str(seconds))
		else:
			if videofile["cutting"]==1: # first half only
				command_var.append("-endpos")
				command_var.append(str(videofile["olength"]/2))
			elif videofile["cutting"]==2: # second half only
				command_var.append("-ss")
				command_var.append(str((videofile["olength"]/2)-5)) # start 5 seconds before

		if (audiodelay!=0.0) and (copy_audio==False) and (isvob==False):
			command_var.append("-delay")
			command_var.append(str(audiodelay))

		if sound51:
			command_var.append("-channels")
			command_var.append("6")

		if (isvob==False) and (default_res==False):
			command_var.append("-ofps")
			command_var.append(str_final_framerate)

		if disctype=="divx":
			command_var.append("-ffourcc")
			command_var.append("DX50")

		lineatemp=""
		acoma=False;
		
		if swap_fields:
			lineatemp+="phase=a"
			acoma=True
		
		extra_params=videofile["params_vf"] # take the VF extra params
		while (extra_params!=""):
			extra_params,new_param=devede_other.get_new_param(extra_params)
			if (new_param!="") and (new_param!=','):
				while (len(new_param)>1) and (new_param[0]==','):
					new_param=new_param[1:]
				while (len(new_param)>1) and (new_param[-1]==','):
					new_param=new_param[:-1]
				if acoma:
					lineatemp+=","
				lineatemp+=new_param
				acoma=True
		
		vmirror=0
		hmirror=0
		passlog_var = None
		
		if videofile["deinterlace"]!="none":
			if acoma:
				lineatemp+=","
			if videofile["deinterlace"]!="yadif":
				lineatemp+="pp="+videofile["deinterlace"]
			else:
				lineatemp+="yadif=0"
			acoma=True
			
		if videofile["rotate"]==180:
			vmirror=1-vmirror
			hmirror=1-hmirror
		
		if videofile["vmirror"]:
			vmirror=1-vmirror
		
		if videofile["hmirror"]:
			hmirror=1-hmirror
		
		if vmirror==1:
			if acoma:
				lineatemp+=","
			lineatemp+="flip"
			acoma=True
		
		if hmirror==1:
			if acoma:
				lineatemp+=","
			lineatemp+="mirror"
			acoma=True
		
		print "Addbars "+str(addbars)+" resx_o "+str(resx_original)+" resy_o "+str(resy_original)
		print "resx_i "+str(resx_inter)+" resy_i "+str(resy_inter)
		if addbars and ((resx_inter!=resx_original) or (resy_inter!=resy_original)) and (default_res==False):
			if acoma:
				lineatemp+=","
			lineatemp+="expand="+str(resx_inter)+":"+str(resy_inter)+":"+str(addx)+":"+str(addy)
			acoma=True

		if videofile["rotate"]==90:
			if acoma:
				lineatemp+=","
			lineatemp+="rotate=1"
			acoma=True
		
		if videofile["rotate"]==270:
			if acoma:
				lineatemp+=","
			lineatemp+="rotate=2"
			acoma=True

		if ((resx_inter!=resx_final) or (resy_inter!=resy_final)) and (default_res==False):
			if acoma:
				lineatemp+=","
			lineatemp+="scale="+str(resx_final)+":"+str(resy_final)
			acoma=True
		
		if disctype!="divx":
			if acoma:
				lineatemp+=","
			lineatemp+="harddup"
			acoma=True

		if (lineatemp!="") and (isvob==False):
			command_var.append("-vf")		
			command_var.append(lineatemp)

		if isvob==False:
			command_var.append("-lavcopts")
			
			lavcopts=""
			
			# Currently Mencoder supports up to 8 threads
			if threads>8:
				nthreads=8
			else:
				nthreads=threads
			
			if nthreads>1:
				lavcopts="threads="+str(nthreads)+":"
			lavcopts+="vcodec="
			if disctype=="vcd":
				lavcopts+="mpeg1video"
			elif disctype=="divx":
				lavcopts+="mpeg4"
			else:
				lavcopts+="mpeg2video"
		
			if videofile["trellis"]:
				lavcopts+=":trell"
		
			if videofile["mbd"]==0:
				lavcopts+=":mbd=0"	
			elif videofile["mbd"]==1:
				lavcopts+=":mbd=1"
			elif videofile["mbd"]==2:
				lavcopts+=":mbd=2"
	
			lavcopts+=":sc_threshold=1000000000:cgop"

			if disctype!="divx":
				lavcopts+=":vstrict=0:vrc_maxrate="+str(max_videorate)
				lavcopts+=":vrc_buf_size="
				if (disctype=="vcd"):
					lavcopts+="327"
				elif (disctype=="svcd") or (disctype=="cvd"):
					lavcopts+="917"
				elif (disctype=="dvd"):
					lavcopts+="1835"
			if disctype=="vcd":
				lavcopts+=":vrc_minrate="+str(min_videorate)
	
			lavcopts+=":vbitrate="+str(videorate)
		
			if disctype!="divx":
				lavcopts+=":keyint="+str(keyintv)
				if(copy_audio==False):
					lavcopts+=":acodec="
					if disctype=="dvd":
						if fix_ac3:
							lavcopts+="ac3_fixed"
						else:
							lavcopts+="ac3"
					else:
						lavcopts+="mp2"
					lavcopts+=":abitrate="+str(audiorate)

			if (default_res==False):
				if aspect_ratio_final>1.4:
					lavcopts+=":aspect=16/9"
				else:
					lavcopts+=":aspect=4/3"
			
			if encpass > 0:
				lavcopts+=":vpass=" + str(encpass)
				passlog_var = os.path.join(filefolder,filename)+".log"
				if encpass==1:
					try:
						os.remove(passlog_var)
					except:
						 pass
					if videofile["turbo1stpass"]:
						lavcopts+=":turbo"
				
	
			extra_params=videofile["params_lavc"] # take the LAVC extra params
			while (extra_params!=""):
				extra_params,new_param=devede_other.get_new_param(extra_params)
				if (new_param!="") and (new_param!=':'):
					while (len(new_param)>1) and (new_param[0]==':'):
						new_param=new_param[1:]
					while (len(new_param)>1) and (new_param[-1]==':'):
						new_param=new_param[:-1]
					lavcopts+=":"+new_param
			command_var.append(lavcopts)
	
		if (disctype=="divx") and (copy_audio==False) and (isvob==False):
			lameopts="abr:br="+str(audiorate)
			command_var.append("-lameopts")
			extra_params=videofile["params_lame"] # take the LAME extra params
			while (extra_params!=""):
				extra_params,new_param=devede_other.get_new_param(extra_params)
				if (new_param!="") and (new_param!=':'):
					while (len(new_param)>1) and (new_param[0]==':'):
						new_param=new_param[1:]
					while (len(new_param)>1) and (new_param[-1]==':'):
						new_param=new_param[:-1]
					lameopts+=":"+new_param
			command_var.append(lameopts)
	
		currentfile=self.create_filename(filefolder+filename,title,chapter,disctype=="divx")

		if (passlog_var != None):
			command_var.append("-passlogfile")
			command_var.append(passlog_var)

		command_var.append("-o")
		command_var.append(currentfile)
		command_var.append(videofile["path"])

		extra_params=videofile["params"] # take the extra params
		while (extra_params!=""):
			extra_params,new_param=devede_other.get_new_param(extra_params)
			if new_param!="":
				command_var.append(new_param)

		self.print_error=_("Conversion failed.\nIt seems a bug of Mencoder.")
		if (videofile["params"]!="") or (videofile["params_vf"]!="") or (videofile["params_lavc"]!="") or (videofile["params_lame"]!=""):
			self.print_error+="\n"+_("Also check the extra params passed to Mencoder for syntax errors.")
		self.error_not_done=True
		self.launch_program(command_var,read_chars=300)
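
Besides the Windows-specific hard-link branch, the converter picks the encoder binary by platform: 'mencoder.exe' on win32/win64 and plain 'mencoder' elsewhere. Note that CPython reports 'win32' even on 64-bit Windows, so the extra 'win64' test is defensive. A minimal sketch:

import sys

def mencoder_executable():
    # 64-bit CPython on Windows still reports 'win32'; 'win64' is checked just in case.
    return 'mencoder.exe' if sys.platform in ('win32', 'win64') else 'mencoder'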

Example 35

Project: bauble.classic
Source File: __init__.py
View license
def main(uri=None):
    """
    Run the main Bauble application.

    :param uri:  the URI of the database to connect to.  For more information
                 about database URIs see `<http://www.sqlalchemy.org/docs/05/\
dbengine.html#create-engine-url-arguments>`_

    :type uri: str
    """
    # TODO: it would be nice to show a Tk dialog here saying we can't
    # import gtk...but then we would have to include all of the Tk libs in
    # with the win32 batteries-included installer
    try:
        import gtk
        import gobject
    except ImportError, e:
        print _('** Error: could not import gtk and/or gobject')
        print e
        if sys.platform == 'win32':
            print _('Please make sure that GTK_ROOT\\bin is in your PATH.')
        sys.exit(1)

    # create the user directory
    if not os.path.exists(paths.user_dir()):
        os.makedirs(paths.user_dir())

    # add console root handler, and file root handler, set it at the logging
    # level specified by BAUBLE_LOGGING, or at INFO level.
    filename = os.path.join(paths.user_dir(), 'bauble.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fileHandler = logging.FileHandler(filename, 'w+')
    logging.getLogger().addHandler(fileHandler)
    consoleHandler = logging.StreamHandler()
    logging.getLogger().addHandler(consoleHandler)
    fileHandler.setFormatter(formatter)
    consoleHandler.setFormatter(formatter)
    fileHandler.setLevel(logging.DEBUG)
    consoleHandler.setLevel(consoleLevel)

    # initialize the user preferences
    from bauble.prefs import prefs, use_sentry_client_pref
    prefs.init()

    try:
        # no raven.conf.setup_logging: just standard Python logging
        from raven import Client
        from raven.handlers.logging import SentryHandler

        # only register the sentry client if the user agrees on it
        if prefs[use_sentry_client_pref]:
            logger.debug('registering sentry client')
            sentry_client = Client('https://59105d22a4ad49158796088c26bf8e4c:'
                                   '[email protected]'
                                   'app.getsentry.com/45704')
            handler = SentryHandler(sentry_client)
            logging.getLogger().addHandler(handler)
            handler.setLevel(logging.WARNING)
        else:
            logger.debug('not registering sentry client')

    except Exception, e:
        logger.warning("can't configure sentry client")
        logger.debug('%s - %s' % (type(e), e))

    import gtk.gdk
    import pygtk
    if not main_is_frozen():
        pygtk.require("2.0")

    display = gtk.gdk.display_get_default()
    if display is None:
        print _("**Error: Bauble must be run in a windowed environment.")
        sys.exit(1)

    import bauble.pluginmgr as pluginmgr
    import bauble.utils as utils

    # initialize threading
    gobject.threads_init()

    try:
        import bauble.db as db
    except Exception, e:
        utils.message_dialog(utils.xml_safe(e), gtk.MESSAGE_ERROR)
        sys.exit(1)

    # declare module level variables
    global gui, default_icon, conn_name

    default_icon = os.path.join(paths.lib_dir(), "images", "icon.svg")

    open_exc = None
    # open default database
    if uri is None:
        from bauble.connmgr import start_connection_manager
        while True:
            if not uri or not conn_name:
                conn_name, uri = start_connection_manager()
                if conn_name is None:
                    quit()
            try:
                if db.open(uri, True, True):
                    prefs[conn_default_pref] = conn_name
                    break
                else:
                    uri = conn_name = None
            except err.VersionError, e:
                logger.warning("%s(%s)" % (type(e), e))
                db.open(uri, False)
                break
            except (err.EmptyDatabaseError, err.MetaTableError,
                    err.VersionError, err.TimestampError,
                    err.RegistryError), e:
                logger.info("%s(%s)" % (type(e), e))
                open_exc = e
                # reopen without verification so that db.Session and
                # db.engine, db.metadata will be bound to an engine
                db.open(uri, False)
                break
            except err.DatabaseError, e:
                logger.debug("%s(%s)" % (type(e), e))
                # traceback.format_exc()
                open_exc = e
                # break
            except Exception, e:
                msg = _("Could not open connection.\n\n%s") % \
                    utils.xml_safe(repr(e))
                utils.message_details_dialog(msg, traceback.format_exc(),
                                             gtk.MESSAGE_ERROR)
                uri = None
    else:
        db.open(uri, True, True)

    # load the plugins
    pluginmgr.load()

    # save any changes made in the conn manager before anything else has
    # chance to crash
    prefs.save()

    # set the default command handler
    from bauble.view import DefaultCommandHandler
    pluginmgr.register_command(DefaultCommandHandler)

    # now that we have a connection create the gui, start before the plugins
    # are initialized in case they have to do anything like add a menu
    import bauble.ui as ui
    gui = ui.GUI()

    def _post_loop():
        gtk.gdk.threads_enter()
        try:
            if isinstance(open_exc, err.DatabaseError):
                msg = _('Would you like to create a new Bauble database at '
                        'the current connection?\n\n<i>Warning: If there is '
                        'already a database at this connection any existing '
                        'data will be destroyed!</i>')
                if utils.yes_no_dialog(msg, yes_delay=2):
                    try:
                        db.create()
                        # db.create() creates all tables registered with
                        # the default metadata so the pluginmgr should be
                        # loaded after the database is created so we don't
                        # inadvertently create tables from the plugins
                        pluginmgr.init()
                        # set the default connection
                        prefs[conn_default_pref] = conn_name
                    except Exception, e:
                        utils.message_details_dialog(utils.xml_safe(e),
                                                     traceback.format_exc(),
                                                     gtk.MESSAGE_ERROR)
                        logger.error("%s(%s)" % (type(e), e))
            else:
                pluginmgr.init()
        except Exception, e:
            logger.warning("%s\n%s(%s)"
                           % (traceback.format_exc(), type(e), e))
            utils.message_dialog(utils.utf8(e), gtk.MESSAGE_WARNING)
        gui.get_view().update()
        gtk.gdk.threads_leave()

    gobject.idle_add(_post_loop)

    gui.show()
    gtk.threads_enter()
    gtk.main()
    active_view = gui.get_view()
    if active_view:
        active_view.cancel_threads()
    gtk.threads_leave()
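
Here sys.platform is only used to make an ImportError more helpful: if GTK fails to import on Windows, the user is reminded to put GTK_ROOT\bin on PATH. The same pattern in Python 3 syntax, with a hypothetical module name standing in for any optional, platform-sensitive dependency:

import sys

try:
    import some_gui_toolkit  # hypothetical optional dependency
except ImportError as e:
    print('** Error: could not import the GUI toolkit:', e)
    if sys.platform == 'win32':
        print("Please make sure the toolkit's bin directory is in your PATH.")
    raise SystemExit(1)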

Example 36

Project: headphones
Source File: cuesplit.py
View license
def split(albumpath):
    global CUE_META
    os.chdir(albumpath)
    base_dir = Directory(os.getcwd())

    cue = None
    wave = None

    # determining correct cue file
    # if perfect match found
    for _cue in base_dir.filter('CueFile'):
        for _wave in base_dir.filter('WaveFile'):
            if _cue.header['file'] == _wave.name:
                logger.info('CUE Sheet found: %s', _cue.name)
                logger.info('Music file found: %s', _wave.name)
                cue = _cue
                wave = _wave
    # if no perfect match found then try without extensions
    if not cue and not wave:
        logger.info('No match for music files, trying to match without extensions...')
        for _cue in base_dir.filter('CueFile'):
            for _wave in base_dir.filter('WaveFile'):
                if ''.join(os.path.splitext(_cue.header['file'])[:-1]) == _wave.name_name:
                    logger.info('Possible CUE Sheet found: %s', _cue.name)
                    logger.info('CUE Sheet refers music file: %s', _cue.header['file'])
                    logger.info('Possible Music file found: %s', _wave.name)
                    cue = _cue
                    wave = _wave
                    cue.header['file'] = wave.name
    # if still no match then raise an exception
    if not cue and not wave:
        raise ValueError('No music file match found!')

    # Split with xld or shntool
    splitter = 'shntool'
    xldprofile = None

    # use xld profile to split cue
    if headphones.CONFIG.ENCODER == 'xld' and headphones.CONFIG.MUSIC_ENCODER and headphones.CONFIG.XLDPROFILE:
        import getXldProfile
        xldprofile, xldformat, _ = getXldProfile.getXldProfile(headphones.CONFIG.XLDPROFILE)
        if not xldformat:
            raise ValueError(
                'Details for xld profile "%s" not found, cannot split cue' % (xldprofile))
        else:
            if headphones.CONFIG.ENCODERFOLDER:
                splitter = os.path.join(headphones.CONFIG.ENCODERFOLDER, 'xld')
            else:
                splitter = 'xld'
    # use standard xld command to split cue
    elif sys.platform == 'darwin':
        splitter = 'xld'
        if not check_splitter(splitter):
            splitter = 'shntool'

    if splitter == 'shntool' and not check_splitter(splitter):
        raise ValueError('Command not found, ensure shntool or xld installed')

    # Determine if file can be split
    if wave.name_ext not in WAVE_FILE_TYPE_BY_EXTENSION.keys():
        raise ValueError('Cannot split, audio file has unsupported extension')

    # Split with xld
    if 'xld' in splitter:
        cmd = [splitter]
        cmd.extend([wave.name])
        cmd.extend(['-c'])
        cmd.extend([cue.name])
        if xldprofile:
            cmd.extend(['--profile'])
            cmd.extend([xldprofile])
        else:
            cmd.extend(['-f'])
            cmd.extend(['flac'])
        cmd.extend(['-o'])
        cmd.extend([base_dir.path])
        split = split_baby(wave.name, cmd)
    else:

        # Split with shntool

        # generate temporary metafile describing the cue
        with open(ALBUM_META_FILE_NAME, mode='w') as meta_file:
            meta_file.write(cue.get_meta())
        base_dir.content.append(MetaFile(os.path.abspath(ALBUM_META_FILE_NAME)))
        # check metafile for completeness
        if not base_dir.filter('MetaFile'):
            raise ValueError('Cue Meta file {0} missing!'.format(ALBUM_META_FILE_NAME))
        else:
            CUE_META = base_dir.filter('MetaFile')[0]

        with open(SPLIT_FILE_NAME, mode='w') as split_file:
            split_file.write(cue.breakpoints())

        if headphones.CONFIG.CUE_SPLIT_SHNTOOL_PATH:
            cmd = [os.path.join(headphones.CONFIG.CUE_SPLIT_SHNTOOL_PATH, 'shntool')]
        else:
            cmd = ['shntool']

        cmd.extend(['split'])
        cmd.extend(['-f'])
        cmd.extend([SPLIT_FILE_NAME])
        cmd.extend(['-o'])
        cmd.extend([wave.name_ext.lstrip('.')])
        cmd.extend([wave.name])
        split = split_baby(wave.name, cmd)
        os.remove(SPLIT_FILE_NAME)
        base_dir.update()

        # tag FLAC files
        if split and CUE_META.count_tracks() == len(base_dir.tracks(ext='.flac', split=True)):
            for t in base_dir.tracks(ext='.flac', split=True):
                logger.info('Tagging %s...', t.name)
                t.tag()

        # rename files
        if split and CUE_META.count_tracks() == len(base_dir.tracks(ext=wave.name_ext, split=True)):
            for t in base_dir.tracks(ext=wave.name_ext, split=True):
                if t.name != t.filename():
                    logger.info('Renaming %s to %s...', t.name, t.filename())
                    os.rename(t.name, t.filename())

        os.remove(ALBUM_META_FILE_NAME)

    if not split:
        raise ValueError('Failed to split, check logs')
    else:
        # Rename original file
        os.rename(wave.name, wave.name + '.original')
        return True
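
The platform check above decides which external splitter to try: on macOS (sys.platform == 'darwin') it prefers XLD and falls back to shntool if XLD is not available; everywhere else shntool is used directly. A Python 3 sketch of the same decision, using shutil.which instead of the module's own check_splitter() helper:

import shutil
import sys

def pick_cue_splitter():
    # Prefer XLD on macOS when it is installed; otherwise fall back to shntool.
    if sys.platform == 'darwin' and shutil.which('xld'):
        return 'xld'
    return 'shntool'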

Example 37

View license
    def __init__(self, parent=None, signalManager=None):
        OWWidget.__init__(self, parent, signalManager, 'MultiData Python Script')
        
        self.inputs = [("in_data", ExampleTable, self.setExampleTable, Default + Multiple),
                       ("in_distance", orange.SymMatrix, self.setDistanceMatrix),
                       ("in_network", Orange.network.Graph, self.setNetwork),
                       ("in_learners", orange.Learner, self.setLearner, Default + Multiple),
                       ("in_classifiers", orange.Classifier, self.setClassifier, Default + Multiple),
                       ("in_misc", object, self.setMisc, Default + Multiple)]
        self.outputs = [("out_data", ExampleTable), 
                        ("out_distance", orange.SymMatrix), 
                        ("out_network", Orange.network.Graph), 
                        ("out_learner", orange.Learner), 
                        ("out_classifier", orange.Classifier, Dynamic),
                        ("out_test_results", Orange.evaluation.testing.ExperimentResults),
                        ("out_misc", object)]
        
        self.in_data = []
        self.in_data_dict = {}   # TODO: switch to weakref?
        self.in_learners = []
        self.in_learner_dict = {}
        self.in_classifiers = []
        self.in_classifier_dict = {}
        self.in_misc = []
        self.in_misc_dict = {}

        self.in_network = None
        self.in_distance = None
        
        self.codeFile = ''
        self.libraryListSource = [Script("Hello world", "print 'Hello world'\n")]
        self.currentScriptIndex = 0
        self.splitterState = None
        self.loadSettings()
        
        for s in self.libraryListSource:
            s.flags = 0
        
        self._cachedDocuments = {}
        
        self.infoBox = OWGUI.widgetBox(self.controlArea, 'Info')
        label = OWGUI.label(self.infoBox, self, "<p>Execute python script.</p><p>Input variables:<ul><li> " + \
                    "<li>".join(t[0] for t in self.inputs) + "</ul></p><p>Output variables:<ul><li>" + \
                    "<li>".join(t[0] for t in self.outputs) + "</ul></p>")
        self.libraryList = PyListModel([], self, flags=Qt.ItemIsSelectable | Qt.ItemIsEnabled | Qt.ItemIsEditable)
#        self.libraryList.append(Script("Hello world", "print 'Hello world'\n"))
        self.libraryList.wrap(self.libraryListSource)
        
        self.controlBox = OWGUI.widgetBox(self.controlArea, 'Library')
        self.controlBox.layout().setSpacing(1)
        self.libraryView = QListView()
        self.libraryView.setEditTriggers(QListView.DoubleClicked | QListView.EditKeyPressed)
        self.libraryView.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Preferred)
        self.libraryView.setItemDelegate(ScriptItemDelegate(self))
        self.libraryView.setModel(self.libraryList)
        self.connect(self.libraryView.selectionModel(), SIGNAL("selectionChanged(QItemSelection, QItemSelection)"), self.onSelectedScriptChanged)
        self.controlBox.layout().addWidget(self.libraryView)
        w = ModelActionsWidget()
        
        self.addNewScriptAction = action = QAction("+", self)
        action.pyqtConfigure(toolTip="Add a new script to the library")
        self.connect(action, SIGNAL("triggered()"), self.onAddScript)
        new_empty = QAction("Add a new empty script", action)
        new_from_file = QAction("Add a new script from a file", action)
        self.connect(new_empty, SIGNAL("triggered()"), self.onAddScript)
        self.connect(new_from_file, SIGNAL("triggered()"), self.onAddScriptFromFile)
        menu = QMenu(w)
        menu.addAction(new_empty)
        menu.addAction(new_from_file)
        
#        action.setMenu(menu)
        button = w.addAction(action)
        
        self.removeAction = action = QAction("-", self)
        action.pyqtConfigure(toolTip="Remove script from library")
        self.connect(action, SIGNAL("triggered()"), self.onRemoveScript)
        w.addAction(action)
        
        action = QAction("Update", self)
        action.pyqtConfigure(toolTip="Save changes in the editor to library")
        action.setShortcut(QKeySequence(QKeySequence.Save))
        self.connect(action, SIGNAL("triggered()"), self.commitChangesToLibrary)
        b = w.addAction(action)
#        b.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed)
        
        action = QAction("More", self)
        action.pyqtConfigure(toolTip="More actions") #, icon=self.style().standardIcon(QStyle.SP_ToolBarHorizontalExtensionButton))
        
        self.openScriptFromFileAction = new_from_file = QAction("Import a script from a file", self)
        self.saveScriptToFile = save_to_file = QAction("Save selected script to a file", self)
        save_to_file.setShortcut(QKeySequence(QKeySequence.SaveAs))
        
        self.connect(new_from_file, SIGNAL("triggered()"), self.onAddScriptFromFile)
        self.connect(save_to_file, SIGNAL("triggered()"), self.saveScript)
        
        menu = QMenu(w)
        menu.addAction(new_from_file)
        menu.addAction(save_to_file)
        action.setMenu(menu)
        b = w.addAction(action)
        b.setPopupMode(QToolButton.InstantPopup) 
        ## TODO: set the space for the indicator
        
        w.layout().setSpacing(1)
        
        self.controlBox.layout().addWidget(w)
                    
#        OWGUI.button(self.controlBox, self, "Open...", callback=self.openScript)
#        OWGUI.button(self.controlBox, self, "Save...", callback=self.saveScript)
        
        self.runBox = OWGUI.widgetBox(self.controlArea, 'Run')
        OWGUI.button(self.runBox, self, "Execute", callback=self.execute)
        
        self.splitCanvas = QSplitter(Qt.Vertical, self.mainArea)
        self.mainArea.layout().addWidget(self.splitCanvas)
        
        self.defaultFont = defaultFont = "Monaco" if sys.platform == "darwin" else "Courier"
        self.textBox = OWGUI.widgetBox(self, 'MultiData Python script')
        self.splitCanvas.addWidget(self.textBox)
        self.text = PythonScriptEditor(self)
        self.textBox.layout().addWidget(self.text)
        
        self.textBox.setAlignment(Qt.AlignVCenter)
        self.text.setTabStopWidth(4)
        
        self.connect(self.text, SIGNAL("modificationChanged(bool)"), self.onModificationChanged)
        
        self.saveAction = action = QAction("&Save", self.text)
        action.pyqtConfigure(toolTip="Save script to file")
        action.setShortcut(QKeySequence(QKeySequence.Save))
        action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
        self.connect(action, SIGNAL("triggered()"), self.saveScript)
        
        self.consoleBox = OWGUI.widgetBox(self, 'Console')
        self.splitCanvas.addWidget(self.consoleBox)
        self.console = PythonConsole(self.__dict__, self)
        self.consoleBox.layout().addWidget(self.console)
        self.console.document().setDefaultFont(QFont(defaultFont))
        self.consoleBox.setAlignment(Qt.AlignBottom)
        self.console.setTabStopWidth(4)
        
        self.openScript(self.codeFile)
        try:
            self.libraryView.selectionModel().select(self.libraryList.index(self.currentScriptIndex), QItemSelectionModel.ClearAndSelect)
        except Exception:
            pass
        self.splitCanvas.setSizes([2, 1])
        if self.splitterState is not None:
            self.splitCanvas.restoreState(QByteArray(self.splitterState))
        
        self.connect(self.splitCanvas, SIGNAL("splitterMoved(int, int)"), lambda pos, ind: setattr(self, "splitterState", str(self.splitCanvas.saveState())))
        self.controlArea.layout().addStretch(1)
        self.resize(800,600)
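
sys.platform appears only once here, in a conditional expression that picks the editor and console font: Monaco on macOS, Courier elsewhere. The pattern on its own:

import sys

# Monaco ships with macOS; Courier is a reasonable default on other platforms.
DEFAULT_EDITOR_FONT = "Monaco" if sys.platform == "darwin" else "Courier"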

Example 38

Project: pystan
Source File: model.py
View license
    def __init__(self, file=None, charset='utf-8', model_name="anon_model",
                 model_code=None, stanc_ret=None, boost_lib=None,
                 eigen_lib=None, verbose=False, obfuscate_model_name=True,
                 extra_compile_args=None):

        if stanc_ret is None:
            stanc_ret = pystan.api.stanc(file=file,
                                         charset=charset,
                                         model_code=model_code,
                                         model_name=model_name,
                                         verbose=verbose,
                                         obfuscate_model_name=obfuscate_model_name)

        if not isinstance(stanc_ret, dict):
            raise ValueError("stanc_ret must be an object returned by stanc.")
        stanc_ret_keys = {'status', 'model_code', 'model_cppname',
                          'cppcode', 'model_name'}
        if not all(n in stanc_ret_keys for n in stanc_ret):
            raise ValueError("stanc_ret lacks one or more of the keys: "
                             "{}".format(str(stanc_ret_keys)))
        elif stanc_ret['status'] != 0:  # success == 0
            raise ValueError("stanc_ret is not a successfully returned "
                             "dictionary from stanc.")
        self.model_cppname = stanc_ret['model_cppname']
        self.model_name = stanc_ret['model_name']
        self.model_code = stanc_ret['model_code']
        self.model_cppcode = stanc_ret['cppcode']

        msg = "COMPILING THE C++ CODE FOR MODEL {} NOW."
        logger.info(msg.format(self.model_name))
        if verbose:
            msg = "OS: {}, Python: {}, Cython {}".format(sys.platform,
                                                         sys.version,
                                                         Cython.__version__)
            logger.info(msg)
        if boost_lib is not None:
            # FIXME: allow boost_lib, eigen_lib to be specified
            raise NotImplementedError
        if eigen_lib is not None:
            raise NotImplementedError

        # module_name needs to be unique so that each model instance has its own module
        nonce = abs(hash((self.model_name, time.time())))
        self.module_name = 'stanfit4{}_{}'.format(self.model_name, nonce)
        lib_dir = tempfile.mkdtemp()
        pystan_dir = os.path.dirname(__file__)
        include_dirs = [
            lib_dir,
            pystan_dir,
            os.path.join(pystan_dir, "stan", "src"),
            os.path.join(pystan_dir, "stan", "lib", "stan_math_2.12.0"),
            os.path.join(pystan_dir, "stan", "lib", "stan_math_2.12.0", "lib", "eigen_3.2.9"),
            os.path.join(pystan_dir, "stan", "lib", "stan_math_2.12.0", "lib", "boost_1.60.0"),
            os.path.join(pystan_dir, "stan", "lib", "stan_math_2.12.0", "lib", "cvodes_2.8.2", "include"),
            np.get_include(),
        ]

        model_cpp_file = os.path.join(lib_dir, self.model_cppname + '.hpp')
        with io.open(model_cpp_file, 'w', encoding='utf-8') as outfile:
            outfile.write(self.model_cppcode)

        pyx_file = os.path.join(lib_dir, self.module_name + '.pyx')
        pyx_template_file = os.path.join(pystan_dir, 'stanfit4model.pyx')
        with io.open(pyx_template_file, 'r', encoding='utf-8') as infile:
            s = infile.read()
            template = string.Template(s)
        with io.open(pyx_file, 'w', encoding='utf-8') as outfile:
            s = template.safe_substitute(model_cppname=self.model_cppname)
            outfile.write(s)

        stan_macros = [
            ('BOOST_RESULT_OF_USE_TR1', None),
            ('BOOST_NO_DECLTYPE', None),
            ('BOOST_DISABLE_ASSERTS', None),
        ]
        # compile stan models with optimization (-O2)
        # (stanc is compiled without optimization (-O0) currently, see #33)
        if extra_compile_args is None:
            extra_compile_args = [
                '-O2',
                '-ftemplate-depth-256',
                '-Wno-unused-function',
                '-Wno-uninitialized',
            ]
            if platform.platform().startswith('Win'):
                extra_compile_args = ['/EHsc', '-DBOOST_DATE_TIME_NO_LIB']

        distutils.log.set_verbosity(verbose)
        extension = Extension(name=self.module_name,
                              language="c++",
                              sources=[pyx_file],
                              define_macros=stan_macros,
                              include_dirs=include_dirs,
                              extra_compile_args=extra_compile_args)

        cython_include_dirs = ['.', pystan_dir]
        build_extension = _get_build_extension()
        build_extension.extensions = cythonize([extension],
                                               include_path=cython_include_dirs,
                                               quiet=not verbose)
        build_extension.build_temp = os.path.dirname(pyx_file)
        build_extension.build_lib = lib_dir

        redirect_stderr = not verbose and pystan.misc._has_fileno(sys.stderr)
        if redirect_stderr:
            # silence stderr for compilation
            orig_stderr = pystan.misc._redirect_stderr()

        try:
            build_extension.run()
        finally:
            if redirect_stderr:
                # restore stderr
                os.dup2(orig_stderr, sys.stderr.fileno())

        self.module = load_module(self.module_name, lib_dir)
        self.module_filename = os.path.basename(self.module.__file__)
        # once the module is in memory, we no longer need the file on disk
        # but we do need a copy of the file for pickling and the module name
        with io.open(os.path.join(lib_dir, self.module_filename), 'rb') as f:
            self.module_bytes = f.read()
        shutil.rmtree(lib_dir, ignore_errors=True)
        self.fit_class = getattr(self.module, "StanFit4Model")
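
The sys.platform usage here is twofold: it is logged as part of the verbose diagnostics ("OS: ..., Python: ..., Cython ..."), and the Windows branch switches the C++ compiler flags (the example tests platform.platform().startswith('Win'), which on a native Windows CPython amounts to the same check as sys.platform == 'win32'). A minimal, self-contained sketch of the flag selection, reusing the flag values from the example above:

import sys

def default_compile_args():
    # MSVC-style switches on Windows, GCC/Clang-style switches elsewhere;
    # the values are the ones the pystan example passes to Extension().
    if sys.platform == 'win32':
        return ['/EHsc', '-DBOOST_DATE_TIME_NO_LIB']
    return ['-O2', '-ftemplate-depth-256',
            '-Wno-unused-function', '-Wno-uninitialized']

print(default_compile_args())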

Example 39

Project: pyload
Source File: systemCheck.py
View license
def main():
    print("#####   System Information   #####\n")
    print("Platform:", sys.platform)
    print("Operating System:", os.name)
    print("Python:", sys.version.replace("\n", "")+ "\n")

    try:
        import pycurl
        print("pycurl:", pycurl.version)
    except:
        print("pycurl:", "missing")

    try:
        import Crypto
        print("py-crypto:", Crypto.__version__)
    except:
        print("py-crypto:", "missing")


    try:
        import OpenSSL
        print("OpenSSL:", OpenSSL.version.__version__)
    except:
        print("OpenSSL:", "missing")

    try:
        import Image
        print("image libary:", Image.VERSION)
    except:
        print("image libary:", "missing")

    try:
        import PyQt4.QtCore
        print("pyqt:", PyQt4.QtCore.PYQT_VERSION_STR)
    except:
        print("pyqt:", "missing")

    print("\n\n#####   System Status   #####")
    print("\n##  pyLoadCore  ##")

    core_err = []
    core_info = []

    if sys.version_info > (2, 8):
        core_err.append("Your python version is to new, Please use Python 2.6/2.7")

    if sys.version_info < (2, 5):
        core_err.append("Your python version is to old, Please use at least Python 2.5")

    try:
        import pycurl
    except:
        core_err.append("Please install py-curl to use pyLoad.")


    try:
        from pycurl import AUTOREFERER
    except:
        core_err.append("Your py-curl version is to old, please upgrade!")

    try:
        import Image
    except:
        core_err.append("Please install py-imaging/pil to use Hoster, which uses captchas.")

    pipe = subprocess.PIPE
    try:
        p = subprocess.call(["tesseract"], stdout=pipe, stderr=pipe)
    except:
        core_err.append("Please install tesseract to use Hoster, which uses captchas.")

    try:
        import OpenSSL
    except:
        core_info.append("Install OpenSSL if you want to create a secure connection to the core.")

    if core_err:
        print("The system check has detected some errors:\n")
        for err in core_err:
            print(err)
    else:
        print("No Problems detected, pyLoadCore should work fine.")

    if core_info:
        print("\nPossible improvements for pyload:\n")
        for line in core_info:
            print(line)


    print("\n##  pyLoadGui  ##")

    gui_err = []

    try:
        import PyQt4
    except:
        gui_err.append("GUI won't work without pyqt4 !!")

    if gui_err:
        print("The system check has detected some errors:\n")
        for err in gui_err:
            print(err)
    else:
        print("No Problems detected, pyLoadGui should work fine.")


    print("\n##  Webinterface  ##")

    web_err = []
    web_info = []

    try:
        import flup
    except:
        web_info.append("Install Flup to use FastCGI or optional webservers.")


    if web_err:
        print("The system check has detected some errors:\n")
        for err in web_err:
            print(err)
    else:
        print("No Problems detected, Webinterface should work fine.")

    if web_info:
        print("\nPossible improvements for webinterface:\n")
        for line in web_info:
            print(line)
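
This check script uses sys.platform purely for reporting: it prints the platform string next to os.name and the Python version so users can paste a useful bug report. A stripped-down sketch of that reporting idiom (the key names are illustrative):

import os
import sys

def system_summary():
    # sys.platform is the interpreter's build platform ('linux', 'win32',
    # 'darwin', ...); os.name is the coarser 'posix'/'nt' distinction.
    return {
        'platform': sys.platform,
        'os': os.name,
        'python': sys.version.replace('\n', ' '),
    }

if __name__ == '__main__':
    for key, value in system_summary().items():
        print('{}: {}'.format(key, value))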

Example 40

Project: reviewboard
Source File: checks.py
View license
def check_updates_required():
    """Check if there are manual updates required.

    Sometimes, especially in developer installs, some things need to be tweaked
    by hand before Review Board can be used on this server.
    """
    global _install_fine

    updates_required = []

    if not _install_fine:
        site_dir = os.path.dirname(settings.HTDOCS_ROOT)
        devel_install = (os.path.exists(os.path.join(settings.LOCAL_ROOT,
                                                     'manage.py')))
        siteconfig = None

        # Check if we can access a SiteConfiguration. There should always
        # be one, unless the user has erased stuff by hand.
        #
        # This also checks for any sort of errors in talking to the database.
        # This could be due to the database being down, or corrupt, or
        # tables locked, or an empty database, or other cases. We want to
        # catch this before getting the point where plain 500 Internal Server
        # Errors appear.
        try:
            siteconfig = SiteConfiguration.objects.get_current()
        except (DatabaseError, SiteConfiguration.DoesNotExist) as e:
            updates_required.append((
                'admin/manual-updates/database-error.html', {
                    'error': e,
                }
            ))

        # Check if the version running matches the last stored version.
        # Only do this for non-debug installs, as it's really annoying on
        # a developer install.
        cur_version = get_version_string()

        if siteconfig and siteconfig.version != cur_version:
            updates_required.append((
                'admin/manual-updates/version-mismatch.html', {
                    'current_version': cur_version,
                    'stored_version': siteconfig.version,
                    'site_dir': site_dir,
                    'devel_install': devel_install,
                }
            ))

        # Check if the site has moved and the old media directory no longer
        # exists.
        if siteconfig and not os.path.exists(settings.STATIC_ROOT):
            new_static_root = os.path.join(settings.HTDOCS_ROOT, 'static')

            if os.path.exists(new_static_root):
                siteconfig.set('site_static_root', new_static_root)
                settings.STATIC_ROOT = new_static_root

        # Check if the site has moved and the old media directory no longer
        # exists.
        if siteconfig and not os.path.exists(settings.MEDIA_ROOT):
            new_media_root = os.path.join(settings.HTDOCS_ROOT, 'media')

            if os.path.exists(new_media_root):
                siteconfig.set('site_media_root', new_media_root)
                settings.MEDIA_ROOT = new_media_root

        # Check if the user has any pending static media configuration
        # changes they need to make.
        if siteconfig and 'manual-updates' in siteconfig.settings:
            stored_updates = siteconfig.settings['manual-updates']

            if not stored_updates.get('static-media', False):
                updates_required.append((
                    'admin/manual-updates/server-static-config.html', {
                        'STATIC_ROOT': settings.STATIC_ROOT,
                        'SITE_ROOT': settings.SITE_ROOT,
                        'SITE_DIR': settings.LOCAL_ROOT,
                    }
                ))

        # Check if there's a media/uploaded/images directory. If not, this is
        # either a new install or is using the old-style media setup and needs
        # to be manually upgraded.
        uploaded_dir = os.path.join(settings.MEDIA_ROOT, "uploaded")

        if not os.path.isdir(uploaded_dir) or \
           not os.path.isdir(os.path.join(uploaded_dir, "images")):
            updates_required.append((
                "admin/manual-updates/media-upload-dir.html", {
                    'MEDIA_ROOT': settings.MEDIA_ROOT
                }
            ))

        try:
            username = getpass.getuser()
        except ImportError:
            # This will happen if running on Windows (which doesn't have
            # the pwd module) and if %LOGNAME%, %USER%, %LNAME% and
            # %USERNAME% are all undefined.
            username = "<server username>"

        # Check if the data directory (should be $HOME) is writable by us.
        data_dir = os.environ.get('HOME', '')

        if (not data_dir or
                not os.path.isdir(data_dir) or
                not os.access(data_dir, os.W_OK)):
            try:
                username = getpass.getuser()
            except ImportError:
                # This will happen if running on Windows (which doesn't have
                # the pwd module) and if %LOGNAME%, %USER%, %LNAME% and
                # %USERNAME% are all undefined.
                username = "<server username>"

            updates_required.append((
                'admin/manual-updates/data-dir.html', {
                    'data_dir': data_dir,
                    'writable': os.access(data_dir, os.W_OK),
                    'server_user': username,
                    'expected_data_dir': os.path.join(site_dir, 'data'),
                }
            ))

        # Check if the legacy htdocs and modern static extension
        # directories exist and are writable by us.
        ext_roots = [settings.MEDIA_ROOT]

        if not settings.DEBUG:
            ext_roots.append(settings.STATIC_ROOT)

        for root in ext_roots:
            ext_dir = os.path.join(root, 'ext')

            if not os.path.isdir(ext_dir) or not os.access(ext_dir, os.W_OK):
                updates_required.append((
                    'admin/manual-updates/ext-dir.html', {
                        'ext_dir': ext_dir,
                        'writable': os.access(ext_dir, os.W_OK),
                        'server_user': username,
                    }
                ))

        if not is_exe_in_path('patch'):
            if sys.platform == 'win32':
                binaryname = 'patch.exe'
            else:
                binaryname = 'patch'

            updates_required.append((
                "admin/manual-updates/install-patch.html", {
                    'platform': sys.platform,
                    'binaryname': binaryname,
                    'search_path': os.getenv('PATH'),
                }
            ))

        #
        # NOTE: Add new checks above this.
        #

        _install_fine = not updates_required

    return updates_required
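
The sys.platform check sits near the end: when the external patch tool is missing, the error page names patch.exe on 'win32' and plain patch everywhere else. A hedged, standalone sketch of that pattern, assuming Python 3 so that shutil.which can stand in for Review Board's own is_exe_in_path helper:

import shutil
import sys

def patch_binary_name():
    # Windows builds of the tool ship as patch.exe; POSIX systems use patch.
    return 'patch.exe' if sys.platform == 'win32' else 'patch'

if shutil.which('patch') is None:
    print('{} was not found on PATH ({})'.format(patch_binary_name(), sys.platform))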

Example 41

Project: deep_nets_iclr04
Source File: data_processing.py
View license
    def __init__(self, type='FLIC'):
        if type == 'FLIC':
            ip_dir = 'cropped-images'
        elif type == 'SHOULDER':
            ip_dir = 'full-images'
        
        self.ptno_part = {0:'face', 1:'lsho', 2:'lelb', 3:'lwri', 4:'rsho', 5:'relb', 6:'rwri'}
        self.part_pos = dict()
        for pt_no, part in self.ptno_part.items():
            matname = self.ptno_part[pt_no] + '_pos.mat'
            matkey =  self.ptno_part[pt_no] + 'Pos'
            self.part_pos[part] = io.loadmat('unprocessed_data/'+ip_dir+'/' + matname)[matkey]
        
        self.names = io.loadmat('unprocessed_data/'+ip_dir+'/names.mat')['nameList'][0]
        self.is_train = io.loadmat('unprocessed_data/'+ip_dir+'/istrain.mat')['train_set'][0]
        self.scale_and_crop_coords = io.loadmat('unprocessed_data/'+ip_dir+'/scale_and_crop_coords.mat')['scale_and_crop_coords'][0]
                        
        self.X = defaultdict(list)
        self.Y = defaultdict(list)
        self.index = defaultdict(list)
        
        
        # which file is train, test, valid
        # no validation
        train_valid_sep = 10000
        X_names = defaultdict(list)      
        for idx in range(0, len(self.names)):
            if self.is_train[idx] == 1 and len(X_names['train']) < train_valid_sep:
                X_names['train'].append(self.names[idx])
                self.index['train'].append(idx)
            elif self.is_train[idx] == 1 and len(X_names['train']) >= train_valid_sep:   
                X_names['valid'].append(self.names[idx])
                self.index['valid'].append(idx)
            else:
                self.index['test'].append(idx)
                X_names['test'].append(self.names[idx])
        
        test_indices_subset  = [170, 171, 172, 173, 174, 175, 176, 376, 377, 378, 379, 380, 381, 384, 386, 389, 390, 391, 392, 393, 394, 398, 400, 401, 402, 404, 405, 407, 408, 417, 699, 700, 701, 702, 703, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 733, 734, 735, 752, 754, 755, 756, 757, 896, 897, 898, 899, 900, 903, 904, 905, 906, 907, 918, 919, 920, 961, 963, 964, 965, 966, 967, 981, 982, 983, 1526, 1527, 1528, 1529, 1533, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1561, 1576, 1577, 1609, 1610, 1611, 1612, 1613, 1614, 1626, 1627, 1777, 1778, 1779, 1780, 1781, 1783, 1785, 1786, 1787, 1788, 1789, 1790, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1815, 1856, 1857, 1858, 1859, 1860, 1885, 2324, 2325, 2327, 2328, 2329, 2330, 2334, 2335, 2336, 2337, 2338, 2339, 2340, 2343, 2344, 2345, 2346, 2347, 2348, 2349, 2589, 2590, 2591, 2592, 2593, 2594, 2595, 2596, 2597, 2598, 2599, 2600, 2601, 2602, 2603, 2604, 2605, 2606, 2607, 2608, 2609, 2610, 2611, 2612, 2613, 2614, 2615, 2616, 2621, 2622, 2623, 2624, 2625, 2626, 2627, 2628, 2629, 2630, 2631, 2632, 2633, 2634, 2922, 2923, 2924, 2925, 2926, 2927, 2928, 2929, 2930, 2931, 2932, 2933, 2934, 2950, 2952, 2953, 2959, 2960, 2961, 2962, 2963, 2964, 2965, 2969, 2970, 2971, 2972, 2973, 3244, 3245, 3246, 3247, 3248, 3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259, 3260, 3261, 3262, 3263, 3264, 3265, 3266, 3267, 3268, 3269, 3270, 3271, 3272, 3273, 3277, 3278, 3279, 3280, 3285, 3286, 3287, 3288, 3300, 3305, 3341, 3344, 3345, 3389, 3390, 3391, 3392, 3393, 3395, 3397, 3398, 3592, 3593, 3594, 3595, 3596, 3597, 3625, 3768, 3769, 3770, 3771, 3772, 3784, 3785, 3786, 3787, 3788, 3789, 3790, 3791, 3845, 3846, 3847, 3848, 3849, 3850, 3884, 3961, 3962, 4341, 4342, 4343, 4344, 4345, 4346, 4347, 4348, 4349, 4376, 4382, 4390, 4395, 4396, 4397, 4406, 4407, 4584, 4585, 4586, 4787, 4790, 4792, 4793, 4796, 4812, 4813, 4814, 4815, 4816, 4817, 4818, 4967, 4968, 4969, 4981, 4982, 4995, 4996, 4997, 4998, 4999, 5000, 5001, 5002, 5003]
        test_indices_subset[:] = [x - 1 for x in test_indices_subset]
        X_names['test'] = [self.names[i] for i in test_indices_subset]
        self.index['test'] = test_indices_subset
        print test_indices_subset
        
        #load x and y in memory
        for kind in ['train', 'valid', 'test']:
            self.X[kind] = [None] * len(X_names[kind])
            self.Y[kind] = [None] * len(X_names[kind])
         
        for kind in ['train', 'valid', 'test']:
            for idx, name in enumerate(X_names[kind]): 
                im = plt.imread('unprocessed_data/'+str(name[0]))
                if socket.gethostname() != 'vajra' and sys.platform != 'darwin':
                    im = misc.imrotate(im, 180.0)
                    im = numpy.fliplr(im)               
                self.X[kind][idx] = im
                
                self.Y[kind][idx] = []
                for pt_no, part in self.ptno_part.items():
                    self.Y[kind][idx].append((self.part_pos[part][0][self.index[kind][idx]], self.part_pos[part][1][self.index[kind][idx]]))
                if idx % 100 == 0:
                    print '{0:d} / {1:d}'.format(idx, len(self.names))
        
        for kind in ['train', 'valid', 'test']:
            print 'no of {0:s}: {1:d}'.format(kind, len(self.X[kind]))
        
        if type == 'SHOULDER':
            self.scale_and_crop_images()
        
        #flip train and valid
        for kind in ['train', 'valid']:
            for idx in range(0, len(self.X[kind])):
                flipped = numpy.fliplr(self.X[kind][idx])
                flip_name = '.'.join(X_names[kind][idx][0].split('.')[0:-1])+'-flipped.jpg'
                self.X[kind].append(flipped)
                X_names[kind].append([flip_name])
                flip_y = [(flipped.shape[1] - self.Y[kind][idx][pt_no][0], self.Y[kind][idx][pt_no][1]) for pt_no in self.ptno_part.keys()]
                remapped = [flip_y[j0] for j0 in [0, 4, 5, 6, 1, 2, 3]]
                flip_y = remapped
                self.Y[kind].append(flip_y)                
                
        for kind in ['train', 'valid', 'test']:
            print 'no of {0:s}: {1:d}'.format(kind, len(self.X[kind]))
          
        #------- Write it all down to disk -------#
        target_imgshape = (240, 320, 3)
        scalefactor = float(target_imgshape[0])/self.X['train'][0].shape[0] 
        print 'Image shape: '
        print self.X['train'][0].shape
        print 'Scalefactor is: ' + str(scalefactor)
        for kind in ['train', 'valid', 'test']:
            jnt_pos_2d = dict()
            print 'writing images for '+ kind
            if not os.path.exists('processed_data/'+ kind):
                os.makedirs('processed_data/' + kind)
            p_imname_tmpl = 'processed_data/' + kind + '/{0:s}.png'    
            for idx in range(0, len(self.X[kind])):  
                if idx % 100 == 0:
                    print '{0:d} / {1:d}'.format(idx, len(self.X[kind]))  
                scaled_im = misc.imresize(self.X[kind][idx], target_imgshape)  
                imname = X_names[kind][idx]
                imname = imname[0].split('/')[-1].split('.')[0]
                imname = p_imname_tmpl.format(imname)
                misc.imsave(imname, scaled_im)
                
                for pt_no, part in self.ptno_part.items():
                    x = self.Y[kind][idx][pt_no][0] * scalefactor 
                    y = self.Y[kind][idx][pt_no][1] * scalefactor 
                    if imname not in jnt_pos_2d.keys():
                        jnt_pos_2d[imname] = [(x, y)]
                    else:
                        jnt_pos_2d[imname].append((x, y)) 
                """
                plt.imshow(scaled_im)
                xs = [jnt[0] for jnt in jnt_pos_2d[imname]]
                ys = [jnt[1] for jnt in jnt_pos_2d[imname]]
                plt.scatter(xs, ys)
                plt.show()
                """
                 
                    
            tools.pickle_dump(jnt_pos_2d, 'processed_data/' + kind + '/jnt_pos_2d.pkl')
            al.write(jnt_pos_2d, 'processed_data/' + kind + '/jnt_pos_2d.al')        
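
Here sys.platform is combined with the hostname: the rotate-and-flip correction is skipped on the machine named 'vajra' and on macOS, and applied everywhere else. A minimal sketch of that environment predicate (the hostname comes from the example; the image handling itself is elided):

import socket
import sys

def needs_orientation_fix(trusted_host='vajra'):
    # Mirror the check above: only machines other than the trusted host,
    # and platforms other than macOS, need the rotate/flip correction.
    return socket.gethostname() != trusted_host and sys.platform != 'darwin'

print('apply rotate/flip:', needs_orientation_fix())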

Example 42

Project: tahoe-lafs
Source File: fixups.py
View license
def initialize():
    global done
    import sys
    if sys.platform != "win32" or done:
        return True
    done = True

    import codecs, re
    from ctypes import WINFUNCTYPE, WinError, windll, POINTER, byref, c_int, get_last_error
    from ctypes.wintypes import BOOL, HANDLE, DWORD, UINT, LPWSTR, LPCWSTR, LPVOID

    from allmydata.util import log
    from allmydata.util.encodingutil import canonical_encoding

    # <https://msdn.microsoft.com/en-us/library/ms680621%28VS.85%29.aspx>
    SetErrorMode = WINFUNCTYPE(
        UINT,  UINT,
        use_last_error=True
    )(("SetErrorMode", windll.kernel32))

    SEM_FAILCRITICALERRORS = 0x0001
    SEM_NOOPENFILEERRORBOX = 0x8000

    SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOOPENFILEERRORBOX)

    original_stderr = sys.stderr

    # If any exception occurs in this code, we'll probably try to print it on stderr,
    # which makes for frustrating debugging if stderr is directed to our wrapper.
    # So be paranoid about catching errors and reporting them to original_stderr,
    # so that we can at least see them.
    def _complain(message):
        print >>original_stderr, isinstance(message, str) and message or repr(message)
        log.msg(message, level=log.WEIRD)

    # Work around <http://bugs.python.org/issue6058>.
    codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)

    # Make Unicode console output work independently of the current code page.
    # This also fixes <http://bugs.python.org/issue1602>.
    # Credit to Michael Kaplan <https://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
    # and TZOmegaTZIOY
    # <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
    try:
        # <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
        # HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
        # returns INVALID_HANDLE_VALUE, NULL, or a valid handle
        #
        # <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
        # DWORD WINAPI GetFileType(DWORD hFile);
        #
        # <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
        # BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);

        GetStdHandle = WINFUNCTYPE(
            HANDLE,  DWORD,
            use_last_error=True
        )(("GetStdHandle", windll.kernel32))

        STD_OUTPUT_HANDLE = DWORD(-11)
        STD_ERROR_HANDLE  = DWORD(-12)

        GetFileType = WINFUNCTYPE(
            DWORD,  DWORD,
            use_last_error=True
        )(("GetFileType", windll.kernel32))

        FILE_TYPE_CHAR   = 0x0002
        FILE_TYPE_REMOTE = 0x8000

        GetConsoleMode = WINFUNCTYPE(
            BOOL,  HANDLE, POINTER(DWORD),
            use_last_error=True
        )(("GetConsoleMode", windll.kernel32))

        INVALID_HANDLE_VALUE = DWORD(-1).value

        def not_a_console(handle):
            if handle == INVALID_HANDLE_VALUE or handle is None:
                return True
            return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
                    or GetConsoleMode(handle, byref(DWORD())) == 0)

        old_stdout_fileno = None
        old_stderr_fileno = None
        if hasattr(sys.stdout, 'fileno'):
            old_stdout_fileno = sys.stdout.fileno()
        if hasattr(sys.stderr, 'fileno'):
            old_stderr_fileno = sys.stderr.fileno()

        STDOUT_FILENO = 1
        STDERR_FILENO = 2
        real_stdout = (old_stdout_fileno == STDOUT_FILENO)
        real_stderr = (old_stderr_fileno == STDERR_FILENO)

        if real_stdout:
            hStdout = GetStdHandle(STD_OUTPUT_HANDLE)
            if not_a_console(hStdout):
                real_stdout = False

        if real_stderr:
            hStderr = GetStdHandle(STD_ERROR_HANDLE)
            if not_a_console(hStderr):
                real_stderr = False

        if real_stdout or real_stderr:
            # <https://msdn.microsoft.com/en-us/library/windows/desktop/ms687401%28v=vs.85%29.aspx>
            # BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
            #                           LPDWORD lpCharsWritten, LPVOID lpReserved);

            WriteConsoleW = WINFUNCTYPE(
                BOOL,  HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID,
                use_last_error=True
            )(("WriteConsoleW", windll.kernel32))

            class UnicodeOutput:
                def __init__(self, hConsole, stream, fileno, name):
                    self._hConsole = hConsole
                    self._stream = stream
                    self._fileno = fileno
                    self.closed = False
                    self.softspace = False
                    self.mode = 'w'
                    self.encoding = 'utf-8'
                    self.name = name
                    if hasattr(stream, 'encoding') and canonical_encoding(stream.encoding) != 'utf-8':
                        log.msg("%s: %r had encoding %r, but we're going to write UTF-8 to it" %
                                (name, stream, stream.encoding), level=log.CURIOUS)
                    self.flush()

                def isatty(self):
                    return False
                def close(self):
                    # don't really close the handle, that would only cause problems
                    self.closed = True
                def fileno(self):
                    return self._fileno
                def flush(self):
                    if self._hConsole is None:
                        try:
                            self._stream.flush()
                        except Exception, e:
                            _complain("%s.flush: %r from %r" % (self.name, e, self._stream))
                            raise

                def write(self, text):
                    try:
                        if self._hConsole is None:
                            if isinstance(text, unicode):
                                text = text.encode('utf-8')
                            self._stream.write(text)
                        else:
                            if not isinstance(text, unicode):
                                text = str(text).decode('utf-8')
                            remaining = len(text)
                            while remaining > 0:
                                n = DWORD(0)
                                # There is a shorter-than-documented limitation on the length of the string
                                # passed to WriteConsoleW (see #1232).
                                retval = WriteConsoleW(self._hConsole, text, min(remaining, 10000), byref(n), None)
                                if retval == 0:
                                    raise IOError("WriteConsoleW failed with WinError: %s" % (WinError(get_last_error()),))
                                if n.value == 0:
                                    raise IOError("WriteConsoleW returned %r, n.value = 0" % (retval,))
                                remaining -= n.value
                                if remaining == 0: break
                                text = text[n.value:]
                    except Exception, e:
                        _complain("%s.write: %r" % (self.name, e))
                        raise

                def writelines(self, lines):
                    try:
                        for line in lines:
                            self.write(line)
                    except Exception, e:
                        _complain("%s.writelines: %r" % (self.name, e))
                        raise

            if real_stdout:
                sys.stdout = UnicodeOutput(hStdout, None, STDOUT_FILENO, '<Unicode console stdout>')
            else:
                sys.stdout = UnicodeOutput(None, sys.stdout, old_stdout_fileno, '<Unicode redirected stdout>')

            if real_stderr:
                sys.stderr = UnicodeOutput(hStderr, None, STDERR_FILENO, '<Unicode console stderr>')
            else:
                sys.stderr = UnicodeOutput(None, sys.stderr, old_stderr_fileno, '<Unicode redirected stderr>')
    except Exception, e:
        _complain("exception %r while fixing up sys.stdout and sys.stderr" % (e,))

    # This works around <http://bugs.python.org/issue2128>.

    # <https://msdn.microsoft.com/en-us/library/windows/desktop/ms683156%28v=vs.85%29.aspx>
    GetCommandLineW = WINFUNCTYPE(
        LPWSTR,
        use_last_error=True
    )(("GetCommandLineW", windll.kernel32))

    # <https://msdn.microsoft.com/en-us/library/windows/desktop/bb776391%28v=vs.85%29.aspx>
    CommandLineToArgvW = WINFUNCTYPE(
        POINTER(LPWSTR),  LPCWSTR, POINTER(c_int),
        use_last_error=True
    )(("CommandLineToArgvW", windll.shell32))

    argc = c_int(0)
    argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
    if argv_unicode is None:
        raise WinError(get_last_error())

    # Because of <http://bugs.python.org/issue8775> (and similar limitations in
    # twisted), the 'bin/tahoe' script cannot invoke us with the actual Unicode arguments.
    # Instead it "mangles" or escapes them using \x7F as an escape character, which we
    # unescape here.
    def unmangle(s):
        return re.sub(ur'\x7F[0-9a-fA-F]*\;', lambda m: unichr(int(m.group(0)[1:-1], 16)), s)

    try:
        argv = [unmangle(argv_unicode[i]).encode('utf-8') for i in xrange(0, argc.value)]
    except Exception, e:
        _complain("%s:  could not unmangle Unicode arguments.\n%r"
                  % (sys.argv[0], [argv_unicode[i] for i in xrange(0, argc.value)]))
        raise

    # Take only the suffix with the same number of arguments as sys.argv.
    # This accounts for anything that can cause initial arguments to be stripped,
    # for example, the Python interpreter or any options passed to it, or runner
    # scripts such as 'coverage run'. It works even if there are no such arguments,
    # as in the case of a frozen executable created by bb-freeze or similar.

    sys.argv = argv[-len(sys.argv):]
    if sys.argv[0].endswith('.pyscript'):
        sys.argv[0] = sys.argv[0][:-9]
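
The important idiom is the guard at the top of initialize(): the whole function is a no-op unless sys.platform is 'win32', and a module-level flag makes repeated calls harmless. Reduced to just that skeleton, with the Windows-specific ctypes work left as a placeholder:

import sys

_done = False

def initialize():
    """Apply Windows-only console fixups; safe to call more than once."""
    global _done
    if sys.platform != 'win32' or _done:
        return
    _done = True
    # ... the ctypes-based stdout/stderr and argv fixups would go here ...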

Example 43

Project: youtube-dl
Source File: __init__.py
View license
def _real_main(argv=None):
    # Compatibility fixes for Windows
    if sys.platform == 'win32':
        # https://github.com/rg3/youtube-dl/issues/820
        codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)

    workaround_optparse_bug9161()

    setproctitle('youtube-dl')

    parser, opts, args = parseOpts(argv)

    # Set user agent
    if opts.user_agent is not None:
        std_headers['User-Agent'] = opts.user_agent

    # Set referer
    if opts.referer is not None:
        std_headers['Referer'] = opts.referer

    # Custom HTTP headers
    if opts.headers is not None:
        for h in opts.headers:
            if ':' not in h:
                parser.error('wrong header formatting, it should be key:value, not "%s"' % h)
            key, value = h.split(':', 1)
            if opts.verbose:
                write_string('[debug] Adding header from command line option %s:%s\n' % (key, value))
            std_headers[key] = value

    # Dump user agent
    if opts.dump_user_agent:
        write_string(std_headers['User-Agent'] + '\n', out=sys.stdout)
        sys.exit(0)

    # Batch file verification
    batch_urls = []
    if opts.batchfile is not None:
        try:
            if opts.batchfile == '-':
                batchfd = sys.stdin
            else:
                batchfd = io.open(
                    compat_expanduser(opts.batchfile),
                    'r', encoding='utf-8', errors='ignore')
            batch_urls = read_batch_urls(batchfd)
            if opts.verbose:
                write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n')
        except IOError:
            sys.exit('ERROR: batch file could not be read')
    all_urls = batch_urls + args
    all_urls = [url.strip() for url in all_urls]
    _enc = preferredencoding()
    all_urls = [url.decode(_enc, 'ignore') if isinstance(url, bytes) else url for url in all_urls]

    if opts.list_extractors:
        for ie in list_extractors(opts.age_limit):
            write_string(ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie._WORKING else '') + '\n', out=sys.stdout)
            matchedUrls = [url for url in all_urls if ie.suitable(url)]
            for mu in matchedUrls:
                write_string('  ' + mu + '\n', out=sys.stdout)
        sys.exit(0)
    if opts.list_extractor_descriptions:
        for ie in list_extractors(opts.age_limit):
            if not ie._WORKING:
                continue
            desc = getattr(ie, 'IE_DESC', ie.IE_NAME)
            if desc is False:
                continue
            if hasattr(ie, 'SEARCH_KEY'):
                _SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow')
                _COUNTS = ('', '5', '10', 'all')
                desc += ' (Example: "%s%s:%s" )' % (ie.SEARCH_KEY, random.choice(_COUNTS), random.choice(_SEARCHES))
            write_string(desc + '\n', out=sys.stdout)
        sys.exit(0)
    if opts.ap_list_mso:
        table = [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()]
        write_string('Supported TV Providers:\n' + render_table(['mso', 'mso name'], table) + '\n', out=sys.stdout)
        sys.exit(0)

    # Conflicting, missing and erroneous options
    if opts.usenetrc and (opts.username is not None or opts.password is not None):
        parser.error('using .netrc conflicts with giving username/password')
    if opts.password is not None and opts.username is None:
        parser.error('account username missing\n')
    if opts.ap_password is not None and opts.ap_username is None:
        parser.error('TV Provider account username missing\n')
    if opts.outtmpl is not None and (opts.usetitle or opts.autonumber or opts.useid):
        parser.error('using output template conflicts with using title, video ID or auto number')
    if opts.usetitle and opts.useid:
        parser.error('using title conflicts with using video ID')
    if opts.username is not None and opts.password is None:
        opts.password = compat_getpass('Type account password and press [Return]: ')
    if opts.ap_username is not None and opts.ap_password is None:
        opts.ap_password = compat_getpass('Type TV provider account password and press [Return]: ')
    if opts.ratelimit is not None:
        numeric_limit = FileDownloader.parse_bytes(opts.ratelimit)
        if numeric_limit is None:
            parser.error('invalid rate limit specified')
        opts.ratelimit = numeric_limit
    if opts.min_filesize is not None:
        numeric_limit = FileDownloader.parse_bytes(opts.min_filesize)
        if numeric_limit is None:
            parser.error('invalid min_filesize specified')
        opts.min_filesize = numeric_limit
    if opts.max_filesize is not None:
        numeric_limit = FileDownloader.parse_bytes(opts.max_filesize)
        if numeric_limit is None:
            parser.error('invalid max_filesize specified')
        opts.max_filesize = numeric_limit
    if opts.sleep_interval is not None:
        if opts.sleep_interval < 0:
            parser.error('sleep interval must be positive or 0')
    if opts.max_sleep_interval is not None:
        if opts.max_sleep_interval < 0:
            parser.error('max sleep interval must be positive or 0')
        if opts.max_sleep_interval < opts.sleep_interval:
            parser.error('max sleep interval must be greater than or equal to min sleep interval')
    else:
        opts.max_sleep_interval = opts.sleep_interval
    if opts.ap_mso and opts.ap_mso not in MSO_INFO:
        parser.error('Unsupported TV Provider, use --ap-list-mso to get a list of supported TV Providers')

    def parse_retries(retries):
        if retries in ('inf', 'infinite'):
            parsed_retries = float('inf')
        else:
            try:
                parsed_retries = int(retries)
            except (TypeError, ValueError):
                parser.error('invalid retry count specified')
        return parsed_retries
    if opts.retries is not None:
        opts.retries = parse_retries(opts.retries)
    if opts.fragment_retries is not None:
        opts.fragment_retries = parse_retries(opts.fragment_retries)
    if opts.buffersize is not None:
        numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize)
        if numeric_buffersize is None:
            parser.error('invalid buffer size specified')
        opts.buffersize = numeric_buffersize
    if opts.playliststart <= 0:
        raise ValueError('Playlist start must be positive')
    if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart:
        raise ValueError('Playlist end must be greater than playlist start')
    if opts.extractaudio:
        if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']:
            parser.error('invalid audio format specified')
    if opts.audioquality:
        opts.audioquality = opts.audioquality.strip('k').strip('K')
        if not opts.audioquality.isdigit():
            parser.error('invalid audio quality specified')
    if opts.recodevideo is not None:
        if opts.recodevideo not in ['mp4', 'flv', 'webm', 'ogg', 'mkv', 'avi']:
            parser.error('invalid video recode format specified')
    if opts.convertsubtitles is not None:
        if opts.convertsubtitles not in ['srt', 'vtt', 'ass']:
            parser.error('invalid subtitle format specified')

    if opts.date is not None:
        date = DateRange.day(opts.date)
    else:
        date = DateRange(opts.dateafter, opts.datebefore)

    # Do not download videos when there are audio-only formats
    if opts.extractaudio and not opts.keepvideo and opts.format is None:
        opts.format = 'bestaudio/best'

    # --all-sub automatically sets --write-sub if --write-auto-sub is not given
    # this was the old behaviour if only --all-sub was given.
    if opts.allsubtitles and not opts.writeautomaticsub:
        opts.writesubtitles = True

    outtmpl = ((opts.outtmpl is not None and opts.outtmpl) or
               (opts.format == '-1' and opts.usetitle and '%(title)s-%(id)s-%(format)s.%(ext)s') or
               (opts.format == '-1' and '%(id)s-%(format)s.%(ext)s') or
               (opts.usetitle and opts.autonumber and '%(autonumber)s-%(title)s-%(id)s.%(ext)s') or
               (opts.usetitle and '%(title)s-%(id)s.%(ext)s') or
               (opts.useid and '%(id)s.%(ext)s') or
               (opts.autonumber and '%(autonumber)s-%(id)s.%(ext)s') or
               DEFAULT_OUTTMPL)
    if not os.path.splitext(outtmpl)[1] and opts.extractaudio:
        parser.error('Cannot download a video and extract audio into the same'
                     ' file! Use "{0}.%(ext)s" instead of "{0}" as the output'
                     ' template'.format(outtmpl))

    any_getting = opts.geturl or opts.gettitle or opts.getid or opts.getthumbnail or opts.getdescription or opts.getfilename or opts.getformat or opts.getduration or opts.dumpjson or opts.dump_single_json
    any_printing = opts.print_json
    download_archive_fn = compat_expanduser(opts.download_archive) if opts.download_archive is not None else opts.download_archive

    # PostProcessors
    postprocessors = []
    # Add the metadata pp first, the other pps will copy it
    if opts.metafromtitle:
        postprocessors.append({
            'key': 'MetadataFromTitle',
            'titleformat': opts.metafromtitle
        })
    if opts.addmetadata:
        postprocessors.append({'key': 'FFmpegMetadata'})
    if opts.extractaudio:
        postprocessors.append({
            'key': 'FFmpegExtractAudio',
            'preferredcodec': opts.audioformat,
            'preferredquality': opts.audioquality,
            'nopostoverwrites': opts.nopostoverwrites,
        })
    if opts.recodevideo:
        postprocessors.append({
            'key': 'FFmpegVideoConvertor',
            'preferedformat': opts.recodevideo,
        })
    if opts.convertsubtitles:
        postprocessors.append({
            'key': 'FFmpegSubtitlesConvertor',
            'format': opts.convertsubtitles,
        })
    if opts.embedsubtitles:
        postprocessors.append({
            'key': 'FFmpegEmbedSubtitle',
        })
    if opts.embedthumbnail:
        already_have_thumbnail = opts.writethumbnail or opts.write_all_thumbnails
        postprocessors.append({
            'key': 'EmbedThumbnail',
            'already_have_thumbnail': already_have_thumbnail
        })
        if not already_have_thumbnail:
            opts.writethumbnail = True
    # XAttrMetadataPP should be run after post-processors that may change file
    # contents
    if opts.xattrs:
        postprocessors.append({'key': 'XAttrMetadata'})
    # Please keep ExecAfterDownload towards the bottom as it allows the user to modify the final file in any way.
    # So if the user is able to remove the file before your postprocessor runs it might cause a few problems.
    if opts.exec_cmd:
        postprocessors.append({
            'key': 'ExecAfterDownload',
            'exec_cmd': opts.exec_cmd,
        })
    external_downloader_args = None
    if opts.external_downloader_args:
        external_downloader_args = compat_shlex_split(opts.external_downloader_args)
    postprocessor_args = None
    if opts.postprocessor_args:
        postprocessor_args = compat_shlex_split(opts.postprocessor_args)
    match_filter = (
        None if opts.match_filter is None
        else match_filter_func(opts.match_filter))

    ydl_opts = {
        'usenetrc': opts.usenetrc,
        'username': opts.username,
        'password': opts.password,
        'twofactor': opts.twofactor,
        'videopassword': opts.videopassword,
        'ap_mso': opts.ap_mso,
        'ap_username': opts.ap_username,
        'ap_password': opts.ap_password,
        'quiet': (opts.quiet or any_getting or any_printing),
        'no_warnings': opts.no_warnings,
        'forceurl': opts.geturl,
        'forcetitle': opts.gettitle,
        'forceid': opts.getid,
        'forcethumbnail': opts.getthumbnail,
        'forcedescription': opts.getdescription,
        'forceduration': opts.getduration,
        'forcefilename': opts.getfilename,
        'forceformat': opts.getformat,
        'forcejson': opts.dumpjson or opts.print_json,
        'dump_single_json': opts.dump_single_json,
        'simulate': opts.simulate or any_getting,
        'skip_download': opts.skip_download,
        'format': opts.format,
        'listformats': opts.listformats,
        'outtmpl': outtmpl,
        'autonumber_size': opts.autonumber_size,
        'restrictfilenames': opts.restrictfilenames,
        'ignoreerrors': opts.ignoreerrors,
        'force_generic_extractor': opts.force_generic_extractor,
        'ratelimit': opts.ratelimit,
        'nooverwrites': opts.nooverwrites,
        'retries': opts.retries,
        'fragment_retries': opts.fragment_retries,
        'skip_unavailable_fragments': opts.skip_unavailable_fragments,
        'buffersize': opts.buffersize,
        'noresizebuffer': opts.noresizebuffer,
        'continuedl': opts.continue_dl,
        'noprogress': opts.noprogress,
        'progress_with_newline': opts.progress_with_newline,
        'playliststart': opts.playliststart,
        'playlistend': opts.playlistend,
        'playlistreverse': opts.playlist_reverse,
        'noplaylist': opts.noplaylist,
        'logtostderr': opts.outtmpl == '-',
        'consoletitle': opts.consoletitle,
        'nopart': opts.nopart,
        'updatetime': opts.updatetime,
        'writedescription': opts.writedescription,
        'writeannotations': opts.writeannotations,
        'writeinfojson': opts.writeinfojson,
        'writethumbnail': opts.writethumbnail,
        'write_all_thumbnails': opts.write_all_thumbnails,
        'writesubtitles': opts.writesubtitles,
        'writeautomaticsub': opts.writeautomaticsub,
        'allsubtitles': opts.allsubtitles,
        'listsubtitles': opts.listsubtitles,
        'subtitlesformat': opts.subtitlesformat,
        'subtitleslangs': opts.subtitleslangs,
        'matchtitle': decodeOption(opts.matchtitle),
        'rejecttitle': decodeOption(opts.rejecttitle),
        'max_downloads': opts.max_downloads,
        'prefer_free_formats': opts.prefer_free_formats,
        'verbose': opts.verbose,
        'dump_intermediate_pages': opts.dump_intermediate_pages,
        'write_pages': opts.write_pages,
        'test': opts.test,
        'keepvideo': opts.keepvideo,
        'min_filesize': opts.min_filesize,
        'max_filesize': opts.max_filesize,
        'min_views': opts.min_views,
        'max_views': opts.max_views,
        'daterange': date,
        'cachedir': opts.cachedir,
        'youtube_print_sig_code': opts.youtube_print_sig_code,
        'age_limit': opts.age_limit,
        'download_archive': download_archive_fn,
        'cookiefile': opts.cookiefile,
        'nocheckcertificate': opts.no_check_certificate,
        'prefer_insecure': opts.prefer_insecure,
        'proxy': opts.proxy,
        'socket_timeout': opts.socket_timeout,
        'bidi_workaround': opts.bidi_workaround,
        'debug_printtraffic': opts.debug_printtraffic,
        'prefer_ffmpeg': opts.prefer_ffmpeg,
        'include_ads': opts.include_ads,
        'default_search': opts.default_search,
        'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
        'encoding': opts.encoding,
        'extract_flat': opts.extract_flat,
        'mark_watched': opts.mark_watched,
        'merge_output_format': opts.merge_output_format,
        'postprocessors': postprocessors,
        'fixup': opts.fixup,
        'source_address': opts.source_address,
        'call_home': opts.call_home,
        'sleep_interval': opts.sleep_interval,
        'max_sleep_interval': opts.max_sleep_interval,
        'external_downloader': opts.external_downloader,
        'list_thumbnails': opts.list_thumbnails,
        'playlist_items': opts.playlist_items,
        'xattr_set_filesize': opts.xattr_set_filesize,
        'match_filter': match_filter,
        'no_color': opts.no_color,
        'ffmpeg_location': opts.ffmpeg_location,
        'hls_prefer_native': opts.hls_prefer_native,
        'hls_use_mpegts': opts.hls_use_mpegts,
        'external_downloader_args': external_downloader_args,
        'postprocessor_args': postprocessor_args,
        'cn_verification_proxy': opts.cn_verification_proxy,
        'geo_verification_proxy': opts.geo_verification_proxy,

    }

    with YoutubeDL(ydl_opts) as ydl:
        # Update version
        if opts.update_self:
            update_self(ydl.to_screen, opts.verbose, ydl._opener)

        # Remove cache dir
        if opts.rm_cachedir:
            ydl.cache.remove()

        # Maybe do nothing
        if (len(all_urls) < 1) and (opts.load_info_filename is None):
            if opts.update_self or opts.rm_cachedir:
                sys.exit()

            ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
            parser.error(
                'You must provide at least one URL.\n'
                'Type youtube-dl --help to see a list of all options.')

        try:
            if opts.load_info_filename is not None:
                retcode = ydl.download_with_info_file(compat_expanduser(opts.load_info_filename))
            else:
                retcode = ydl.download(all_urls)
        except MaxDownloadsReached:
            ydl.to_screen('--max-download limit reached, aborting.')
            retcode = 101

    sys.exit(retcode)
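
The sys.platform check at the very top registers a codec alias so that the Windows code page name 'cp65001' resolves to UTF-8 (the same workaround appears in the tahoe-lafs example above, which links it to http://bugs.python.org/issue6058); recent Python versions know the alias natively, so this matters mainly on older interpreters. The workaround on its own:

import codecs
import sys

if sys.platform == 'win32':
    # Make lookups of the 'cp65001' code page fall back to the UTF-8 codec.
    codecs.register(
        lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)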

Example 44

Project: pytango
Source File: setup.py
View license
def setup_args():

    directories = {
        'include_dirs': [],
        'library_dirs': [],
        'libraries':    [],
    }
    sys_libs = []

    # Link specifically to libtango version 9
    tangolib = ':libtango.so.9' if 'linux' in sys.platform else 'tango'
    directories['libraries'].append(tangolib)

    add_lib('omni', directories, sys_libs, lib_name='omniORB4')
    add_lib('zmq', directories, sys_libs, lib_name='libzmq')
    add_lib('tango', directories, sys_libs, inc_suffix='tango')

    # special boost-python configuration

    BOOST_ROOT = os.environ.get('BOOST_ROOT')
    boost_library_name = 'boost_python'
    if BOOST_ROOT is None:
        if 'linux' in sys.platform:
            dist_name = platform.linux_distribution()[0].lower()
            debian_based = 'debian' in dist_name or 'ubuntu' in dist_name or \
                           'mint' in dist_name
            py_ver = platform.python_version_tuple()
            if debian_based:
                # when building with  multiple version of python  on debian we
                # need to link against boost_python-py25/-py26 etc...
                pyver = "-py"
                pyver += "".join(map(str, py_ver[:2]))
                boost_library_name += pyver
            elif 'gentoo' in dist_name:
                boost_library_name += "-" + ".".join(map(str, py_ver[:2]))
            elif 'fedora' in dist_name or 'centos' in dist_name:
                if int(py_ver[0]) == 3:
                    boost_library_name += '3'
    else:
        inc_dir = os.path.join(BOOST_ROOT, 'include')
        lib_dirs = [os.path.join(BOOST_ROOT, 'lib')]
        if is64:
            lib64_dir = os.path.join(BOOST_ROOT, 'lib64')
            if os.path.isdir(lib64_dir):
                lib_dirs.insert(0, lib64_dir)

        directories['include_dirs'].append(inc_dir)
        directories['library_dirs'].extend(lib_dirs)

    directories['libraries'].append(boost_library_name)

    # special numpy configuration

    numpy_c_include = get_c_numpy()
    if numpy_c_include is not None:
        directories['include_dirs'].append(numpy_c_include)

    macros = []
    if not has_numpy():
        macros.append(('DISABLE_PYTANGO_NUMPY', None))
    else:
        macros.append(('PYTANGO_NUMPY_VERSION', '"%s"' % numpy.__version__))

    if 'posix' in os.name:
        directories = pkg_config(*sys_libs, **directories)

    Release = get_release_info()

    author = Release.authors['Coutinho']

    please_debug = False

    packages = [
        'tango',
        'tango.databaseds',
        'tango.databaseds.db_access',
    ]

    py_modules = [
        'PyTango',  # Backward compatibilty
    ]

    provides = [
        'tango',
        'PyTango',  # Backward compatibilty
    ]

    requires = [
        'boost_python (>=1.33)',
        'numpy (>=1.1)',
        'six',
    ]

    install_requires = [
        'six',
    ]

    package_data = {
        'PyTango': [],
    }

    data_files = []

    classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Environment :: Other Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved ::'
        ' GNU Library or Lesser General Public License (LGPL)',
        'Natural Language :: English',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Operating System :: POSIX :: Linux',
        'Operating System :: Unix',
        'Programming Language :: C',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development :: Libraries',
    ]

    # Note for PyTango developers:
    # Compilation time can be greatly reduced by compiling the file
    # src/precompiled_header.hpp as src/precompiled_header.hpp.gch
    # and then uncommenting this line. Someday maybe this will be
    # automated...
    extra_compile_args = [
        # '-include ext/precompiled_header.hpp',
    ]

    extra_link_args = [
        '-Wl,-h',
        '-Wl,--strip-all',
    ]

    if please_debug:
        extra_compile_args += ['-g', '-O0']
        extra_link_args += ['-g', '-O0']

    src_dir = abspath('ext')
    client_dir = src_dir
    server_dir = os.path.join(src_dir, 'server')

    clientfiles = sorted(
        os.path.join(client_dir, fname)
        for fname in os.listdir(client_dir)
        if fname.endswith('.cpp'))

    serverfiles = sorted(
        os.path.join(server_dir, fname)
        for fname in os.listdir(server_dir)
        if fname.endswith('.cpp'))

    cppfiles = clientfiles + serverfiles
    directories['include_dirs'].extend([client_dir, server_dir])

    include_dirs = uniquify(directories['include_dirs'])
    library_dirs = uniquify(directories['library_dirs'])
    libraries = uniquify(directories['libraries'])

    pytango_ext = Extension(
        name='_tango',
        sources=cppfiles,
        include_dirs=include_dirs,
        library_dirs=library_dirs,
        libraries=libraries,
        define_macros=macros,
        extra_compile_args=extra_compile_args,
        extra_link_args=extra_link_args,
        language='c++',
        depends=[])

    cmdclass = {
        'build': build,
        'build_ext': build_ext,
        'install_html': install_html,
        'install': install}

    if sphinx:
        cmdclass['build_doc'] = build_doc

    long_description = open('README.rst').read()

    opts = dict(
        name='pytango',
        version=Release.version,
        description=Release.description,
        long_description=long_description,
        author=author[0],
        author_email=author[1],
        url=Release.url,
        download_url=Release.download_url,
        platforms=Release.platform,
        license=Release.license,
        packages=packages,
        py_modules=py_modules,
        classifiers=classifiers,
        package_data=package_data,
        data_files=data_files,
        provides=provides,
        keywords=Release.keywords,
        requires=requires,
        install_requires=install_requires,
        ext_package='tango',
        ext_modules=[pytango_ext],
        cmdclass=cmdclass)

    return opts
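
Both platform tests in this setup script use substring matching, 'linux' in sys.platform, rather than equality. That is deliberate: sys.platform is 'linux2' on Python 2 and plain 'linux' on Python 3.3+, so a substring or startswith test covers both. A small illustration, reusing the library name from the example:

import sys

def is_linux():
    # Matches both 'linux2' (older interpreters) and 'linux' (Python 3.3+).
    return sys.platform.startswith('linux')

tangolib = ':libtango.so.9' if is_linux() else 'tango'
print(tangolib)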

Example 45

Project: qutebrowser
Source File: log.py
View license
def qt_message_handler(msg_type, context, msg):
    """Qt message handler to redirect qWarning etc. to the logging system.

    Args:
        QtMsgType msg_type: The level of the message.
        QMessageLogContext context: The source code location of the message.
        msg: The message text.
    """
    # Mapping from Qt logging levels to the matching logging module levels.
    # Note we map critical to ERROR as it's actually "just" an error, and fatal
    # to critical.
    qt_to_logging = {
        QtCore.QtDebugMsg: logging.DEBUG,
        QtCore.QtWarningMsg: logging.WARNING,
        QtCore.QtCriticalMsg: logging.ERROR,
        QtCore.QtFatalMsg: logging.CRITICAL,
    }
    try:
        # pylint: disable=no-member,useless-suppression
        qt_to_logging[QtCore.QtInfoMsg] = logging.INFO
    except AttributeError:
        # Qt < 5.5
        pass

    # Change levels of some well-known messages to debug so they don't get
    # shown to the user.
    #
    # If a message starts with any text in suppressed_msgs, it's not logged as
    # error.
    suppressed_msgs = [
        # PNGs in Qt with broken color profile
        # https://bugreports.qt.io/browse/QTBUG-39788
        'libpng warning: iCCP: Not recognizing known sRGB profile that has '
            'been edited',  # flake8: disable=E131
        'libpng warning: iCCP: known incorrect sRGB profile',
        # Hopefully harmless warning
        'OpenType support missing for script ',
        # Error if a QNetworkReply gets two different errors set. Harmless Qt
        # bug on some pages.
        # https://bugreports.qt.io/browse/QTBUG-30298
        'QNetworkReplyImplPrivate::error: Internal problem, this method must '
            'only be called once.',
        # Sometimes indicates missing text, but most of the time harmless
        'load glyph failed ',
        # Harmless, see https://bugreports.qt.io/browse/QTBUG-42479
        'content-type missing in HTTP POST, defaulting to '
            'application/x-www-form-urlencoded. '
            'Use QNetworkRequest::setHeader() to fix this problem.',
        # https://bugreports.qt.io/browse/QTBUG-43118
        'Using blocking call!',
        # Hopefully harmless
        '"Method "GetAll" with signature "s" on interface '
            '"org.freedesktop.DBus.Properties" doesn\'t exist',
        '"Method \\"GetAll\\" with signature \\"s\\" on interface '
            '\\"org.freedesktop.DBus.Properties\\" doesn\'t exist\\n"',
        'WOFF support requires QtWebKit to be built with zlib support.',
        # Weird Enlightenment/GTK X extensions
        'QXcbWindow: Unhandled client message: "_E_',
        'QXcbWindow: Unhandled client message: "_ECORE_',
        'QXcbWindow: Unhandled client message: "_GTK_',
        # Happens on AppVeyor CI
        'SetProcessDpiAwareness failed:',
        # https://bugreports.qt.io/browse/QTBUG-49174
        'QObject::connect: Cannot connect (null)::stateChanged('
            'QNetworkSession::State) to '
            'QNetworkReplyHttpImpl::_q_networkSessionStateChanged('
            'QNetworkSession::State)',
        # https://bugreports.qt.io/browse/QTBUG-53989
        "Image of format '' blocked because it is not considered safe. If you "
            "are sure it is safe to do so, you can white-list the format by "
            "setting the environment variable QTWEBKIT_IMAGEFORMAT_WHITELIST=",
        # Installing Qt from the installer may cause it looking for SSL3 which
        # may not be available on the system
        "QSslSocket: cannot resolve SSLv3_client_method",
        "QSslSocket: cannot resolve SSLv3_server_method",
        # When enabling debugging with QtWebEngine
        "Remote debugging server started successfully. Try pointing a "
            "Chromium-based browser to ",
        # https://github.com/The-Compiler/qutebrowser/issues/1287
        "QXcbClipboard: SelectionRequest too old",
        # https://github.com/The-Compiler/qutebrowser/issues/2071
        'QXcbWindow: Unhandled client message: ""',
        # No idea where this comes from...
        "QObject::disconnect: Unexpected null parameter",
    ]
    if sys.platform == 'darwin':
        suppressed_msgs += [
            'libpng warning: iCCP: known incorrect sRGB profile',
            # https://bugreports.qt.io/browse/QTBUG-47154
            'virtual void QSslSocketBackendPrivate::transmit() SSLRead failed '
                'with: -9805',  # flake8: disable=E131
        ]

    # Messages which will trigger an exception immediately
    critical_msgs = [
        'Could not parse stylesheet of object',
    ]

    if any(msg.strip().startswith(pattern) for pattern in critical_msgs):
        # For some reason, the stack gets lost when raising here...
        logger = logging.getLogger('misc')
        logger.error("Got critical Qt warning!", stack_info=True)
        raise CriticalQtWarning(msg)
    elif any(msg.strip().startswith(pattern) for pattern in suppressed_msgs):
        level = logging.DEBUG
    else:
        level = qt_to_logging[msg_type]

    if context.function is None:
        func = 'none'
    elif ':' in context.function:
        func = '"{}"'.format(context.function)
    else:
        func = context.function

    if context.category is None or context.category == 'default':
        name = 'qt'
    else:
        name = 'qt-' + context.category
    if msg.splitlines()[0] == ('This application failed to start because it '
                               'could not find or load the Qt platform plugin '
                               '"xcb".'):
        # Handle this message specially.
        msg += ("\n\nOn Archlinux, this should fix the problem:\n"
                "    pacman -S libxkbcommon-x11")
        faulthandler.disable()
    stack = ''.join(traceback.format_stack())
    record = qt.makeRecord(name, level, context.file, context.line, msg, None,
                           None, func, sinfo=stack)
    qt.handle(record)
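
A handler with this (msg_type, context, msg) signature is normally registered once at startup. A minimal sketch of the registration step, assuming PyQt5 (whose QtCore.qInstallMessageHandler expects exactly this signature); the simplified handler below collapses every Qt level onto one logger rather than reproducing the mapping and suppression logic above:

import logging
from PyQt5 import QtCore

def simple_handler(msg_type, context, msg):
    # A deliberately crude stand-in: log everything Qt emits as a warning.
    logging.getLogger('qt').warning(msg)

QtCore.qInstallMessageHandler(simple_handler)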

Example 46

Project: pinguino-ide
Source File: pinguino_tools.py
View license
    @Debugger.debug_method
    def link(self, filename):
        """Link.

        NB :  "--opt-code-size"   deprecated
              "--use-non-free"    implicit -I and -L options for non-free headers and libs
                    "-I" + os.path.join(self.P8_DIR, 'sdcc', 'include', 'pic16'),\
                    "-I" + os.path.join(self.P8_DIR, 'sdcc', 'non-free', 'include', 'pic16'),\
                    "-I" + os.path.join(self.P8_DIR, 'pinguino', 'core'),\
                    "-I" + os.path.join(self.P8_DIR, 'pinguino', 'libraries'),\
                    "-L" + os.path.join(self.P8_DIR, 'sdcc', 'lib', 'pic16'),\
                    "-L" + os.path.join(self.P8_DIR, 'sdcc', 'non-free', 'lib', 'pic16'),\
        """

        error = []
        board = self.get_board()
        fichier = open(os.path.join(os.path.expanduser(self.SOURCE_DIR), "stdout"), "w+")

        user_imports = self.get_user_imports_p8()
        #for lib_dir in self.USER_P8_LIBS:
            #user_imports.append("-I" + lib_dir)

        if board.arch == 8:

            if board.bldr == 'boot2':
                sortie = Popen([self.COMPILER_8BIT,
                    "--verbose",\
                    "-mpic16",\
                    "--denable-peeps",\
                    "--obanksel=9",\
                    "--optimize-cmp",\
                    "--optimize-df",\
                    "--no-crt",\
                    "-Wl-s" + os.path.join(self.P8_DIR, 'lkr', board.bldr + '.' + board.proc + '.lkr') + ",-m",\
                    "-p" + board.proc,\
                    "-D" + board.bldr,\
                    "-D" + board.board,\
                    "-DBOARD=\"" + board.board + "\"",\
                    "-DPROC=\"" + board.proc + "\"",\
                    "-DBOOT_VER=2",\
                    "--use-non-free",\
                    "-I" + os.path.join(self.P8_DIR, 'include', 'pinguino', 'core'),\
                    "-I" + os.path.join(self.P8_DIR, 'include', 'pinguino', 'libraries'),\
                    'libio' + board.proc + '.lib',\
                    'libdev' + board.proc + '.lib',\
                    'libc18f.lib',\
                    'libm18f.lib',\
                    'libsdcc.lib',\
                    "-o" + os.path.join(os.path.expanduser(self.SOURCE_DIR), 'main.hex'),\
                    os.path.join(self.P8_DIR, 'obj', 'application_iface.o'),\
                    os.path.join(self.P8_DIR, 'obj', 'boot_iface.o'),\
                    os.path.join(self.P8_DIR, 'obj', 'usb_descriptors.o'),\
                    os.path.join(self.P8_DIR, 'obj', 'crt0ipinguino.o'),\
                    os.path.join(os.path.expanduser(self.SOURCE_DIR), 'main.o')] + user_imports,\
                    stdout=fichier, stderr=STDOUT)

            elif board.bldr == 'boot4':
                sortie = Popen([self.COMPILER_8BIT,
                    "--verbose", "-V",\
                    "-mpic16",\
                    # optimization
                    "--denable-peeps",\
                    "--obanksel=9",\
                    "--optimize-cmp",\
                    "--optimize-df",\
                    # don't want to link default crt0i.o but crt0i.c
                    "--no-crt",\
                    # move all int. vectors after bootloader code
                    "--ivt-loc=" + str(board.memstart),\
                    # link memory map
                    "-Wl-s" + os.path.join(self.P8_DIR, 'lkr', board.bldr + '.' + board.proc + '.lkr') + ",-m",\
                    "-p" + board.proc,\
                    "-D" + board.bldr,\
                    "-D" + board.board,\
                    "-DBOARD=\"" + board.board + "\"",\
                    "-DPROC=\"" + board.proc + "\"",\
                    "-DBOOT_VER=4",\
                    "--use-non-free",\
                    "-I" + os.path.join(self.P8_DIR, 'include', 'pinguino', 'core'),\
                    "-I" + os.path.join(self.P8_DIR, 'include', 'pinguino', 'libraries'),\
                    os.path.join(os.path.expanduser(self.SOURCE_DIR), 'main.o'),\
                    'libio' + board.proc + '.lib',\
                    'libdev' + board.proc + '.lib',\
                    'libc18f.lib',\
                    'libm18f.lib',\
                    # link the default run-time module (crt0i.o)
                    # except when "-no-crt" option is used
                    'libsdcc.lib',\
                    "-o" + os.path.join(os.path.expanduser(self.SOURCE_DIR), 'main.hex'),\
                    ] + user_imports,\
                    stdout=fichier, stderr=STDOUT)

            elif board.bldr == 'noboot':
                sortie = Popen([self.COMPILER_8BIT,
                    "--verbose",\
                    "-mpic16",\
                    "--denable-peeps",\
                    "--obanksel=9",\
                    "--optimize-cmp",\
                    "--optimize-df",\
                    #"--no-crt",\ we use default run-time module inside libsdcc.lib
                    "-Wl-s" + os.path.join(self.P8_DIR, 'lkr', board.proc + '_g.lkr') + ",-m",\
                    "-p" + board.proc,\
                    "-D" + board.bldr,\
                    "-D" + board.board,\
                    "-DBOARD=\"" + board.board + "\"",\
                    "-DPROC=\"" + board.proc + "\"",\
                    "-DBOOT_VER=0",\
                    "--use-non-free",\
                    "-I" + os.path.join(self.P8_DIR, 'include', 'pinguino', 'core'),\
                    "-I" + os.path.join(self.P8_DIR, 'include', 'pinguino', 'libraries'),\
                    'libio' + board.proc + '.lib',\
                    'libdev' + board.proc + '.lib',\
                    'libc18f.lib',\
                    'libm18f.lib',\
                    # link the default run-time module
                    'libsdcc.lib',\
                    "-o" + os.path.join(os.path.expanduser(self.SOURCE_DIR), 'main.hex'),\
                    os.path.join(os.path.expanduser(self.SOURCE_DIR), 'main.o')] + user_imports,\
                    stdout=fichier, stderr=STDOUT)

        else:  # if board.arch == 32:

            makefile = os.path.join(os.path.expanduser(self.SOURCE_DIR), 'Makefile32.'+os.getenv("PINGUINO_OS_NAME"))

            user_imports32 = self.get_user_imports_p32()

            if user_imports32: _IDE_USERLIBS_ = ["_IDE_USERLIBS_=" + user_imports32]
            else: _IDE_USERLIBS_ = []

            #self.report(makefile)

            sortie = Popen([self.MAKE,
                            "--makefile=" + makefile,
                            "_IDE_PDEDIR_=" + os.path.dirname(filename),
                            "_IDE_PROC_=" + board.proc,
                            "_IDE_BOARD_=" + board.board,
                            "_IDE_BINDIR_=" + self.P32_BIN,  #default /usr/bin
                            "_IDE_P32DIR_=" + self.P32_DIR,  #default /usr/share/pinguino-11.0/p32
                            "_IDE_SRCDIR_=" + self.SOURCE_DIR,
                            "_IDE_USERHOMEDIR_=" + os.getenv("PINGUINO_USER_PATH"),  #default ~/.pinguino
                            "_IDE_OSARCH_=" + os.getenv("PINGUINO_OS_ARCH"),
                            "_IDE_HEAP_SIZE_=" + self.HEAPSIZE,
                            "_IDE_MIPS16_ENABLE_=" + self.MIPS16,
                            "_IDE_OPTIMIZATION_=" + self.OPTIMIZATION,

                         ] + _IDE_USERLIBS_,

                         stdout=fichier, stderr=STDOUT)

        sortie.communicate()

        fichier.seek(0)
        # Check if child process has terminated
        if sortie.poll() != 0:
            for ligne in fichier:
                if ligne.find("error") != -1:
                    error.append(ligne)
        fichier.close()

        if sys.platform == "win32":

            if board.board in ["PIC32_PINGUINO_220", "Pinguino32MX220", "Pinguino32MX250", "Pinguino32MX270"]:
                badrecord = ":040000059D0040001A\n"
            else:
                badrecord = ":040000059D006000FA\n"

            if os.path.exists(os.path.join(os.path.expanduser(self.SOURCE_DIR), "main32tmp.hex")):
                fichiersource = open(os.path.join(os.path.expanduser(self.SOURCE_DIR), "main32tmp.hex"), "r")
                fichierdest = open(os.path.join(os.path.expanduser(self.SOURCE_DIR), "main32.hex"), "w+")
                for line in fichiersource:
                    if line != badrecord:
                        fichierdest.writelines(line)
                fichiersource.close()
                fichierdest.close()
                os.remove(os.path.join(os.path.expanduser(self.SOURCE_DIR), "main32tmp.hex"))

        return sortie.poll(), error
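
The subprocess handling here follows one pattern throughout: redirect stdout and stderr to a file, wait for the child to exit, then scan the file for error lines. A minimal, stand-alone sketch of that pattern with a sys.platform switch for the command (the command and log file names are hypothetical):

import sys
from subprocess import Popen, STDOUT

log_path = 'build.log'                       # hypothetical log file
cmd = ['nmake'] if sys.platform == 'win32' else ['make']

log = open(log_path, 'w+')
child = Popen(cmd, stdout=log, stderr=STDOUT)
child.communicate()                          # wait for the build to finish
log.seek(0)
errors = [line for line in log if 'error' in line.lower()]
log.close()

print(errors)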

Example 47

Project: pymo
Source File: sysconfig.py
View license
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    import re
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)

        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        _CONFIG_VARS['userbase'] = _getuserbase()

        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE

        # Convert srcdir into an absolute path if it appears necessary.
        # Normally it is relative to the build directory.  However, during
        # testing, for example, we might be running a non-installed python
        # from a different directory.
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
                base != cwd):
                # srcdir is relative and we are not in the same directory
                # as the executable. Assume executable is in the build
                # directory and make srcdir absolute.
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2] # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On Mac OS X before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub('-arch\s+\w+\s', ' ', flags)
                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                        flags = _CONFIG_VARS[key]
                        flags = re.sub('-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compile an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer  on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search('-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS',
                             # a number of derived variables. These need to be
                             # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                            flags = _CONFIG_VARS[key]
                            flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS
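
The same lookup is exposed by the standard library's sysconfig module, which mirrors both calling conventions shown above: no arguments returns the full dictionary, and passing names returns a list of values in the same order. A small usage sketch:

import sys
import sysconfig

# Full dictionary of configuration variables for this interpreter.
config = sysconfig.get_config_vars()

# Looking up specific names returns a list in the same order.
cflags, ldflags = sysconfig.get_config_vars('CFLAGS', 'LDFLAGS')

if sys.platform == 'darwin':
    # On macOS these strings may carry -arch/-isysroot flags that the
    # distutils copy above strips or rewrites under some conditions.
    print(cflags)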

Example 48

Project: imagrium
Source File: sysconfig.py
View license
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    import re
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)

        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        _CONFIG_VARS['userbase'] = _getuserbase()

        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE

        # Convert srcdir into an absolute path if it appears necessary.
        # Normally it is relative to the build directory.  However, during
        # testing, for example, we might be running a non-installed python
        # from a different directory.
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
                base != cwd):
                # srcdir is relative and we are not in the same directory
                # as the executable. Assume executable is in the build
                # directory and make srcdir absolute.
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2] # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On Mac OS X before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub('-arch\s+\w+\s', ' ', flags)
                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                        flags = _CONFIG_VARS[key]
                        flags = re.sub('-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compile an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer  on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search('-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS',
                             # a number of derived variables. These need to be
                             # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                            flags = _CONFIG_VARS[key]
                            flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS
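
One detail worth isolating from the Darwin branch above is the ARCHFLAGS override: when the environment variable is set, the stock -arch flags are stripped and the user's value is appended. A minimal stand-alone sketch of that substitution (the sample flag string is made up):

import os
import re

flags = '-arch x86_64 -O2 -isysroot /Dev/SDK'    # hypothetical CFLAGS value
arch_override = os.environ.get('ARCHFLAGS', '-arch arm64')

# Drop any existing "-arch <name>" pairs, then append the override,
# mirroring what the Darwin branch above does for every affected variable.
flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch_override
print(flags)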

Example 49

Project: brickv
Source File: red_tab_overview.py
View license
    def cb_overview(self, result):
        # check if the tab is still on view or not
        if not self.is_tab_on_focus:
            self.refresh_timer.stop()
            return

        self.refresh_counter = 0
        self.refresh_timer.start(REFRESH_TIMEOUT)

        okay, message = check_script_result(result, decode_stderr=True)

        if not okay:
            self.label_error.setText('<b>Error:</b> ' + QtCore.Qt.escape(message))
            self.label_error.show()
            return

        self.label_error.hide()

        try:
            data = json.loads(zlib.decompress(buffer(result.stdout)).decode('utf-8'))

            days, days_remainder = divmod(int(data['uptime']), 24 * 60 * 60)
            hours, hours_remainder = divmod(days_remainder, 60 * 60)
            minutes, _ = divmod(hours_remainder, 60)
            uptime = ''

            if days > 0:
                uptime += str(days)

                if days == 1:
                    uptime += ' day '
                else:
                    uptime += ' days '

            if hours > 0:
                uptime += str(hours)

                if hours == 1:
                    uptime += ' hour '
                else:
                    uptime += ' hours '

            uptime += str(minutes)

            if minutes == 1:
                uptime += ' minute'
            else:
                uptime += ' minutes'

            cpu_percent = data['cpu_used']
            cpu_percent_v = int(data['cpu_used'])

            memory_used = self.bytes2human(int(data['mem_used']))
            memory_total = self.bytes2human(int(data['mem_total']))
            memory_percent = "%.1f" % ((float(memory_used) / float(memory_total)) * 100)
            memory_percent_v = int(memory_percent.split('.')[0])

            storage_used = self.bytes2human(int(data['disk_used']))
            storage_total = self.bytes2human(int(data['disk_total']))
            storage_percent = "%.1f" % ((float(storage_used) / float(storage_total)) * 100)
            storage_percent_v = int(storage_percent.split('.')[0])

            nic_data_dict = data['ifaces']
            processes_data_list = data['processes']
        except:
            # some parsing error due to malformed or incomplete output occurred.
            # ignore it and wait for the next update
            return

        self.label_uptime_value.setText(uptime)

        pbar_cpu_fmt = "{0}%".format(cpu_percent)
        pbar_memory_fmt = "{0}% [{1} of {2} MiB]".format(memory_percent, memory_used, memory_total)
        pbar_storage_fmt = "{0}% [{1} of {2} GiB]".format(storage_percent, storage_used, storage_total)

        if sys.platform == 'darwin':
            self.label_pbar_cpu.show()
            self.label_pbar_memory.show()
            self.label_pbar_storage.show()
            self.label_pbar_cpu.setText(pbar_cpu_fmt)
            self.label_pbar_memory.setText(pbar_memory_fmt)
            self.label_pbar_storage.setText(pbar_storage_fmt)
        else:
            self.pbar_cpu.setFormat(pbar_cpu_fmt)
            self.pbar_memory.setFormat(pbar_memory_fmt)
            self.pbar_storage.setFormat(pbar_storage_fmt)

        self.pbar_cpu.setValue(cpu_percent_v)
        self.pbar_memory.setValue(memory_percent_v)
        self.pbar_storage.setValue(storage_percent_v)

        self.nic_item_model.removeRows(0, self.nic_item_model.rowCount())

        def _get_nic_transfer_rate(bytes_now, bytes_previous, delta_time):
            return "%.1f" % float(((bytes_now - bytes_previous) / delta_time) / 1024)

        new_time = time.time()
        delta = new_time - self.nic_time
        self.nic_time = new_time

        for i, key in enumerate(nic_data_dict):
            if key not in self.nic_previous_bytes:
                self.nic_time = time.time()
                self.nic_item_model.setItem(i, 0, QtGui.QStandardItem(key))
                self.nic_item_model.setItem(i, 1, QtGui.QStandardItem("Collecting data..."))
                self.nic_item_model.setItem(i, 2, QtGui.QStandardItem("Collecting data..."))
            else:
                download_rate = _get_nic_transfer_rate(nic_data_dict[key][1],
                                                       self.nic_previous_bytes[key]['received'],
                                                       delta)

                upload_rate = _get_nic_transfer_rate(nic_data_dict[key][0],
                                                     self.nic_previous_bytes[key]['sent'],
                                                     delta)

                self.nic_item_model.setItem(i, 0, QtGui.QStandardItem(key))
                self.nic_item_model.setItem(i, 1, QtGui.QStandardItem(download_rate + " KiB/s"))
                self.nic_item_model.setItem(i, 2, QtGui.QStandardItem(upload_rate + " KiB/s"))

            self.nic_previous_bytes[key] = {'sent': nic_data_dict[key][0],
                                            'received': nic_data_dict[key][1]}

        self.nic_item_model.sort(self.tview_nic_previous_sort['column_index'],
                                 self.tview_nic_previous_sort['order'])

        self.process_item_model.removeRows(0, self.process_item_model.rowCount())

        if self.cbox_based_on.currentIndex() == 0:
            processes_data_list_sorted = sorted(processes_data_list,
                                                key=itemgetter('cpu'),
                                                reverse=True)
        elif self.cbox_based_on.currentIndex() == 1:
            processes_data_list_sorted = sorted(processes_data_list,
                                                key=itemgetter('mem'),
                                                reverse=True)

        processes_data_list_sorted = processes_data_list_sorted[:self.sbox_number_of_process.value()]

        for i, p in enumerate(processes_data_list_sorted):
            name = unicode(p['name'])
            cmdline = unicode(p['cmd'])

            if len(cmdline) == 0:
                cmdline = name

            item_name = QtGui.QStandardItem(name)
            item_name.setToolTip(cmdline)
            self.process_item_model.setItem(i, 0, item_name)

            item_pid = QtGui.QStandardItem(unicode(p['pid']))
            self.process_item_model.setItem(i, 1, item_pid)

            item_user = QtGui.QStandardItem(unicode(p['user']))
            self.process_item_model.setItem(i, 2, item_user)

            cpu = p['cpu']
            item_cpu = QtGui.QStandardItem(unicode(cpu / 10.0)+'%')
            item_cpu.setData(cpu)
            self.process_item_model.setItem(i, 3, item_cpu)

            mem = p['mem']
            item_mem = QtGui.QStandardItem(unicode(mem / 10.0)+'%')
            item_mem.setData(mem)
            self.process_item_model.setItem(i, 4, item_mem)

        self.process_item_model.sort(self.tview_process_previous_sort['column_index'],
                                     self.tview_process_previous_sort['order'])
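
The uptime formatting above boils down to two divmod steps before the day/hour/minute strings are assembled. A small stand-alone sketch of the same arithmetic (the sample value is arbitrary):

uptime_seconds = 93784  # 1 day, 2 hours, 3 minutes, 4 seconds

days, remainder = divmod(uptime_seconds, 24 * 60 * 60)
hours, remainder = divmod(remainder, 60 * 60)
minutes, _ = divmod(remainder, 60)

print("%d days %d hours %d minutes" % (days, hours, minutes))  # 1 days 2 hours 3 minutes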

Example 50

Project: scandir
Source File: test_walk.py
View license
    def test_traversal(self):
        # Build:
        #     TESTFN/
        #       TEST1/              a file kid and two directory kids
        #         tmp1
        #         SUB1/             a file kid and a directory kid
        #           tmp2
        #           SUB11/          no kids
        #         SUB2/             a file kid and a dirsymlink kid
        #           tmp3
        #           link/           a symlink to TESTFN.2
        #       TEST2/
        #         tmp4              a lone file
        walk_path = os.path.join(self.testfn, "TEST1")
        sub1_path = os.path.join(walk_path, "SUB1")
        sub11_path = os.path.join(sub1_path, "SUB11")
        sub2_path = os.path.join(walk_path, "SUB2")
        tmp1_path = os.path.join(walk_path, "tmp1")
        tmp2_path = os.path.join(sub1_path, "tmp2")
        tmp3_path = os.path.join(sub2_path, "tmp3")
        link_path = os.path.join(sub2_path, "link")
        t2_path = os.path.join(self.testfn, "TEST2")
        tmp4_path = os.path.join(self.testfn, "TEST2", "tmp4")

        # Create stuff.
        os.makedirs(sub11_path)
        os.makedirs(sub2_path)
        os.makedirs(t2_path)
        for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path:
            f = open(path, "w")
            f.write("I'm " + path + " and proud of it.  Blame test_os.\n")
            f.close()
        has_symlink = hasattr(os, "symlink")
        if has_symlink:
            try:
                if sys.platform == 'win32' and sys.version_info >= (3, 2):
                    # "target_is_directory" was only added in Python 3.2 (on Windows)
                    os.symlink(os.path.abspath(t2_path), link_path, target_is_directory=True)
                else:
                    os.symlink(os.path.abspath(t2_path), link_path)
                sub2_tree = (sub2_path, ["link"], ["tmp3"])
            except NotImplementedError:
                sub2_tree = (sub2_path, [], ["tmp3"])
        else:
            sub2_tree = (sub2_path, [], ["tmp3"])

        # Walk top-down.
        all = list(walk_func(walk_path))
        self.assertEqual(len(all), 4)
        # We can't know which order SUB1 and SUB2 will appear in.
        # Not flipped:  TESTFN, SUB1, SUB11, SUB2
        #     flipped:  TESTFN, SUB2, SUB1, SUB11
        flipped = all[0][1][0] != "SUB1"
        all[0][1].sort()
        self.assertEqual(all[0], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
        self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
        self.assertEqual(all[2 + flipped], (sub11_path, [], []))
        self.assertEqual(all[3 - 2 * flipped], sub2_tree)

        # Prune the search.
        all = []
        for root, dirs, files in walk_func(walk_path):
            all.append((root, dirs, files))
            # Don't descend into SUB1.
            if 'SUB1' in dirs:
                # Note that this also mutates the dirs we appended to all!
                dirs.remove('SUB1')
        self.assertEqual(len(all), 2)
        self.assertEqual(all[0], (walk_path, ["SUB2"], ["tmp1"]))
        self.assertEqual(all[1], sub2_tree)

        # Walk bottom-up.
        all = list(walk_func(walk_path, topdown=False))
        self.assertEqual(len(all), 4)
        # We can't know which order SUB1 and SUB2 will appear in.
        # Not flipped:  SUB11, SUB1, SUB2, TESTFN
        #     flipped:  SUB2, SUB11, SUB1, TESTFN
        flipped = all[3][1][0] != "SUB1"
        all[3][1].sort()
        self.assertEqual(all[3], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
        self.assertEqual(all[flipped], (sub11_path, [], []))
        self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
        self.assertEqual(all[2 - 2 * flipped], sub2_tree)

        if has_symlink:
            # Walk, following symlinks.
            for root, dirs, files in walk_func(walk_path, followlinks=True):
                if root == link_path:
                    self.assertEqual(dirs, [])
                    self.assertEqual(files, ["tmp4"])
                    break
            else:
                self.fail("Didn't follow symlink with followlinks=True")

        # Test creating a directory and adding it to dirnames
        sub3_path = os.path.join(walk_path, "SUB3")
        all = []
        for root, dirs, files in walk_func(walk_path):
            all.append((root, dirs, files))
            if 'SUB1' in dirs:
                os.makedirs(sub3_path)
                dirs.append('SUB3')
        all.sort()
        self.assertEqual(os.path.split(all[-1][0])[1], 'SUB3')