sys.path.append

Here are examples of the Python API sys.path.append, taken from open source projects.

200 Examples
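
Before the project examples, here is a minimal sketch of the pattern they all share: append a directory to sys.path so that modules inside it become importable. The directory and module names below are illustrative, not taken from any of the projects.

import os
import sys

# Directory whose modules we want to import (illustrative location).
plugin_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "plugins")
sys.path.append(plugin_dir)

# Any module that lives directly under plugin_dir can now be imported by name,
# e.g. `import my_plugin` if plugin_dir contains my_plugin.py (hypothetical module).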

Example 1

View license
    def setup_import_paths(self):
        """ Set import paths for external import """
        # general path for all drivers (problematic because many modules in
        # the Adafruit don't have __init__.py files)
        sys.path.append('../external/adafruit/')
        sys.path.append('../external/adafruit/Adafruit_I2C')
        sys.path.append('../external/adafruit/Adafruit_PWM_Servo_Driver')

Example 2

Project: mootiro-maps
Source File: base.py
View license
def setup_django():
    # TODO: Verify if fabric's Django integration works.

    import os
    import sys

    FAB_DIR = os.path.abspath(os.path.dirname(__file__))
    PROJ_DIR = os.path.abspath(os.path.join(FAB_DIR, '../mootiro_maps'))
    APP_DIR = os.path.abspath(os.path.join(PROJ_DIR, 'apps'))
    LIB_DIR = os.path.abspath(os.path.join(PROJ_DIR, 'lib'))
    SITE_ROOT = os.path.abspath(os.path.join(PROJ_DIR, '../mootiro_maps'))
    sys.path.append(PROJ_DIR)
    sys.path.append(APP_DIR)
    sys.path.append(LIB_DIR)
    sys.path.append(SITE_ROOT)
    from django.core.management import setup_environ
    exec 'import {} as environ'.format(env.komoo_django_settings)
    setup_environ(environ)

Example 3

View license
    def injectPaths(self):
        """ Inject paths needed """
        # already done in mayaMenuBoot.py
        sys.path.append('%s/python/maya/site-packages' % self.PCStoolsLocation)
        #sys.path.append('%s/python/maya/site-packages/pymel-1.0.5' % self.PCStoolsLocation) # add to Maya.env to get it
        # in early enough

        mVer = about(version=1)
        # if search('x64', mVer):
        if about(win64=1):
            mVer = mVer.split(' ')[0]
            putEnv("MAYA_PLUG_IN_PATH", [getEnv('MAYA_PLUG_IN_PATH'), '%s/plugins/%s/win64' % (self.PCSmenuPath, mVer),
                                         '%s/plugins/%s/win64/others' % (self.PCSmenuPath, mVer)])
            # sys.path.append('%s/python/common/pyQt/PyQt4/2012/win64' % self.PCStoolsLocation)
        else:
            putEnv("MAYA_PLUG_IN_PATH", [getEnv('MAYA_PLUG_IN_PATH'), '%s/plugins/%s/win32' % (self.PCSmenuPath, mVer),
                                         '%s/plugins/%s/win32/others' % (self.PCSmenuPath, mVer)])

Example 4

Project: pymo
Source File: test_import.py
View license
    def _test_UNC_path(self):
        with open(os.path.join(self.path, 'test_trailing_slash.py'), 'w') as f:
            f.write("testdata = 'test_trailing_slash'")
        # Create the UNC path, like \\myhost\c$\foo\bar.
        path = os.path.abspath(self.path)
        import socket
        hn = socket.gethostname()
        drive = path[0]
        unc = "\\\\%s\\%s$"%(hn, drive)
        unc += path[2:]
        sys.path.append(path)
        mod = __import__("test_trailing_slash")
        self.assertEqual(mod.testdata, 'test_trailing_slash')
        unload("test_trailing_slash")

Example 5

Project: mra
Source File: extensions.py
View license
    def load_plugins_dir(self, dir_path):
        path, pkg = os.path.split(os.path.abspath(dir_path))
        pkg, _ = os.path.splitext(pkg)
        print "Loading %s from %s" % (pkg, path)

        sys.path.append(path)
        try:
            self.load_plugins(pkg)
        except ImportError:
            logging.error("Could not load plugin package \"%s\" from %s" % (pkg, path))
        else:
            logging.info("Loaded plugin package \"%s\" from %s" % (pkg, path))
        sys.path.remove(path)
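
A slightly more defensive variant of the pattern above wraps the cleanup in try/finally, so the temporary entry is removed even if loading fails with something other than ImportError. This is only a sketch; load_plugins stands in for whatever import work is done while the path is active.

import os
import sys

def load_plugins_dir(dir_path, load_plugins):
    path, pkg = os.path.split(os.path.abspath(dir_path))
    pkg, _ = os.path.splitext(pkg)
    sys.path.append(path)
    try:
        load_plugins(pkg)  # import work happens while the path is active
    finally:
        sys.path.remove(path)  # always drop the temporary entry again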

Example 6

Project: captchure
Source File: main.py
View license
def main(args):
    """Accepts a list of command-line parameters except the first one and calls
    all internal functions as needed."""
    parser = makeParser()
    options = optsort(parser.parse_args(args))
    rootDir = os.getcwd()
    engineDir = os.path.join(rootDir, options.engine)
    sys.path.append(engineDir)
    os.chdir(engineDir)
    importModules(options)
    report = mainLoop(options)
    if not options.single:
        print "Done."
        report.printErrors()
        if options.recognise:
            report.printRecStats(options.total)
    os.chdir(rootDir)
    if __name__ != "__main__" and report.successes + report.mismatches != 0:
        return report.error

Example 7

Project: fusioncatcher
Source File: configuration.py
View license
def _pythonpath(p,c,k,v,skip=False,last=False):
    r = _get_config(c,k,v)
    if r:
        p[v.upper()] = _expand(r)
        sys.path.append(_expand(r))
        if not skip:
            ep = os.getenv('PYTHONPATH')
            if ep:
                if last:
                    os.environ["PYTHONPATH"] = ep + os.pathsep + _expand(r)
                else:
                    os.environ["PYTHONPATH"] = _expand(r) + os.pathsep + ep
            else:
                os.environ["PYTHONPATH"] = _expand(r)
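
Worth noting about this pattern: updating os.environ["PYTHONPATH"] does not change sys.path of the interpreter that is already running; it only affects Python processes started afterwards, which is why the function also calls sys.path.append. A minimal sketch of that split (the path value is illustrative):

import os
import sys

extra = "/opt/tool/lib"  # illustrative path

# Current interpreter: takes effect immediately.
sys.path.append(extra)

# Child processes started later: inherit the updated environment variable.
existing = os.environ.get("PYTHONPATH")
if existing:
    os.environ["PYTHONPATH"] = extra + os.pathsep + existing
else:
    os.environ["PYTHONPATH"] = extra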

Example 8

Project: importanize
Source File: test_utils.py
View license
    def test_ignore_site_packages_paths(self):
        sys.path.append(os.getcwd())
        paths = sys.path[:]

        with ignore_site_packages_paths():
            self.assertNotEqual(sys.path, paths)
            self.assertLess(len(sys.path), len(paths))

        self.assertIn(os.getcwd(), sys.path)
        self.assertListEqual(sys.path, paths)
        sys.path.remove(os.getcwd())

Example 9

Project: colout
Source File: colout.py
View license
def load_themes( themes_dir):
    global context
    logging.debug("search for themes in: %s" % themes_dir)
    os.chdir( themes_dir )
    sys.path.append( themes_dir )

    # load available themes
    for f in glob.iglob("colout_*.py"):
        module = ".".join(f.split(".")[:-1]) # remove extension
        name = "_".join(module.split("_")[1:]) # remove the prefix
        if name in context["themes"]:
            raise DuplicatedTheme(name)
        logging.debug("load theme %s" % name)
        context["themes"][name] = importlib.import_module(module)

Example 10

Project: pyp2rpm
Source File: test_monkeypatch.py
View license
def test_syspath_prepend_double_undo(mp):
    mp.syspath_prepend('hello world')
    mp.undo()
    sys.path.append('more hello world')
    mp.undo()
    assert sys.path[-1] == 'more hello world'
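
For test code like this, pytest's monkeypatch fixture is often a convenient alternative: syspath_prepend adds an entry that pytest rolls back automatically when the test finishes. A minimal sketch:

import sys

def test_prepend_is_rolled_back(monkeypatch, tmp_path):
    monkeypatch.syspath_prepend(str(tmp_path))
    assert str(tmp_path) in sys.path
    # After the test, pytest undoes the change; no manual sys.path.remove is needed.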

Example 11

Project: stacker
Source File: base.py
View license
    def execute(self, *args, **kwargs):
        if "sys_path" in self.context.config:
            sys.path.append(self.context.config["sys_path"])
        self.pre_run(*args, **kwargs)
        self.run(*args, **kwargs)
        self.post_run(*args, **kwargs)

Example 12

Project: json-sempai
Source File: test_sempai.py
View license
    def setUp(self):
        self.direc = tempfile.mkdtemp(prefix='jsonsempai')
        sys.path.append(self.direc)

        with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
            f.write(TEST_FILE)

Example 13

Project: sublimious
Source File: package_controller.py
View license
    def __init__(self):
        sublime_dir = os.path.dirname(sublime.packages_path())
        installed_packages_dir = os.path.join(sublime_dir, 'Installed Packages')
        packages_dir = os.path.join(sublime_dir, 'Packages')

        self.user_dir = os.path.join(packages_dir, 'User')

        package_control = os.path.join(installed_packages_dir, "Package Control.sublime-package")
        sys.path.append(package_control)

        from package_control.package_manager import PackageManager
        self.package_control = PackageManager()

Example 14

Project: simple-monitor-alert
Source File: alerts.py
View license
    def __init__(self, sma, alerts_dir, alerts=None, valid_alerts=None):
        super(Alerts, self).__init__()
        self.sma = sma
        self.config = sma.config
        self.alerts_dir = alerts_dir
        sys.path.append(alerts_dir)
        self.valid_alerts = valid_alerts or self.get_valid_alerts()
        self.set_alerts(alerts)

Example 15

Project: pilove
Source File: pilove.py
View license
def import_xsysroot():
    '''
    Find path to XSysroot and import it
    You need to create a symlink xsysroot.py -> xsysroot
    '''
    which_xsysroot=os.popen('which xsysroot').read().strip()
    if not which_xsysroot:
        print 'Could not find xsysroot tool'
        print 'Please install from https://github.com/skarbat/xsysroot'
        return None
    else:
        print 'xsysroot found at: {}'.format(which_xsysroot)
        sys.path.append(os.path.dirname(which_xsysroot))
        import xsysroot
        return xsysroot

Example 16

Project: build-mozharness
Source File: device.py
View license
    def query_devicemanager(self):
        if self.devicemanager:
            return self.devicemanager
        c = self.config
        site_packages_path = self.script_obj.query_python_site_packages_path()
        dm_path = os.path.join(site_packages_path, 'mozdevice')
        sys.path.append(dm_path)
        try:
            from devicemanagerSUT import DeviceManagerSUT
            from devicemanagerSUT import DMError
            self.DMError = DMError
            self.devicemanager = DeviceManagerSUT(c['device_ip'])
            # TODO configurable?
            self.devicemanager.debug = c.get('devicemanager_debug_level', 0)
        except ImportError, e:
            self.fatal("Can't import DeviceManagerSUT! %s\nDid you check out talos?" % str(e))
        return self.devicemanager

Example 17

Project: chronology
Source File: executor.py
View license
def _setup_pyspark():
  # Set SPARK_HOME environment variable.
  os.putenv('SPARK_HOME', app.config['SPARK_HOME'])
  # From Python docs: Calling putenv() directly does not change os.environ, so
  # it's better to modify os.environ. Also some platforms don't support
  # os.putenv. We'll just do both.
  os.environ['SPARK_HOME'] = app.config['SPARK_HOME']
  # Add PySpark to path.
  sys.path.append(os.path.join(app.config['SPARK_HOME'], 'python'))

Example 18

Project: mythbox
Source File: test_modules.py
View license
    def test_nonDirectoryPaths(self):
        """
        Verify that L{modules.walkModules} ignores entries in sys.path which
        refer to regular files in the filesystem.
        """
        existentPath = self.pathEntryWithOnePackage()

        nonDirectoryPath = FilePath(self.mktemp())
        self.failIf(nonDirectoryPath.exists())
        nonDirectoryPath.setContent("zip file or whatever\n")

        self.replaceSysPath([existentPath.path])

        beforeModules = list(modules.walkModules())
        sys.path.append(nonDirectoryPath.path)
        afterModules = list(modules.walkModules())

        self.assertEquals(beforeModules, afterModules)

Example 19

Project: carcade
Source File: cli.py
View license
def main():
    sys.path.append(os.getcwd())
    argh.dispatch_commands([
        init,
        build,
        runserver,
        extract_messages,
    ])

Example 20

Project: python-haystack
Source File: test_text.py
View license
    def setUp(self):
        self.memory_handler = dump_loader.load('test/src/test-ctypes5.32.dump')
        self._load_offsets_values('test/src/test-ctypes5.32.dump')
        sys.path.append('test/src/')
        my_model = self.memory_handler.get_model()
        self.ctypes5_gen32 = my_model.import_module("ctypes5_gen32")
        my_model.build_python_class_clones(self.ctypes5_gen32)

Example 21

Project: DIRAC
Source File: buildScriptsDOC.py
View license
def run( tmpDir = None ):
  ''' Generates a temp directory where to copy over all scripts renamed so
      that we can import them into python. Once that is done, we import them
      one by one, to get the docstring.
  '''
  
  if tmpDir is None:
    tmpDir = getTmpDir()
    
  commandRefPath = generateCommandReference( tmpDir )
  scriptsDict    = prepareScripts( tmpDir )
  sys.path.append( tmpDir )
  scriptPaths    = writeScriptsDocs( scriptsDict, commandRefPath )
  
  print 'RSTs generated'
  
  overwriteCommandReference( commandRefPath, scriptPaths )
  
  print 'Done'

Example 22

Project: geodude
Source File: geodude.py
View license
def load_geodude():
    # Add current directory to path so we can import settings.
    sys.path.append(os.path.abspath(os.path.dirname(__file__)))
    import settings
    fmt = getattr(settings, 'GEO_DB_FORMAT', 'geoip')
    if fmt == 'geoip':
        load = load_geoip
    else:
        load = load_mmdb

    return make_application(
        load(settings.GEO_DB_PATH),
        settings.ALLOW_POST)

Example 23

Project: chipsec
Source File: test_import.py
View license
    def test_trailing_slash(self):
        with open(os.path.join(self.path, 'test_trailing_slash.py'), 'w') as f:
            f.write("testdata = 'test_trailing_slash'")
        sys.path.append(self.path+'/')
        mod = __import__("test_trailing_slash")
        self.assertEqual(mod.testdata, 'test_trailing_slash')
        unload("test_trailing_slash")

Example 24

Project: graphite-dashgen
Source File: dashgen.py
View license
def dash_save(dash):
    """Save dashboard using Graphite libraries."""
    # Graphite libraries
    sys.path.append(dashconf["webapp_path"])
    os.environ["DJANGO_SETTINGS_MODULE"] = "graphite.settings"
    from graphite.dashboard.models import Dashboard
    dash_name = dash["name"]
    dash_state = str(json.dumps(dash))
    try:
        dashboard = Dashboard.objects.get(name=dash_name)
    except Dashboard.DoesNotExist:
        dashboard = Dashboard.objects.create(name=dash_name, state=dash_state)
    else:
        dashboard.state = dash_state
        dashboard.save()

Example 25

Project: yatsm
Source File: setup.py
View license
def _build_pickles():
    # Build pickles
    here = os.path.dirname(__file__)
    sys.path.append(os.path.join(here, 'yatsm', 'regression', 'pickles'))
    from yatsm.regression.pickles import serialize as serialize_pickles  # noqa
    serialize_pickles.make_pickles()

Example 26

Project: scrapy
Source File: __init__.py
View license
    def setUp(self):
        orig_spiders_dir = os.path.join(module_dir, 'test_spiders')
        self.tmpdir = self.mktemp()
        os.mkdir(self.tmpdir)
        self.spiders_dir = os.path.join(self.tmpdir, 'test_spiders_xxx')
        shutil.copytree(orig_spiders_dir, self.spiders_dir)
        sys.path.append(self.tmpdir)
        settings = Settings({'SPIDER_MODULES': ['test_spiders_xxx']})
        self.spider_loader = SpiderLoader.from_settings(settings)

Example 27

Project: graphserver
Source File: routeserver.py
View license
def import_class(handler_class_path_string):
    sys.path.append( os.getcwd() )
    
    handler_class_path = handler_class_path_string.split(".")
    
    class_name = handler_class_path[-1]
    package_name = ".".join(handler_class_path[:-1])
    
    package = __import__(package_name, fromlist=[class_name])
    
    try:
        handler_class = getattr( package, class_name )
    except AttributeError:
        raise AttributeError( "Can't find %s. Only %s"%(class_name, dir(package)) )
    
    return handler_class
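
On current Python versions, importlib.import_module covers the same need as __import__ with fromlist. A minimal sketch of the same idea; the dotted path in the comment is hypothetical:

import importlib
import os
import sys

def import_class(handler_class_path_string):
    # e.g. handler_class_path_string = "mypackage.handlers.MyHandler" (hypothetical)
    sys.path.append(os.getcwd())
    module_name, _, class_name = handler_class_path_string.rpartition(".")
    module = importlib.import_module(module_name)
    return getattr(module, class_name)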

Example 28

Project: django-documents
Source File: setup.py
View license
    def run(self):
        this_dir = os.getcwd()
        testproj_dir = os.path.join(this_dir, "test_project")
        os.chdir(testproj_dir)
        sys.path.append(testproj_dir)
        from django.core.management import execute_manager
        os.environ["DJANGO_SETTINGS_MODULE"] = 'test_project.settings'
        settings_file = os.environ["DJANGO_SETTINGS_MODULE"]
        settings_mod = __import__(settings_file, {}, {}, [''])
        execute_manager(settings_mod, argv=[
            __file__, "test", "documents"])
        os.chdir(this_dir)

Example 29

Project: jpgcompressor
Source File: build.py
View license
def generate_doc(config):
	docdir = os.path.join(cwd,'documentation')
	if not os.path.exists(docdir):
		print "Couldn't find documentation file at: %s" % docdir
		return None
	sdk = config['TITANIUM_SDK']
	support_dir = os.path.join(sdk,'module','support')
	sys.path.append(support_dir)
	import markdown
	documentation = []
	for file in os.listdir(docdir):
		md = open(os.path.join(docdir,file)).read()
		html = markdown.markdown(md)
		documentation.append({file:html});
	return documentation

Example 30

Project: dojango
Source File: dojobuild.py
View license
def setup_environ():
    # we assume, that dojango is installed within your django's project dir
    project_directory = os.path.abspath(os.path.dirname(__file__)+'/../../')
    settings_filename = "settings.py"
    if not project_directory:
        project_directory = os.getcwd()
    project_name = os.path.basename(project_directory)
    settings_name = os.path.splitext(settings_filename)[0]
    sys.path.append(project_directory)
    sys.path.append(os.path.abspath(project_directory + "/.."))
    project_module = __import__(project_name, {}, {}, [''])
    sys.path.pop()
    # Set DJANGO_SETTINGS_MODULE appropriately.
    os.environ['DJANGO_SETTINGS_MODULE'] = '%s.%s' % (project_name, settings_name)
    return project_directory

Example 31

Project: sparts
Source File: ctx.py
View license
@contextmanager
def add_path(path, index=None):
    """Temporarily add `path` to the PYTHONPATH. Not thread-safe.
    
    If `index` is None, append to the end, otherwise, use `index` as specified
    to `list.insert()`"""
    if index is None:
        sys.path.append(path)
    else:
        sys.path.insert(index, path)
    try:
        yield path
    finally:
        sys.path.remove(path)
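
For reference, a usage sketch of the add_path context manager defined above; the directory is hypothetical, and the assertions only illustrate what happens to sys.path inside and outside the block:

import sys

with add_path("/opt/my_plugins"):           # hypothetical directory
    assert "/opt/my_plugins" in sys.path    # imports from that directory would work here
assert "/opt/my_plugins" not in sys.path    # the entry is removed again on exit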

Example 32

Project: bakthat
Source File: plugin.py
View license
def setup_plugins(conf=None):
    """ Add the plugin dir to the PYTHON_PATH,
    and activate them."""
    global plugin_setup
    if not plugin_setup:
        log.debug("Setting up plugins")
        plugins_dir = conf.get("plugins_dir", PLUGINS_DIR)

        if os.path.isdir(plugins_dir):
            log.debug("Adding {0} to plugins dir".format(plugins_dir))
            sys.path.append(plugins_dir)

        for plugin in conf.get("plugins", []):
            p = load_class(plugin)
            if issubclass(p, Plugin):
                load_plugin(p, conf)
            else:
                raise Exception("Plugin must be a bakthat.plugin.Plugin subclass!")
        plugin_setup = True

Example 33

Project: Django-facebook
Source File: runtests.py
View license
def runtests(args=None):
    import pytest
    sys.path.append(example_dir)

    if not args:
        args = []

    if not any(a for a in args[1:] if not a.startswith('-')):
        args.append('tests')

    result = pytest.main(['django_facebook', 'open_facebook'])
    sys.exit(result)

Example 34

Project: pypet
Source File: all_examples.py
View license
def main():
    os.chdir(os.path.join('..','..','examples'))
    sys.path.append(os.getcwd())
    simple_examples = glob.glob('*.py')
    assert len(simple_examples) == 24 - 5 + 1 + 2
    # + Number of Examples - Number of Packages +  __init__.py + 19b and 19c

    for simple_example in simple_examples:
        if simple_example == '__init__':
            continue

        if skip(simple_example):
            print("---------- Skipping %s ----------" % simple_example)
        else:
            print("########## Running %s ###########" % simple_example)
            execute_example(simple_example)

    ex17 = 'example_17_wrapping_an_existing_project'
    if skip(ex17):
        print("------- Skipping %s -------" % ex17)
    else:
        os.chdir(ex17)
        sys.path.append(os.getcwd())
        print('Running original')
        execute_example('original.py')
        print('Running pypet wrapping')
        execute_example('pypetwrap.py')
        os.chdir('..')

    ex13 = 'example_13_post_processing'
    if skip(ex13):
        print("------- Skipping %s -------" % ex13)
    else:
        print("########## Running %s ###########" % ex13)
        os.chdir(ex13)
        sys.path.append(os.getcwd())
        print("Running main")
        execute_example('main.py')
        print("Running analysis")
        execute_example('analysis.py')
        print("Running pipeline")
        execute_example('pipeline.py')
        os.chdir('..')

    ex11 = 'example_11_large_scale_brian_simulation'
    if skip(ex11):
        print("------- Skipping %s -------" % ex11)
    else:
        print("########## Running %s ###########" % ex11)

        os.chdir(ex11)
        sys.path.append(os.getcwd())
        print("Running script")
        execute_example('runscript.py')
        print("Running analysis")
        execute_example('plotff.py')
        os.chdir('..')

    ex24 = 'example_24_large_scale_brian2_simulation'
    if skip(ex24):
        print("------- Skipping %s -------" % ex24)
    else:
        print("########## Running %s ###########" % ex24)

        os.chdir(ex24)
        sys.path.append(os.getcwd())
        print("Running script")
        execute_example('runscript.py')
        print("Running analysis")
        execute_example('plotff.py')
        os.chdir('..')

Example 35

Project: qualitio
Source File: manage.py
View license
def manage_local():

    try:
        os.environ["VIRTUAL_ENV"]
    except KeyError:
        developemnt_virtualenv = os.path.join(os.path.dirname(__file__),
                                              "../.virtualenv/bin/activate_this.py")
        execfile(developemnt_virtualenv, dict(__file__=developemnt_virtualenv))

    tests_path = os.path.join(os.path.dirname(__file__), "../tests")
    sys.path.append(tests_path)

    try:
        import settings
    except ImportError:
        sys.stderr.write(import_error_message)

    from django.core.management import execute_manager
    execute_manager(settings)

Example 36

Project: filmkodi
Source File: default.py
View license
    def __init__(self):
        #BASE_RESOURCE_PATH = os.path.join( os.getcwd(), "resources" )
        BASE_RESOURCE_PATH = os.path.join( ptv.getAddonInfo('path'), "resources" )
        #BASE_RESOURCE_PATH = os.path.join( ptv2.getAddonInfo('path'), "resources" )

        sys.path.append( os.path.join( BASE_RESOURCE_PATH, "lib" ) )
        sys.path.append( os.path.join( BASE_RESOURCE_PATH, "resources" ) )

        sys.path.append( os.path.join( ptv.getAddonInfo('path'), "host" ) )
        import mrknow_pLog, settings, mrknow_Parser, mrknow_pCommon

        self.cm = mrknow_pCommon.common()
        self.log = mrknow_pLog.pLog()
        self.settings = settings.TVSettings()
        self.parser = mrknow_Parser.mrknow_Parser()
        #self.log.info("DIR " + common.Paths.modulesDir + 'mainMenu.cfg')
        if ptv.getSetting('adults') == 'false':
            self.MAIN_MENU_FILE = 'mainMenu.cfg'
        else:
            self.MAIN_MENU_FILE = 'mainMenuAdult.cfg'
        self.SPORT_MENU_FILE = 'sportMenu.cfg'

        if not os.path.exists(common.Paths.pluginDataDir):
            os.makedirs(common.Paths.pluginDataDir, 0777)

        self.favouritesManager = FavouritesManager(common.Paths.favouritesFolder)
        self.customModulesManager = CustomModulesManager(common.Paths.customModulesDir, common.Paths.customModulesRepo)

        if not os.path.exists(common.Paths.customModulesDir):
            os.makedirs(common.Paths.customModulesDir, 0777)

        self.parser2 = Parser2()
        self.currentlist = None

        self.addon = None
        self.log.info('Filmy online www.mrknow.pl')
        common.clearCache()

Example 37

Project: ray
Source File: runtest.py
View license
  def testCachingFunctionsToRun(self):
    # Test that we export functions to run on all workers before the driver is connected.
    def f(worker):
      sys.path.append(1)
    ray.worker.global_worker.run_function_on_all_workers(f)
    def f(worker):
      sys.path.append(2)
    ray.worker.global_worker.run_function_on_all_workers(f)
    def g(worker):
      sys.path.append(3)
    ray.worker.global_worker.run_function_on_all_workers(g)
    def f(worker):
      sys.path.append(4)
    ray.worker.global_worker.run_function_on_all_workers(f)

    ray.init(start_ray_local=True, num_workers=2)

    @ray.remote
    def get_state():
      time.sleep(1)
      return sys.path[-4], sys.path[-3], sys.path[-2], sys.path[-1]

    res1 = get_state.remote()
    res2 = get_state.remote()
    self.assertEqual(ray.get(res1), (1, 2, 3, 4))
    self.assertEqual(ray.get(res2), (1, 2, 3, 4))

    # Clean up the path on the workers.
    def f(worker):
      sys.path.pop()
      sys.path.pop()
      sys.path.pop()
      sys.path.pop()
    ray.worker.global_worker.run_function_on_all_workers(f)

    ray.worker.cleanup()

Example 39

Project: idapython
Source File: build.py
View license
def build_source_package():
    """ Build a directory and a ZIP file with all the sources """
    SRCDISTDIR = "idapython-%d.%d.%d" % (VERSION_MAJOR,
                                         VERSION_MINOR,
                                         VERSION_PATCH)
    # Build the source distribution
    srcmanifest = []
    srcmanifest.extend(BINDIST_MANIFEST)
    srcmanifest.extend(SRCDIST_MANIFEST)
    srcmanifest.extend([(x, "python") for x in "python/init.py", "python/idc.py", "python/idautils.py"])
    build_distribution(srcmanifest, SRCDISTDIR, ea64=False, nukeold=True)

# -----------------------------------------------------------------------
def gen_docs(z = False):
        print "Generating documentation....."
        old_dir = os.getcwd()
        try:
            curdir = os.getcwd() + os.sep
            docdir = 'idapython-reference-%d.%d.%d' % (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
            sys.path.append(curdir + 'python')
            sys.path.append(curdir + 'tools')
            sys.path.append(curdir + 'docs')
            import epydoc.cli
            import swigdocs
            os.chdir('docs')
            PYWRAPS_FN = 'pywraps'
            swigdocs.gen_docs(outfn = PYWRAPS_FN + '.py')
            epydoc.cli.optparse.sys.argv = [ 'epydoc',
                                             '--no-sourcecode',
                                             '-u', 'http://code.google.com/p/idapython/',
                                             '--navlink', '<a href="http://www.hex-rays.com/idapro/idapython_docs/">IDAPython Reference</a>',
                                             '--no-private',
                                             '--simple-term',
                                             '-o', docdir,
                                             '--html',
                                             'idc', 'idautils', PYWRAPS_FN, 'idaapi']
            # Generate the documentation
            epydoc.cli.cli()

            print "Documentation generated!"

            # Clean temp files
            for f in [PYWRAPS_FN + '.py', PYWRAPS_FN + '.pyc']:
                if os.path.exists(f):
                  os.unlink(f)

            if z:
                z = docdir + '-doc.zip'
                zip = zipfile.ZipFile(z, "w", zipfile.ZIP_DEFLATED)
                for fn in glob.glob(docdir + os.sep + '*'):
                    zip.write(fn)
                zip.close()
                print "Documentation compressed to", z
        except Exception, e:
            print 'Failed to generate documentation:', e
        finally:
            os.chdir(old_dir)
        return

# -----------------------------------------------------------------------
def usage():
    print """IDAPython build script.

Available switches:
  --doc:
    Generate documentation into the 'docs' directory
  --zip:
    Used with '--doc' switch. It will compress the generated documentation
  --ea64:
    Builds also the 64bit version of the plugin
  --with-hexrays:
    Build with the Hex-Rays Decompiler wrappings
  --no-early-load:
    The plugin will be compiled as normal plugin
    This switch disables processor, plugin and loader scripts
"""

# -----------------------------------------------------------------------
def main():
    if '--help' in sys.argv:
        return usage()
    elif '--doc' in sys.argv:
        return gen_docs(z = '--zip' in sys.argv)

    # Parse options
    options = parse_options(sys.argv)
    ea64 = options[S_EA64]

    # Always build the non __EA64__ version
    options[S_EA64] = False
    build_binary_package(options, nukeold=True)

    # Rebuild package with __EA64__ if needed
    if ea64:
        options[S_EA64] = True
        build_binary_package(options, nukeold=False)

    # Always build the source package
    build_source_package()

# -----------------------------------------------------------------------
if __name__ == "__main__":
    main()

Example 40

Project: hexrays-python
Source File: build.py
View license
def gen_docs(z = False):
        print "Generating documentation....."
        old_dir = os.getcwd()
        try:
            curdir = os.getcwd() + os.sep
            docdir = 'hexrays-python-reference-%d.%d.%d' % (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
            sys.path.append(curdir + 'python')
            #~ sys.path.append(curdir + 'tools')
            #~ sys.path.append(curdir + 'docs')
            import epydoc.cli
            import swigdocs
            os.chdir('docs')
            PYWRAPS_FN = 'pywraps'
            swigdocs.gen_docs(outfn = PYWRAPS_FN + '.py')
            epydoc.cli.optparse.sys.argv = [ 'epydoc',
                                             '--no-sourcecode',
                                             '-u', 'http://github.com/EiNSTeiN-/hexrays_python/',
                                             '--navlink', '<a href="http://www.hex-rays.com/idapro/idapython_docs/">IDAPython Reference</a>',
                                             '--no-private',
                                             '--simple-term',
                                             '-o', docdir,
                                             '--html',
                                             'idc', 'idautils', PYWRAPS_FN, 'idaapi']
            # Generate the documentation
            epydoc.cli.cli()

            print "Documentation generated!"

            # Clean temp files
            for f in [PYWRAPS_FN + '.py', PYWRAPS_FN + '.pyc']:
                if os.path.exists(f):
                  os.unlink(f)

            if z:
                z = docdir + '-doc.zip'
                zip = zipfile.ZipFile(z, "w", zipfile.ZIP_DEFLATED)
                for fn in glob.glob(docdir + os.sep + '*'):
                    zip.write(fn)
                zip.close()
                print "Documentation compressed to", z
        except Exception, e:
            print 'Failed to generate documentation:', e
        finally:
            os.chdir(old_dir)
        return

Example 41

Project: sftf
Source File: SFTF.py
View license
	def run(self):
		"""The main function of the SFTF class which read in all tests, checks
		if the tests matches the desired characteristics determined by the
		command line arguments and finally prints the result summary.
		"""
		Log.logDebug("======================", 2)
		Log.logDebug("SFTF.run(): v" + str(self.ver) + " started", 4)
		#sys.path.append(Config.TEST_CASE_PATH)
		for d in self.tcdirs:
			sys.path.append(d)
		for p in self.directories:
			sys.path.append(p)
		if len(self.test) > 0:
			tcloadlist = self.test
			self.test = []
			for i in tcloadlist:
				self.readInTestCase(i)
		else:
			self.readInTestCases()
		Log.logDebug("SFTF.run(): read in test cases: " + str(self.test), 4)
		if len(self.test) == 0:
			Log.logDebug("no test cases loaded, nothing to do, exiting...", 1)
			return
		Log.logTest("configuring tests ...")
		final_tests = []
		for i in self.test * self.numIterations:
			i.config()
			if (i.name is None) or (i.description is None) or (i.transport is None) or (i.isClient is None):
				Log.logDebug("SFTF.run(): mandatory configuration (name, description, transport and isClient) is incomplete, ignoring " + str(i), 1)
				Log.logTest("ignoring " + str(i) + " because of incomplete configuration")
			else:
				if (not i.isClient) and (not i.interactRequired):
					Log.logDebug("SFTF.run(): WARNING: test " + str(i.name) + " is server and interaction is not required: fixing", 1)
					Log.logTest("WARNING: test " + str(i.name) + " is server and interaction is not required: fixing")
					i.interactRequired = True
				append = True
				if i.isClient and self.serverOnly:
					append = False
				if (not i.isClient) and self.clientOnly:
					append = False
				if i.interactRequired and self.nonInteractOnly:
					append = False
				if (not i.interactRequired) and self.interactOnly:
					append = False
				if i.register and self.nonRegister:
					append = False
				if (not i.register) and self.register:
					append = False
				if (i.minAPIVersion is not None) and (TestCase.TC_API_VERSION < i.minAPIVersion):
					Log.logTest("'" + i.name + "' required minimum API version (" + str(i.minAPIVersion) + ") is above your API version (" + str(TestCase.TC_API_VERSION) + ")")
					Log.logTest("please update the framework if you want to be able to run this test")
					append = False
				if (i.maxAPIVersion is not None) and (TestCase.TC_API_VERSION > i.maxAPIVersion):
					Log.logTest("'" + i.name + "' required maximum API version is below your API version (" + TestCase.TC_API_VERSION + ")")
					Log.logTest("pleasse update the test case to the new API of your framework")
					append = False
				if append:
					final_tests.append(i)
		self.test = final_tests
		if len(self.test) == 0:
			Log.logDebug("no test cases loaded, nothing to do, exiting...", 1)
			return
		if not hasattr(Config, "resources"):
			Config.resources = {'NEH': {}, 'MediaSockets': [], 'FEH': {}, 'XMLEH': {}}
		if Config.LOG_SIPVIEWER_XML:
			try:
				import XMLEventHandler
				Config.XMLEHavailable = True
			except ImportError:
				Log.logDebug("SFTF.run(): SIPVIEWER_XML enabled but XMLEventHandler is missing", 1)
				Config.XMLEHavailable = False
		Log.logDebug("SFTF.run(): running these tests: " + str(self.test), 5)
		for j in self.test:
			if Config.LOG_SIPVIEWER_XML and Config.XMLEHavailable:
				Config.resources['XMLEH'][j.name] = XMLEventHandler.XMLEventHandler(j.name + ".xml")
			Log.logTest("===================")
			Log.logTest("starting test: " + j.name + " ...")
			Log.logDebug("===================", 1)
			Log.logDebug("starting test: " + j.name + " ...", 1)
			try:
				j.run()
			except KeyboardInterrupt:
				Log.logDebug("test run interrupted by user through keyboard => exiting", 1)
				Log.logTest("test run interrupted by user => exiting")
				self.totalSuccess = False
				if self.debug:
					return
				else:
					sys.exit()
			except socket.error, inst:
				if inst[0] == 98:
					print " The address from the above message is allready in use!!!\n Please change the port in Config.py or in the test '" + j.name + "'\n or close the application which uses this address."
				else:
					Log.logDebug("socket.error: " + str(inst), 1)
				sys.exit(3)
			except Exception, inst:
				print "  Caught exception during running '" + j.name + "'"
				traceback.print_exc(file=Log.debugLogFile)
				traceback.print_exc(file=sys.stdout)
				print "  ! Please report this bug as described at the web page !"
				sys.exit(3)
			for k in j.results:
				Log.logTest(getResultString(k.result) + ": " + k.reason)
			Log.logTest("\'" + j.name + "\' result = " + getResultString(j.getOneResult()))
			Log.logTest("===================")
			if Config.resources.has_key('MediaSockets'):
				for sp in Config.resources['MediaSockets']:
					sp[0].close()
					sp[1].close()
				Config.resources['MediaSockets'] = []
			if Config.resources.has_key('XMLEH') and Config.resources['XMLEH'].has_key(j.name):
				Config.resources['XMLEH'][j.name].close()
		for i in Config.resources['NEH']:
			Config.resources['NEH'][i].close()
		for j in Config.resources['FEH']:
			Config.resources['FEH'][j].close()
		for k in Config.resources['MediaSockets']:
			k[0].close()
			k[1].close()
		print "====================\nTest result summary:\n===================="
		if self.colorOutput:
			nocol = "\x1b[0;0m"
		else:
			nocol = ''
		Config.LOG_TESTS_STD_OUT = False
		for i in self.test:
			res = i.getOneResult()
			if res != TestCase.TC_PASSED:
				self.totalSuccess = False
			if self.colorOutput:
				col = resToCol(res)
			else:
				col = nocol
			res_s = "[" + col + getResultString(res) + nocol + "]"
			print i.name + ":\t" + res_s
			res_log = "[" + getResultString(res) + "]"
			Log.logTest(i.name + ":\t" + res_log)
			if self.verboseSummary:
				print "  Description: " + i.description
				Log.logTest("  Description: " + i.description)
				for j in i.results:
					if self.colorOutput:
						col = resToCol(j.result)
					else:
						col = nocol
					print "\t\t[" + col + getResultString(j.result) + nocol + "] : " + j.reason
					Log.logTest("\t\t[" + getResultString(j.result) + "] : " + j.reason)
				print "--------------------------------------------"
				Log.logTest("--------------------------------------------")

Example 42

Project: sd-agent
Source File: plugins.py
View license
    def check(self, agentConfig):

        self.logger.debug('getPlugins: start')

        plugin_directory = agentConfig.get('plugin_directory', None)
        if plugin_directory:

            self.logger.info(
                'getPlugins: plugin_directory %s', plugin_directory)

            if not os.access(plugin_directory, os.R_OK):
                self.logger.warning(
                    'getPlugins: Plugin path %s is set but not readable by ' +
                    'agent. Skipping plugins.', plugin_directory)
                return False
        else:
            self.logger.debug('getPlugins: plugin_directory not set')

            return False

        # Have we already imported the plugins?
        # Only load the plugins once
        if self.plugins is None:
            self.logger.debug(
                'getPlugins: initial load from %s', plugin_directory)

            sys.path.append(plugin_directory)

            self.plugins = []
            plugins = []

            # Loop through all the plugin files
            for root, dirs, files in os.walk(plugin_directory):
                for name in files:
                    self.logger.debug('getPlugins: considering: %s', name)

                    name = name.split('.', 1)

                    # Only pull in .py files (ignores others, inc .pyc files)
                    try:
                        if name[1] == 'py':

                            self.logger.debug(
                                'getPlugins: ' + name[0] + '.' + name[1] +
                                ' is a plugin')

                            plugins.append(name[0])
                    except IndexError:
                        continue

            # Loop through all the found plugins, import them then create new
            # objects
            for plugin_name in plugins:
                self.logger.debug('getPlugins: loading %s', plugin_name)

                plugin_path = os.path.join(
                    plugin_directory, '%s.py' % plugin_name)

                if not os.access(plugin_path, os.R_OK):
                    self.logger.error(
                        'getPlugins: Unable to read %s so skipping this '
                        'plugin.', plugin_path)
                    continue

                try:
                    # Import the plugin, but only from the plugin directory
                    # (ensures no conflicts with other module names elsewhere
                    # in the sys.path
                    import imp
                    imported_plugin = imp.load_source(plugin_name, plugin_path)

                    self.logger.debug('getPlugins: imported %s', plugin_name)

                    # Find out the class name and then instantiate it
                    plugin_class = getattr(imported_plugin, plugin_name, None)
                    if plugin_class is None:
                        self.logger.info(
                            'getPlugins: Unable to locate class %s in %s, '
                            'skipping', plugin_name, plugin_path)
                        continue

                    try:
                        plugin_obj = plugin_class(
                            agentConfig, self.logger, self.raw_config)
                    except TypeError:

                        try:
                            plugin_obj = plugin_class(
                                agentConfig, self.logger)
                        except TypeError:
                            # Support older plugins.
                            plugin_obj = plugin_class()

                    self.logger.debug('getPlugins: instantiated %s', plugin_name)

                    # Store in class var so we can execute it again on the
                    # next cycle
                    self.plugins.append(plugin_obj)

                except Exception:
                    self.logger.error(
                        'getPlugins (%s): exception = %s', plugin_name,
                        traceback.format_exc())

        # Now execute the objects previously created
        if self.plugins is not None:
            self.logger.debug('getPlugins: executing plugins')

            # Execute the plugins
            output = {}

            for plugin in self.plugins:
                self.logger.info(
                    'getPlugins: executing  %s', plugin.__class__.__name__)

                try:
                    value = plugin.run()
                    if value:
                        output[plugin.__class__.__name__] = value
                        self.logger.debug(
                            'getPlugins: %s output: %s',
                            plugin.__class__.__name__,
                            output[plugin.__class__.__name__])
                        self.logger.info(
                            'getPlugins: executed %s',
                            plugin.__class__.__name__)
                    else:
                        self.logger.info(
                            'getPlugins: executed %s but returned no data',
                            plugin.__class__.__name__)
                except Exception:
                    self.logger.error(
                        'getPlugins: exception = %s', traceback.format_exc())

            self.logger.debug('getPlugins: returning')
            # Each plugin should output a dictionary so we can convert it to
            # JSON later
            return output

        else:
            self.logger.debug('getPlugins: no plugins, returning false')

            return False

Example 43

Project: pyspace
Source File: launch.py
View license
def main():
    #### Find pySPACE package and import it ####

    # Determine path of current file
    path = os.path.realpath(__file__)

    # Move up to parent directory that contains the pySPACE tree
    suffix = []
    for i in range(3):
        path, tail = os.path.split(path)
        suffix.append(tail)
    parent_dir = path

    # Check proper directory structure
    if suffix != ['launch.py', 'run', 'pySPACE']:
        raise RuntimeError, "Encountered incorrect directory structure. "\
                            "launch.py needs to reside in $PARENT_DIR/pySPACE/run"


    # Workaround for eegserver crashing after 255 open ports
    # - Now it crashes after 4096 open ports ;-)
    import resource
    (fd1, fd2) = resource.getrlimit(resource.RLIMIT_NOFILE)
    fd1 = 4096 if fd2 == resource.RLIM_INFINITY else fd2-1
    resource.setrlimit(resource.RLIMIT_NOFILE, (fd1,fd2))
    # ------------------------------------------------------

    #########################################

    ### Parsing of command line arguments
    usage = "Usage: %prog [BACKEND_SPECIFICATION]  [--config <conf.yaml>] "\
            "[--operation <operation.yaml> | --operation_chain <operation_chain.yaml>] "\
            "[--profile]"\
            " where BACKEND_SPECIFICATION can be --serial, --mcore, --loadl or --mpi"

    parser = LaunchParser(usage=usage, epilog=epilog)

    # Configuration
    parser.add_option("-c", "--configuration",
                      default="config.yaml",
                      help="Choose the configuration file, which is looked up in PYSPACE_CONF_DIR",
                      action="store")
    # Backends
    parser.add_option("-s", "--serial", action="store_true", default=False,
                      help="Enables execution on the SerialBackend (one local process)")
    parser.add_option("-m", "--mcore", action="store_true", default=False,
                      help="Enables execution on the MulticoreBackend (one process per CPU core)")
    parser.add_option("-l", "--local", action="store_true", default=False,
                      help="Enables execution on the MulticoreBackend (one process per CPU core)")
    parser.add_option("-i", "--mpi", action="store_true", default=False,
                      help="Enables execution via MPI")
    parser.add_option("-L", "--loadl", action="store_true", default=False,
                      help="Enables execution via LoadLeveler.")
    # Operation / operation chain
    parser.add_option("-o", "--operation",
                      help="Chooses the operation that will be executed. The "
                           "operation specification file is looked up in "
                           "$SPEC_DIR/operations",
                      action="store")
    parser.add_option("-O", "-C", "--operation_chain",
                      help="Chooses the operation chain that will be executed. "
                           "The operation chain specification file is looked up "
                           "in $SPEC_DIR/operation_chains",
                      action="store")
    # Profiling
    parser.add_option("-p", "--profile",
                      help="Profiles execution.",
                      action="store_true", default=False,)

    (options, args) = parser.parse_args()

    # Load configuration file
    pySPACE.load_configuration(options.configuration)

    if hasattr(pySPACE.configuration, "eeg_acquisition_dir"):
        eeg_parent_dir =\
        os.sep.join(pySPACE.configuration.eeg_acquisition_dir.split(os.sep)[:-1])
        if not hasattr(pySPACE.configuration, "eeg_acquisition_dir"):
            pySPACE.configuration.eeg_module_path = eeg_parent_dir
    else:
        eeg_parent_dir, tail = os.path.split(parent_dir)
        eeg_parent_dir = os.path.join(eeg_parent_dir, "eeg_modules")
        pySPACE.configuration.eeg_module_path = eeg_parent_dir
    sys.path.append(eeg_parent_dir)

    # Create backend
    if options.serial:
        default_backend = create_backend("serial")
    elif options.mcore or options.local:
        default_backend = create_backend("mcore")
    elif options.mpi:
        default_backend = create_backend("mpi")
    elif options.loadl:
        default_backend = create_backend("loadl")
    else: # Falling back to serial backend
        default_backend = create_backend("serial")

    print(" --> Using backend: \n\t\t %s."%str(default_backend))

    if not options.operation is None:
        # Create operation for the given name
        operation = create_operation_from_file(options.operation)
        # Store current source code for later inspection
        create_source_archive(archive_path=operation.get_output_directory())
        if not options.profile:
            # Execute the current operation
            run_operation(default_backend, operation)
        else:
            # Execute and profile operation
            cProfile.runctx('pySPACE.run_operation(default_backend, operation)',
                            globals(), locals(),
                            filename = operation.get_output_directory()\
                                       + os.sep + "profile.pstat")
    elif not options.operation_chain is None:
        # Create operation chain for the given name
        operation_chain = create_operation_chain(options.operation_chain)
        # Store current source code for later inspection
        create_source_archive(archive_path=operation_chain.get_output_directory())

        if not options.profile:
            # Execute the current operation_chain
            run_operation_chain(default_backend, operation_chain)
        else:
            # Execute and profile operation
            cProfile.runctx('pySPACE.run_operation_chain(default_backend, operation_chain)',
                            globals(), locals(),
                            filename=operation_chain.get_output_directory()\
                                     + os.sep + "profile.pstat")
    else:
        parser.error("Neither operation chain nor operation specification file given!")

    logging.shutdown()
    # Stop logger thread in backend
    default_backend._stop_logging()

    del default_backend

Example 44

Project: deepcca
Source File: utils.py
View license
def load_vc(dataset='../gitlab/voice-conversion/src/test/data/clb_slt_MCEP24_static_span0.data'):
    import sys
    sys.path.append('../gitlab/voice-conversion/src')
    import voice_conversion
    
    import pickle
    f=open(dataset,'r')
    vcdata=pickle.load(f)
    x=vcdata['aligned_data1'][:,:24]
    y=vcdata['aligned_data2'][:,:24]
    num = x.shape[0]
    st_train = 0
    en_train = int(num * (64.0/200.0))
    st_valid = en_train
    en_valid = en_train+int(num * (36.0/200.0))
    st_test = en_valid
    en_test = num
    
    x_mean = x[st_train:en_train,:].mean(axis=0)
    y_mean = y[st_train:en_train,:].mean(axis=0)
    x_std = x[st_train:en_train,:].std(axis=0)
    y_std = y[st_train:en_train,:].std(axis=0)
    x -= x_mean
    y -= y_mean
    x /= x_std
    y /= y_std

    import theano
    train_set_x = theano.shared(numpy.asarray(x[st_train:en_train,:],
                                dtype=theano.config.floatX),
                                 borrow=True)
    train_set_y = theano.shared(numpy.asarray(y[st_train:en_train,:],
                                dtype=theano.config.floatX),
                                 borrow=True)
    test_set_x = theano.shared(numpy.asarray(x[st_test:en_test,:],
                                dtype=theano.config.floatX),
                                 borrow=True)
    test_set_y = theano.shared(numpy.asarray(y[st_test:en_test,:],
                                dtype=theano.config.floatX),
                                 borrow=True)
    valid_set_x = theano.shared(numpy.asarray(x[st_valid:en_valid,:],
                                dtype=theano.config.floatX),
                                 borrow=True)
    valid_set_y = theano.shared(numpy.asarray(y[st_valid:en_valid,:],
                                dtype=theano.config.floatX),
                                 borrow=True)
    rval = [(train_set_x, train_set_y), (valid_set_x, valid_set_y),
            (test_set_x, test_set_y)]
    return rval, x_mean, y_mean, x_std, y_std

Example 45

Project: KSP
Source File: main.py
View license
def main():
	args = _args()
	etc_path = abspath(args.etc_path)
	sys.path.append(etc_path)

	_processConfiguration()

	import config
	config.logs_path = None if args.console else abspath(config.logs_path, True)
	if args.log_level:
		config.log_level = args.log_level
	_make_root_logger(_stdstream(config.logs_path), config.log_level)

	logging.info("%s start-up", '*' * 20)
	logging.info("read configuration from %s", etc_path)

	config.database_path = abspath(config.database_path, True)
	config.server_certificate = abspath(config.server_certificate)

	# add the src/ folder to the import path
	sys.path.append(abspath('src'))

	# doing this here because if the pipe does not exit, we want to fail fast,
	# before we load the devices and calibre databases
	import ctrl
	pipe_file = open(args.control_pipe, 'rb') if args.control_pipe else None

	try:
		# import calibre and devices here because the config has to be fully processed for them to work
		import devices
		import calibre
		import server
		http_server = server.HTTP()
		if pipe_file:
			ctrl.start_server_controller(http_server, pipe_file, args.control_pipe)
		http_server.run()
	except:
		logging.exception("")
	finally:
		if pipe_file:
			try: pipe_file.close()
			except: pass
		devices.save_all()

	logging.info("%s shutdown", '*' * 20)
	logging.shutdown()

Example 46

Project: Diamond
Source File: netapp.py
View license
    def collect(self, device, ip, user, password):
        """
        This function collects the metrics for one filer.
        """
        sys.path.append(self.config['netappsdkpath'])
        try:
            import NaServer
        except ImportError:
            self.log.error("Unable to load NetApp SDK from %s" % (
                self.config['netappsdkpath']))
            return

        # Set up the parameters
        server = NaServer.NaServer(ip, 1, 3)
        server.set_transport_type('HTTPS')
        server.set_style('LOGIN')
        server.set_admin_user(user, password)

        # We're only able to query a single object at a time,
        # so we'll loop over the objects.
        for na_object in self.METRICS.keys():

            # For easy reference later, generate a new dict for this object
            LOCALMETRICS = {}
            for metric in self.METRICS[na_object]:
                metricname, prettyname, multiplier = metric
                LOCALMETRICS[metricname] = {}
                LOCALMETRICS[metricname]["prettyname"] = prettyname
                LOCALMETRICS[metricname]["multiplier"] = multiplier

            # Keep track of how long has passed since we checked last
            CollectTime = time.time()
            time_delta = None
            if na_object in self.LastCollectTime.keys():
                time_delta = CollectTime - self.LastCollectTime[na_object]
            self.LastCollectTime[na_object] = CollectTime

            self.log.debug("Collecting metric of object %s" % na_object)
            query = NaServer.NaElement("perf-object-get-instances-iter-start")
            query.child_add_string("objectname", na_object)
            counters = NaServer.NaElement("counters")
            for metric in LOCALMETRICS.keys():
                counters.child_add_string("counter", metric)
            query.child_add(counters)

            res = server.invoke_elem(query)
            if(res.results_status() == "failed"):
                self.log.error("Connection to filer %s failed; %s" % (
                    device, res.results_reason()))
                return

            iter_tag = res.child_get_string("tag")
            num_records = 1
            max_records = 100

            # For some metrics there are dependencies between metrics for
            # a single object, so we'll need to collect all, so we can do
            # calculations later.
            raw = {}

            while(num_records != 0):
                query = NaServer.NaElement(
                    "perf-object-get-instances-iter-next")
                query.child_add_string("tag", iter_tag)
                query.child_add_string("maximum", max_records)
                res = server.invoke_elem(query)

                if(res.results_status() == "failed"):
                    print "Connection to filer %s failed; %s" % (
                        device, res.results_reason())
                    return

                num_records = res.child_get_int("records")

                if(num_records > 0):
                    instances_list = res.child_get("instances")
                    instances = instances_list.children_get()

                    for instance in instances:
                        raw_name = unicodedata.normalize(
                            'NFKD',
                            instance.child_get_string("name")).encode(
                            'ascii', 'ignore')
                        # Shorten the name for disks as they are very long and
                        # padded with zeroes, eg:
                        # 5000C500:3A236B0B:00000000:00000000:00000000:...
                        if na_object == "disk":
                            non_zero_blocks = [
                                block for block in raw_name.split(":")
                                if block != "00000000"
                            ]
                            raw_name = "".join(non_zero_blocks)
                        instance_name = re.sub(r'\W', '_', raw_name)
                        counters_list = instance.child_get("counters")
                        counters = counters_list.children_get()

                        for counter in counters:
                            metricname = unicodedata.normalize(
                                'NFKD',
                                counter.child_get_string("name")).encode(
                                'ascii', 'ignore')
                            metricvalue = counter.child_get_string("value")
                            # We'll need a long complete pathname to not
                            # confuse self.derivative
                            pathname = ".".join([self.config["path_prefix"],
                                                 device, na_object,
                                                 instance_name, metricname])
                            raw[pathname] = int(metricvalue)

            # Do the math
            self.log.debug("Processing %i metrics for object %s" % (len(raw),
                                                                    na_object))

            # Since the derivative function both returns the derivative
            # and saves a new point, we'll need to store all derivatives
            # for local reference.
            derivative = {}
            for key in raw.keys():
                derivative[key] = self.derivative(key, raw[key])

            for key in raw.keys():
                metricname = key.split(".")[-1]
                prettyname = LOCALMETRICS[metricname]["prettyname"]
                multiplier = LOCALMETRICS[metricname]["multiplier"]

                if metricname in self.DROPMETRICS:
                    continue
                elif metricname in self.DIVIDERS.keys():
                    self._gen_delta_depend(key, derivative, multiplier,
                                           prettyname, device)
                else:
                    self._gen_delta_per_sec(key, derivative[key], time_delta,
                                            multiplier, prettyname, device)

Example 47

View license
    def loadTestsFromFile(self, filename):
        """Load doctests from the file.

        Tests are loaded only if filename's extension matches
        configured doctest extension.

        """
        if self.extension and anyp(filename.endswith, self.extension):
            name = os.path.basename(filename)
            dh = open(filename)
            try:
                doc = dh.read()
            finally:
                dh.close()

            fixture_context = None
            globs = {'__file__': filename}
            if self.fixtures:
                base, ext = os.path.splitext(name)
                dirname = os.path.dirname(filename)
                sys.path.append(dirname)
                fixt_mod = base + self.fixtures
                try:
                    fixture_context = __import__(
                        fixt_mod, globals(), locals(), ["nop"])
                except ImportError, e:
                    log.debug(
                        "Could not import %s: %s (%s)", fixt_mod, e, sys.path)
                log.debug("Fixture module %s resolved to %s",
                          fixt_mod, fixture_context)
                if hasattr(fixture_context, 'globs'):
                    globs = fixture_context.globs(globs)                    
            parser = doctest.DocTestParser()
            test = parser.get_doctest(
                doc, globs=globs, name=name,
                filename=filename, lineno=0)
            if test.examples:
                case = DocFileCase(
                    test,
                    optionflags=self.optionflags,
                    setUp=getattr(fixture_context, 'setup_test', None),
                    tearDown=getattr(fixture_context, 'teardown_test', None),
                    result_var=self.doctest_result_var)
                if fixture_context:
                    yield ContextList((case,), context=fixture_context)
                else:
                    yield case
            else:
                yield False # no tests to load
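
The loader above makes the doctest file's directory importable so a fixture module sitting next to the file can be imported by name; note that the appended entry is never removed afterwards. A rough standalone equivalent, with hypothetical names (import_sibling, suffix):

    import os
    import sys

    def import_sibling(filename, suffix="_fixt"):
        # Hypothetical helper: append the file's directory, then import a
        # module named after the file plus a fixture suffix, as above.
        dirname = os.path.dirname(os.path.abspath(filename))
        base = os.path.splitext(os.path.basename(filename))[0]
        if dirname not in sys.path:
            sys.path.append(dirname)
        try:
            return __import__(base + suffix, globals(), locals(), ["nop"])
        except ImportError:
            return None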

Example 48

Project: prosodic
Source File: Dictionary.py
View license
	def __init__(self,lang):
		import prosodic
		dirself=prosodic.dir_prosodic
		libfolder=os.path.join(dirself,'lib')
		dictsfolder=os.path.join(dirself,'dicts')
		self.config=prosodic.config
		
		
		self.lang = lang
		self.libfolder = libfolder
		self.dictsfolder = os.path.join(dictsfolder,self.lang)
		sys.path.append(self.dictsfolder)
		
		self.language=""
		self.getprep=False
		self.booted=False
		
		for filename in glob.glob(os.path.join(self.dictsfolder, self.lang+'*')):
			self.language = filename.split(os.path.sep).pop().split(".")[0]
			break
		if not self.language:
			exit('!! language could not be ascertained from files in '+self.dictsfolder+'. Please name your .tsv and/or .py dictionary file(s) using a string which begins with the two characters which serve as the name for the dictionary folder (eg, "en")') 
		
		self.unstressedWords=[]
		for filename in glob.glob(os.path.join(self.dictsfolder, 'unstressed*')):
			file=codecs.open(filename,encoding='utf-8')
			for ln in file:
				for word in ln.split():
					self.unstressedWords.append(word)
			file.close()			
			break

		self.maybestressedWords=[]
		for filename in glob.glob(os.path.join(self.dictsfolder, 'maybestressed*')):
			file=codecs.open(filename,encoding='utf-8')
			for ln in file:
				for word in ln.split():
					self.maybestressedWords.append(word)
			file.close()			
			break
		
		pyfile=os.path.join(self.dictsfolder,self.language+'.py')
		if os.path.exists(pyfile):
			self.getprep=get_class(self.language+'.get')
		
		self.cachefolder=os.path.join(self.dictsfolder,'_cache')
		self.dictentries=None
		build=False
		
		## language objects
		timestart=time.clock()
		if being.persists:
			if __name__=='__main__':
				print "## booting ontology: " + self.language + " ..."
			if not os.path.exists(self.cachefolder):os.mkdir(self.cachefolder)
			self.storage = FileStorage(self.cachefolder+'ontology.zodb')
			self.db = DB(self.storage)
			self.conn = self.db.open()
			self.dict = self.conn.root()
			self.t=transaction
			
			if not len(self.dict.values()):
				build=True
		else:
			self.dict={}
			self.refresh()
			topickle=self.exists_pickle()
			topickle=False
			if topickle:
				self.boot_pickle(topickle)
			else:
				build=True		
		
		if build:
			self.refresh()
			self.boot()
		
		if __name__=='__main__':
			print self.stats(prefix="\t").replace("[[time]]",str(round((time.clock() - timestart),2)))
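
The constructor above appends the per-language dictionary folder so that a language module inside it (located by globbing) can later be imported by its bare name. A condensed sketch of just that step, assuming a dicts_root/lang layout like the one used here:

    import glob
    import os
    import sys

    def load_language_module(dicts_root, lang):
        # Append the per-language folder, find a module whose filename starts
        # with the language code, and import it by its bare name.
        folder = os.path.join(dicts_root, lang)
        if folder not in sys.path:
            sys.path.append(folder)
        matches = glob.glob(os.path.join(folder, lang + '*.py'))
        if not matches:
            return None
        name = os.path.splitext(os.path.basename(matches[0]))[0]
        return __import__(name)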

Example 49

Project: OpenSesame
Source File: docstruct.py
View license
def docpkg(folder, lvl=2):
	
	"""
	Documents a package.
	
	Arguments:
	folder	--	The path to the package folder.
	
	Keyword arguments:
	lvl		--	The depth in the hierarchy. (default=2)
	
	Returns:
	A full documentation string.
	"""	
	
	md = u''
	
	path = os.path.join(folder, u'__init__.py')
	name = os.path.basename(folder)
	full_name = folder.replace(u'/', '.')
	header = u'#' * lvl
	src = u'https://github.com/smathot/OpenSesame/blob/master/%s' % path
	if not os.path.exists(path) or not ingit(path):
		return md
	sys.path.append(os.path.abspath(folder))
	pkg = imp.load_source(u'dummy', path)
	sys.path.pop()
	doc = docstr(pkg)
	contains = objcontains(pkg)
	md += obj_doc % {u'header' : header, u'name' : name, u'full_name' : \
		full_name, u'doc' : doc, u'src': src, u'type' : u'package', \
		u'contains' : contains}
	# Document modules
	for fname in sorted(os.listdir(folder)):
		path = os.path.join(folder, fname)
		if path.endswith(u'.py') and fname != u'__init__.py' and ingit(path):
			md += docmod(path, lvl+1)
	# Document packages
	for fname in sorted(os.listdir(folder)):
		path = os.path.join(folder, fname)
		if os.path.isdir(path):
			md += docpkg(path, lvl+1)
	return md
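
docpkg pairs sys.path.append with sys.path.pop so the package folder is importable only while it is being loaded, but the pop is skipped if imp.load_source raises. A scoped variant, written here as a hypothetical context manager rather than anything OpenSesame ships:

    import sys
    from contextlib import contextmanager

    @contextmanager
    def appended_path(folder):
        # Same append/pop pair as in docpkg, but the pop also runs when the
        # body raises, so sys.path is restored either way.
        sys.path.append(folder)
        try:
            yield
        finally:
            sys.path.pop()

Used as "with appended_path(os.path.abspath(folder)): pkg = imp.load_source(u'dummy', path)", it would leave the rest of docpkg unchanged.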

Example 50

Project: HPOlib
Source File: hyperopt_august2013_mod.py
View license
def main(config, options, experiment_dir, experiment_directory_prefix, **kwargs):
    # config:           Loaded .cfg file
    # options:          Options containing seed, restore_dir, 
    # experiment_dir:   Experiment directory/Benchmark_directory
    # **kwargs:         Nothing so far
    time_string = wrapping_util.get_time_string()
    cmd = ""

    # Add path_to_optimizer to PYTHONPATH and to sys.path
    # Only for HYPEROPT
    if 'PYTHONPATH' not in os.environ:
        os.environ['PYTHONPATH'] = config.get('TPE', 'path_to_optimizer')
    else:
        os.environ['PYTHONPATH'] = config.get('TPE', 'path_to_optimizer') + os.pathsep + os.environ['PYTHONPATH']
    sys.path.append(config.get('TPE', 'path_to_optimizer'))

    optimizer_str = os.path.splitext(os.path.basename(__file__))[0]

# TODO: Check whether we might need this again
#    SYSTEM_WIDE = 0
#    AUGUST_2013_MOD = 1
#    try:
#        import hyperopt
#        version = SYSTEM_WIDE
#    except ImportError:
#        try:
#            cmd += "export PYTHONPATH=$PYTHONPATH:" + os.path.dirname(os.path.abspath(__file__)) + \
#                "/optimizers/hyperopt_august2013_mod\n"
#            import optimizers.hyperopt_august2013_mod.hyperopt as hyperopt
#        except ImportError, e:
#            import HPOlib.optimizers.hyperopt_august2013_mod.hyperopt as hyperopt
#        version = AUGUST_2013_MOD

    # Find experiment directory
    if options.restore:
        if not os.path.exists(options.restore):
            raise Exception("The restore directory does not exist")
        optimizer_dir = options.restore
    else:
        optimizer_dir = os.path.join(experiment_dir,
                                     experiment_directory_prefix
                                     + optimizer_str + "_" +
                                     str(options.seed) + "_" +
                                     time_string)

    # Build call
    cmd += build_tpe_call(config, options, optimizer_dir)

    # Set up experiment directory
    if not os.path.exists(optimizer_dir):
        os.mkdir(optimizer_dir)
        space = config.get('TPE', 'space')
        abs_space = os.path.abspath(space)
        parent_space = os.path.join(experiment_dir, optimizer_str, space)
        if os.path.exists(abs_space):
            space = abs_space
        elif os.path.exists(parent_space):
            space = parent_space
        else:
            raise Exception("TPE search space not found. Searched at %s and "
                            "%s" % (abs_space, parent_space))
        # Copy the hyperopt search space
        if not os.path.exists(os.path.join(optimizer_dir, os.path.basename(space))):
            os.symlink(os.path.join(experiment_dir, optimizer_str, space),
                       os.path.join(optimizer_dir, os.path.basename(space)))

    import hyperopt
    path_to_loaded_optimizer = os.path.abspath(os.path.dirname(os.path.dirname(hyperopt.__file__)))

    logger.info("### INFORMATION ################################################################")
    logger.info("# You are running:                                                             #")
    logger.info("# %76s #" % path_to_loaded_optimizer)
    if not os.path.samefile(path_to_loaded_optimizer, config.get('TPE', 'path_to_optimizer')):
        logger.warning("# BUT hyperopt_august2013_modDefault.cfg says:")
        logger.warning("# %76s #" % config.get('TPE', 'path_to_optimizer'))
        logger.warning("# Found a global hyperopt version. This installation will be used!             #")
    else:
        logger.info("# To reproduce our results you need version 0.0.3.dev, which can be found here:#")
        logger.info("%s" % version_info)
        logger.info("# A newer version might be available, but not yet built in.                    #")
    logger.info("################################################################################")
    return cmd, optimizer_dir
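
The optimizer wrapper above extends both PYTHONPATH (so child processes spawned from cmd inherit the optimizer location) and sys.path (so the current interpreter can import hyperopt immediately). Reduced to a small helper with a hypothetical extend_python_path name:

    import os
    import sys

    def extend_python_path(path):
        # Prepend to PYTHONPATH for child processes and append to sys.path
        # for the running interpreter, mirroring the two steps above.
        if 'PYTHONPATH' not in os.environ:
            os.environ['PYTHONPATH'] = path
        else:
            os.environ['PYTHONPATH'] = path + os.pathsep + os.environ['PYTHONPATH']
        if path not in sys.path:
            sys.path.append(path)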