os.path.dirname

Here are examples of the Python API os.path.dirname taken from open source projects. os.path.dirname(path) returns the directory component of a pathname: everything up to, but not including, the last path separator.
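
Before the project examples, a minimal sketch of the call itself (results shown for POSIX separators):

import os.path

print(os.path.dirname('/usr/local/bin/python'))  # '/usr/local/bin'
print(os.path.dirname('/usr/local/bin/'))        # '/usr/local/bin' (everything before the last separator)
print(os.path.dirname('setup.py'))               # '' (no directory component)

# Common idiom: the directory containing the current module.
here = os.path.dirname(os.path.abspath(__file__))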

140 Examples

Example 1

Project: permute Source File: plot_directive.py
def run(arguments, content, options, state_machine, state, lineno):
    document = state_machine.document
    config = document.settings.env.config
    nofigs = 'nofigs' in options

    formats = get_plot_formats(config)
    default_fmt = formats[0][0]

    options.setdefault('include-source', config.plot_include_source)
    keep_context = 'context' in options
    context_opt = None if not keep_context else options['context']

    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if len(arguments):
        if not config.plot_basedir:
            source_file_name = os.path.join(setup.app.builder.srcdir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))

        # If there is content, it will be passed as a caption.
        caption = '\n'.join(content)

        # If the optional function name is provided, use it
        if len(arguments) == 2:
            function_name = arguments[1]
        else:
            function_name = None

        with io.open(source_file_name, 'r', encoding='utf-8') as fd:
            code = fd.read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)
        function_name = None
        caption = ''

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke on foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if 'format' in options:
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    # get rid of .. in paths, also changes pathsep
    # see note in Python docs for warning about symbolic links on Windows.
    # need to compare source and dest paths at end
    build_dir = os.path.normpath(build_dir)

    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    try:
        build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    except ValueError:
        # on Windows, relpath raises ValueError when path and start are on
        # different mounts/drives
        build_dir_link = build_dir
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = render_figures(code,
                                 source_file_name,
                                 build_dir,
                                 output_base,
                                 keep_context,
                                 function_name,
                                 config,
                                 context_reset=context_opt == 'reset',
                                 close_figs=context_opt == 'close-figs')
        errors = []
    except PlotError as err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
                                                source_file_name, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # Properly indent the caption
    caption = '\n'.join('      ' + line.strip()
                        for line in caption.split('\n'))

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        if nofigs:
            images = []

        opts = [':%s: %s' % (key, val) for key, val in six.iteritems(options)
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        only_html = ".. only:: html"
        only_latex = ".. only:: latex"
        only_texinfo = ".. only:: texinfo"

        # Not-None src_link signals the need for a source link in the generated
        # html
        if j == 0 and config.plot_html_show_source_link:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            config.plot_template or TEMPLATE,
            default_fmt=default_fmt,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            only_texinfo=only_texinfo,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats and len(images),
            caption=caption)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory, if necessary
    if not os.path.exists(dest_dir):
        cbook.mkdirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                destimg = os.path.join(dest_dir, os.path.basename(fn))
                if fn != destimg:
                    shutil.copyfile(fn, destimg)

    # copy script (if necessary)
    target_name = os.path.join(dest_dir, output_base + source_ext)
    with io.open(target_name, 'w', encoding="utf-8") as f:
        if source_file_name == rst_file:
            code_escaped = unescape_doctest(code)
        else:
            code_escaped = code
        f.write(code_escaped)

    return errors
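
The reusable idiom in this directive: os.path.dirname(rst_file) anchors link computation to the document's own directory, os.path.relpath does the cross-directory math, and a ValueError fallback covers paths on different Windows drives. A minimal sketch of that idiom, with hypothetical paths:

import os

rst_file = '/docs/gallery/plots.rst'         # hypothetical source document
build_dir = '/build/plot_directive/gallery'  # hypothetical output directory

rst_dir = os.path.dirname(rst_file)          # '/docs/gallery'
try:
    # Links in the generated reST are written relative to the document's directory.
    link = os.path.relpath(build_dir, rst_dir).replace(os.path.sep, '/')
except ValueError:
    # On Windows, relpath raises ValueError when the paths sit on different
    # drives; fall back to the absolute path, as the directive above does.
    link = build_dir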

Example 2

Project: AWS-Lambda-ML-Microservice-Skeleton Source File: setup.py
Function: configuration
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py, 'U'), generate_umath_py,
                                     ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Check whether we need our own wide character support
            if not config_cmd.check_decl('Py_UNICODE_WIDE', headers=['Python.h']):
                PYTHON_HAS_UNICODE_WIDE = True
            else:
                PYTHON_HAS_UNICODE_WIDE = False

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('ENABLE_SEPARATE_COMPILATION', 1))

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Get long double representation
            if sys.platform != 'darwin':
                rep = check_long_double_representation(config_cmd)
                if rep in ['INTEL_EXTENDED_12_BYTES_LE',
                           'INTEL_EXTENDED_16_BYTES_LE',
                           'MOTOROLA_EXTENDED_12_BYTES_BE',
                           'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
                           'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
                           'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
                    moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
                else:
                    raise ValueError("Unrecognized long double format: %s" % rep)

            # Py3K check
            if sys.version_info[0] == 3:
                moredefs.append(('NPY_PY3K', 1))

            # Generate the config.h file from moredefs
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # define inline to our keyword, or nothing
            target_f.write('#ifndef __cplusplus\n')
            if inline == 'inline':
                target_f.write('/* #undef inline */\n')
            else:
                target_f.write('#define inline %s\n' % inline)
            target_f.write('#endif\n')

            # add the guard to make sure config.h is never included directly,
            # but always through npy_config.h
            target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")

            target_f.close()
            print('File:', target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f:
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attributes (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put private include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "private"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if ENABLE_SEPARATE_COMPILATION:
                moredefs.append(('NPY_ENABLE_SEPARATE_COMPILATION', 1))

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            target_f = open(target, 'w')
            for d in moredefs:
                if isinstance(d, str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0], d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            print('File: %s' % target)
            target_f = open(target)
            print(target_f.read())
            target_f.close()
            print('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "private"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
    config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
    config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                            dummy module                             #
    #######################################################################

    # npymath needs the config.h and numpyconfig.h files to be generated, but
    # build_clib cannot handle generate_config_h and generate_numpyconfig_h
    # (don't ask). Because clib are generated before extensions, we have to
    # explicitly add an extension which has generate_config_h and
    # generate_numpyconfig_h as sources *before* adding npymath.

    config.add_extension('_dummy',
                         sources=[join('src', 'dummymodule.c'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api]
                         )

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math.c.src'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]
    config.add_installed_library('npymath',
            sources=npymath_sources + [get_mathlib_info],
            install_dir='lib')
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
            subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
            subst_dict)

    #######################################################################
    #                         npysort library                             #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'private', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'private', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                        multiarray module                            #
    #######################################################################

    # Multiarray version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_multiarray_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'multiarray')
        sources = [join(local_dir, subpath, 'scalartypes.c.src'),
                   join(local_dir, subpath, 'arraytypes.c.src'),
                   join(local_dir, subpath, 'nditer_templ.c.src'),
                   join(local_dir, subpath, 'lowlevel_strided_loops.c.src'),
                   join(local_dir, subpath, 'einsum.c.src'),
                   join(local_dir, 'src', 'private', 'templ_common.h.src')
                   ]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'array_assign.h'),
            join('src', 'multiarray', 'buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'cblasfuncs.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'numpymemoryview.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'numpyos.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'ucsnarrow.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'multiarray', 'vdot.h'),
            join('src', 'private', 'npy_config.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'private', 'lowlevel_strided_loops.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            join('include', 'numpy', '_numpyconfig.h.in'),
            # add library sources as distuils does not consider libraries
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign.c'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'compiled_base.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'numpymemoryview.c'),
            join('src', 'multiarray', 'numpyos.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'ucsnarrow.c'),
            join('src', 'multiarray', 'vdot.c'),
            join('src', 'private', 'templ_common.h.src'),
            ]

    blas_info = get_info('blas_opt', 0)
    if blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', []):
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'),
                               join('src', 'multiarray', 'python_xerbla.c'),
                               ])
        if uses_accelerate_framework(blas_info):
            multiarray_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    if not ENABLE_SEPARATE_COMPILATION:
        multiarray_deps.extend(multiarray_src)
        multiarray_src = [join('src', 'multiarray', 'multiarraymodule_onefile.c')]
        multiarray_src.append(generate_multiarray_templated_sources)

    config.add_extension('multiarray',
                         sources=multiarray_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py')],
                         depends=deps + multiarray_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                           umath module                              #
    #######################################################################

    # umath version: this function is needed to build foo.c from foo.c.src
    # when foo.c is included in another file and as such not in the src
    # argument of build_ext command
    def generate_umath_templated_sources(ext, build_dir):
        from numpy.distutils.misc_util import get_cmd

        subpath = join('src', 'umath')
        sources = [
            join(local_dir, subpath, 'loops.h.src'),
            join(local_dir, subpath, 'loops.c.src'),
            join(local_dir, subpath, 'scalarmath.c.src'),
            join(local_dir, subpath, 'simd.inc.src')]

        # numpy.distutils generate .c from .c.src in weird directories, we have
        # to add them there as they depend on the build_dir
        config.add_include_dirs(join(build_dir, subpath))
        cmd = get_cmd('build_src')
        cmd.ensure_finalized()
        cmd.template_sources(sources, ext)

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            f = open(target, 'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'scalarmath.c.src'),
            join('src', 'umath', 'ufunc_type_resolution.c')]

    umath_deps = [
            generate_umath_py,
            join('src', 'multiarray', 'common.h'),
            join('src', 'private', 'templ_common.h.src'),
            join('src', 'umath', 'simd.inc.src'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            join('src', 'private', 'ufunc_override.h')] + npymath_sources

    if not ENABLE_SEPARATE_COMPILATION:
        umath_deps.extend(umath_src)
        umath_src = [join('src', 'umath', 'umathmodule_onefile.c')]
        umath_src.append(generate_umath_templated_sources)
        umath_src.append(join('src', 'umath', 'funcs.inc.src'))
        umath_src.append(join('src', 'umath', 'simd.inc.src'))

    config.add_extension('umath',
                         sources=umath_src +
                                 [generate_config_h,
                                 generate_numpyconfig_h,
                                 generate_umath_c,
                                 generate_ufunc_api],
                         depends=deps + umath_deps,
                         libraries=['npymath'],
                         )

    #######################################################################
    #                        umath_tests module                           #
    #######################################################################

    config.add_extension('umath_tests',
                    sources=[join('src', 'umath', 'umath_tests.c.src')])

    #######################################################################
    #                   custom rational dtype module                      #
    #######################################################################

    config.add_extension('test_rational',
                    sources=[join('src', 'umath', 'test_rational.c.src')])

    #######################################################################
    #                        struct_ufunc_test module                     #
    #######################################################################

    config.add_extension('struct_ufunc_test',
                    sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])

    #######################################################################
    #                     multiarray_tests module                         #
    #######################################################################

    config.add_extension('multiarray_tests',
                    sources=[join('src', 'multiarray', 'multiarray_tests.c.src')])

    #######################################################################
    #                        operand_flag_tests module                    #
    #######################################################################

    config.add_extension('operand_flag_tests',
                    sources=[join('src', 'umath', 'operand_flag_tests.c.src')])

    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
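
Both header generators above open with the same preamble: os.path.dirname of the file about to be written, then os.makedirs if that directory is missing. On Python 3.2+ the check-then-create pair collapses into one race-free call; a minimal sketch with a hypothetical target path:

import os

target = 'build/include/numpy/config.h'  # hypothetical generated header

# Create the parent directory (and intermediates) if absent; exist_ok
# suppresses the error when it is already there.
os.makedirs(os.path.dirname(target), exist_ok=True)
with open(target, 'w') as f:
    f.write('#define EXAMPLE 1\n')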

Example 3

Project: ck-env Source File: customize.py
Function: setup
def setup(i):
    """
    Input:  {
              cfg              - meta of this soft entry
              self_cfg         - meta of module soft
              ck_kernel        - import CK kernel module (to reuse functions)

              host_os_uoa      - host OS UOA
              host_os_uid      - host OS UID
              host_os_dict     - host OS meta

              target_os_uoa    - target OS UOA
              target_os_uid    - target OS UID
              target_os_dict   - target OS meta

              target_device_id - target device ID (if via ADB)

              tags             - list of tags used to search this entry

              env              - updated environment vars from meta
              customize        - updated customize vars from meta

              deps             - resolved dependencies for this soft

              interactive      - if 'yes', can ask questions, otherwise quiet
            }

    Output: {
              return       - return code =  0, if successful
                                         >  0, if error
              (error)      - error text if return > 0

              bat          - prepared string for bat file
            }

    """

    import os

    # Get variables
    ck=i['ck_kernel']
    s=''

    iv=i.get('interactive','')

    env=i.get('env',{})
    cfg=i.get('cfg',{})
    deps=i.get('deps',{})
    tags=i.get('tags',[])
    cus=i.get('customize',{})

    hosd=i['host_os_dict']
    tosd=i['target_os_dict']

    hplat=hosd.get('ck_name','')
    hbits=hosd.get('bits','')
    tbits=tosd.get('bits','')

    target_d=i.get('target_os_dict',{})
    winh=hosd.get('windows_base','')
    win=target_d.get('windows_base','')
    remote=target_d.get('remote','')
    mingw=target_d.get('mingw','')
    tbits=target_d.get('bits','')

    eqis=hosd.get('env_quotes_if_space','')

    envp=cus.get('env_prefix','')
    pi=cus.get('path_install','')

    fp=cus.get('full_path','')

    ############################################################
    platform=target_d.get('android_ndk_platform','')
    if platform=='':
       return {'return':1, 'error':'platform is not defined in target OS'}

    ############################################################
    arch=target_d.get('android_ndk_arch','')
    if arch=='':
       return {'return':1, 'error':'platform architecture is not defined in target OS'}

    ############################################################
    abi=target_d.get('abi','')
    if abi=='':
       return {'return':1, 'error':'abi is not defined in target OS'}

    ############################################################
    atc=tosd.get('android_toolchain','')
    if atc=='':
       return {'return':1, 'error':'android_toolchain is not specified in target OS meta'}

    acp=tosd.get('android_compiler_prefix','')
    if acp=='':
       return {'return':1, 'error':'android_compiler_prefix is not specified in target OS meta'}

    env['CK_ANDROID_COMPILER_PREFIX']=acp
    env['CK_ANDROID_TOOLCHAIN']=atc
    env['CK_ANDROID_ABI']=abi
    env['CK_ANDROID_NDK_ARCH']=arch
    env['CK_ANDROID_NDK_PLATFORM']=platform

    # Check path
    ep=cus.get('env_prefix','')
    if fp!='':
       p1=os.path.dirname(fp)
       p2=os.path.dirname(p1)
       p3=os.path.dirname(p2)
       p4=os.path.dirname(p3)
       p5=os.path.dirname(p4)
       pi=os.path.dirname(p5)

       if winh=='yes':
          s+='\nset PATH='+pi+';%PATH%\n\n'
       else:
          s+='\nexport PATH='+pi+':$PATH\n\n'

       if ep!='':
          env[ep]=p2
          env[ep+'_BIN']=p1

       prebuilt=''
       if hplat=='win':
          if hbits=='64':
             prebuilt='windows-x86_64'
          else:
             prebuilt='windows-x86'
       else:
          if hbits=='64':
             prebuilt='linux-x86_64'
          else:
             prebuilt='linux-x86'

       cus['tool_prefix_configured']='yes'
       cus['tool_prefix']=acp+'-'
       cus['platform_path_configured']='yes'
       cus['platform_path']=os.path.join(pi,'platforms')
       cus['add_extra_path_configured']='yes'
       cus['add_extra_path']=os.path.join(pi,'prebuilt',prebuilt,'bin')

       cus['ef_configured']='yes'
       x=''
#       if arch=='arm64': 
       x='-fPIE -pie'
       cus['ef']=x

       j=p4.find(atc)
       if j>0:
          ver=p4[j+len(atc)+1:]

          cus['libstdcpppath_include_configured']='yes'
          cus['libstdcpppath_include']=os.path.join(pi,'sources','cxx-stl','gnu-libstdc++',ver,'include')

          cus['libstdcpppath_configured']='yes'
          cus['libstdcpppath']=os.path.join(pi,'sources','cxx-stl','gnu-libstdc++',ver,'libs',abi)

    env.update({
      "CK_AR": "$#tool_prefix#$ar", 
      "CK_ASM_EXT": ".s", 
      "CK_CC": "$#tool_prefix#$gcc", 
      "CK_COMPILER_FLAGS_OBLIGATORY": "", 
      "CK_COMPILER_FLAG_CPP11": "-std=c++11", 
      "CK_COMPILER_FLAG_CPP0X": "-std=c++0x", 
      "CK_COMPILER_FLAG_GPROF": "-pg", 
      "CK_COMPILER_FLAG_OPENMP": "-fopenmp", 
      "CK_COMPILER_FLAG_PLUGIN": "-fplugin=", 
      "CK_COMPILER_FLAG_PTHREAD_LIB": "-lpthread", 
      "CK_CXX": "$#tool_prefix#$g++", 
      "CK_OPT_ALL_WARNINGS": "-Wall", 
      "CK_DLL_EXT": ".so", 
      "CK_EXE_EXT": ".out", 
      "CK_EXTRA_LIB_DL": "-ldl", 
      "CK_EXTRA_LIB_M": "-lm", 
      "CK_FLAGS_CREATE_ASM": "-S", 
      "CK_FLAGS_CREATE_OBJ": "-c", 
      "CK_FLAGS_DLL": "-shared -fPIC", 
      "CK_FLAGS_DLL_EXTRA": "", 
      "CK_FLAGS_OUTPUT": "-o ", 
      "CK_FLAGS_STATIC_BIN": "-static -fPIC", 
      "CK_FLAGS_STATIC_LIB": "-fPIC", 
      "CK_FLAG_PREFIX_INCLUDE": "-I", 
      "CK_FLAG_PREFIX_LIB_DIR": "-L", 
      "CK_FLAG_PREFIX_VAR": "-D", 
      "CK_GPROF_OUT_FILE": "gmon.out", 
      "CK_LB": "$#tool_prefix#$ar rcs", 
      "CK_LB_OUTPUT": "", 
      "CK_LD": "$#tool_prefix#$ld", 
      "CK_LD_FLAGS_EXTRA": "", 
      "CK_LIB_EXT": ".a", 
      "CK_LINKER_FLAG_OPENMP": "-lgomp", 
      "CK_MAKE": "make", 
      "CK_OBJDUMP": "$#tool_prefix#$objdump -d", 
      "CK_OBJ_EXT": ".o", 
      "CK_OPT_SIZE": "-Os", 
      "CK_OPT_SPEED": "-O3", 
      "CK_OPT_SPEED_SAFE": "-O2", 
      "CK_PLUGIN_FLAG": "-fplugin=", 
      "CK_PROFILER": "gprof"
    })

    ############################################################
    # Ask a few more questions

    ############################################################
    prefix_configured=cus.get('tool_prefix_configured','')
    prefix=cus.get('tool_prefix','')
    if prefix_configured!='yes' and iv=='yes':
       if prefix!='':
          ck.out('Current compiler name prefix: '+prefix)
       else:
          ra=ck.inp({'text':'Enter compiler name prefix, if needed (such as aarch64-linux-android-): '})
          prefix=ra['string'].strip()
          cus['tool_prefix_configured']='yes'

    if prefix!='':
       env['CK_COMPILER_PREFIX']=prefix
       cus['tool_prefix']=prefix
       cus['tool_prefix_configured']='yes'

    for k in env:
        v=env[k]
        v=v.replace('$#tool_prefix#$',prefix)
        env[k]=v

    ############################################################
    extra_path_configured=cus.get('add_extra_path_configured','')
    extra_path=cus.get('add_extra_path','')
    if extra_path_configured!='yes' and iv=='yes':
       if extra_path!='':
          ck.out('Full path to pre-built Android tools: '+extra_path)
       else:
          ra=ck.inp({'text':'Enter full path to pre-built Android tools (such as ...prebuilt/linux-x86_64/bin) : '})
          extra_path=ra['string']
          cus['add_extra_path_configured']='yes'

    if extra_path!='':
       cus['add_extra_path']=extra_path
       cus['add_extra_path_configured']='yes'

    if extra_path!='':
       if winh=='yes':
          s+='\nset PATH='+extra_path+';%PATH%\n\n'
       else:
          s+='\nexport PATH='+extra_path+':$PATH\n\n'

    ############################################################
    platform_path_configured=cus.get('platform_path_configured','')
    platform_path=cus.get('platform_path','')
    if platform_path_configured!='yes' and iv=='yes':
       if platform_path!='':
          ck.out('Full path to directory with Android NDK platforms: '+platform_path)
       else:
          ra=ck.inp({'text':'Enter full path to directory with Android NDK platforms : '})
          platform_path=ra['string']
          cus['platform_path_configured']='yes'

    if platform_path=='':
       return {'return':1, 'error':'path to Android platforms is not defined'}

    cus['platform_path']=platform_path
    cus['platform_path_configured']='yes'

    ############################################################
    libstdcpppathi_configured=cus.get('libstdcpppath_include_configured','')
    libstdcpppathi=cus.get('libstdcpppath_include','')
    if libstdcpppathi_configured!='yes' and iv=='yes':
       if libstdcpppathi!='':
          ck.out('Full path to include directory with libstdc++: '+libstdcpppathi)
       else:
          ra=ck.inp({'text':'* If needed, enter full path to include directory with libstdc++ (such as ...sources/cxx-stl/gnu-libstdc++/4.9/include): '})
          libstdcpppathi=ra['string']
          cus['libstdcpppath_include_configured']='yes'

    cus['libstdcpppath_include']=libstdcpppathi
    env['CK_ENV_LIB_STDCPP_INCLUDE']=libstdcpppathi
    cus['libstdcpppath_include_configured']='yes'

    libstdcpppath_configured=cus.get('libstdcpppath_configured','')
    libstdcpppath=cus.get('libstdcpppath','')
    if libstdcpppath_configured!='yes' and iv=='yes':
       if libstdcpppath!='':
          ck.out('Full path to lib directory with libstdc++: '+libstdcpppath)
       else:
          ra=ck.inp({'text':'* If needed, enter full path to lib directory with libstdc++ (such as ...sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a): '})
          libstdcpppath=ra['string']
          cus['libstdcpppath_configured']='yes'

    if winh=='yes':
       sep='\\'
    else:
       sep='/'

    cus['libstdcpppath']=libstdcpppath
    if libstdcpppath!='':
       env['CK_ENV_LIB_STDCPP_STATIC']=libstdcpppath+sep+'libgnustl_static.a'
       env['CK_ENV_LIB_STDCPP_DYNAMIC']=libstdcpppath+sep+'libgnustl_shared.so'
       env['CK_ENV_LIB_STDCPP_INCLUDE_EXTRA']=libstdcpppath+sep+'include'
    else:
       env['CK_ENV_LIB_STDCPP_STATIC']=''
       env['CK_ENV_LIB_STDCPP_DYNAMIC']=''
       env['CK_ENV_LIB_STDCPP_INCLUDE_EXTRA']=''
    cus['libstdcpppath_configured']='yes'

    ############################################################
    ef_configured=cus.get('ef_configured','')
    ef=cus.get('ef','')
    if ef_configured!='yes' and iv=='yes':
       ra=ck.inp({'text':'Force extra flags, if needed (such as -fPIE -pie for aarch64): '})
       ef=ra['string']
       cus['ef']=ef
       cus['ef_configured']='yes'

    ##############
    if winh=='yes':
       psysroot=platform_path+'\\'+platform+'\\arch-'+arch
    else:
       psysroot=platform_path+'/'+platform+'/arch-'+arch
    sysroot='--sysroot "'+psysroot+'"'

    env['CK_SYS_ROOT']=psysroot

    x=env.get('CK_COMPILER_FLAGS_OBLIGATORY','')
    if sysroot not in x:
       x=sysroot+' '+x
    env['CK_COMPILER_FLAGS_OBLIGATORY']=x

    if ef!='':
       x=env['CK_CC']
       if x.find(ef)<0:
          x=eqis+x+' '+ef+eqis
       env['CK_CC']=x

       x=env['CK_CXX']
       if x.find(ef)<0:
          x=eqis+x+' '+ef+eqis
       env['CK_CXX']=x

    if pi!='':
       env['CK_ANDROID_NDK_ROOT_DIR']=pi

#    x=env.get('CK_LD_FLAGS_EXTRA','')
#    if sysroot not in x:
#       x=sysroot+' '+x
#    env['CK_LD_FLAGS_EXTRA']=x

    return {'return':0, 'bat':s, 'env':env, 'tags':tags, 'cus':cus}
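
The path check near the top of this example climbs six levels from the compiler binary to the NDK root by applying os.path.dirname repeatedly (p1 through pi). The same walk reads more clearly as a loop; a sketch with a hypothetical NDK-style layout:

import os

fp = '/opt/ndk/toolchains/arm/prebuilt/linux-x86_64/bin/gcc'  # hypothetical

def up(path, levels):
    # Apply os.path.dirname `levels` times.
    for _ in range(levels):
        path = os.path.dirname(path)
    return path

print(up(fp, 1))  # /opt/ndk/toolchains/arm/prebuilt/linux-x86_64/bin
print(up(fp, 6))  # /opt/ndk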

Example 4

Project: scipy Source File: test_solvers.py
def test_solve_continuous_are():
    mat6 = np.load(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                'data', 'carex_6_data.npz'))
    mat15 = np.load(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'data', 'carex_15_data.npz'))
    mat18 = np.load(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'data', 'carex_18_data.npz'))
    mat19 = np.load(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'data', 'carex_19_data.npz'))
    mat20 = np.load(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'data', 'carex_20_data.npz'))
    cases = [
        # Carex examples taken from (with default parameters):
        # [1] P.BENNER, A.J. LAUB, V. MEHRMANN: 'A Collection of Benchmark
        #     Examples for the Numerical Solution of Algebraic Riccati
        #     Equations II: Continuous-Time Case', Tech. Report SPC 95_23,
        #     Fak. f. Mathematik, TU Chemnitz-Zwickau (Germany), 1995.
        #
        # The format of the data is (a, b, q, r, knownfailure), where
        # knownfailure is None if the test passes or a string
        # indicating the reason for failure.
        #
        # Test Case 0: carex #1
        (np.diag([1.], 1),
         np.array([[0], [1]]),
         block_diag(1., 2.),
         1,
         None),
        # Test Case 1: carex #2
        (np.array([[4, 3], [-4.5, -3.5]]),
         np.array([[1], [-1]]),
         np.array([[9, 6], [6, 4.]]),
         1,
         None),
        # Test Case 2: carex #3
        (np.array([[0, 1, 0, 0],
                   [0, -1.89, 0.39, -5.53],
                   [0, -0.034, -2.98, 2.43],
                   [0.034, -0.0011, -0.99, -0.21]]),
         np.array([[0, 0], [0.36, -1.6], [-0.95, -0.032], [0.03, 0]]),
         np.array([[2.313, 2.727, 0.688, 0.023],
                   [2.727, 4.271, 1.148, 0.323],
                   [0.688, 1.148, 0.313, 0.102],
                   [0.023, 0.323, 0.102, 0.083]]),
         np.eye(2),
         None),
        # Test Case 3: carex #4
        (np.array([[-0.991, 0.529, 0, 0, 0, 0, 0, 0],
                   [0.522, -1.051, 0.596, 0, 0, 0, 0, 0],
                   [0, 0.522, -1.118, 0.596, 0, 0, 0, 0],
                   [0, 0, 0.522, -1.548, 0.718, 0, 0, 0],
                   [0, 0, 0, 0.922, -1.64, 0.799, 0, 0],
                   [0, 0, 0, 0, 0.922, -1.721, 0.901, 0],
                   [0, 0, 0, 0, 0, 0.922, -1.823, 1.021],
                   [0, 0, 0, 0, 0, 0, 0.922, -1.943]]),
         np.array([[3.84, 4.00, 37.60, 3.08, 2.36, 2.88, 3.08, 3.00],
                   [-2.88, -3.04, -2.80, -2.32, -3.32, -3.82, -4.12, -3.96]]
                  ).T * 0.001,
         np.array([[1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.1],
                   [0.0, 1.0, 0.0, 0.0, 0.1, 0.0, 0.0, 0.0],
                   [0.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.0, 0.0],
                   [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                   [0.5, 0.1, 0.0, 0.0, 0.1, 0.0, 0.0, 0.0],
                   [0.0, 0.0, 0.5, 0.0, 0.0, 0.1, 0.0, 0.0],
                   [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.0],
                   [0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1]]),
         np.eye(2),
         None),
        # Test Case 4: carex #5
        (np.array(
          [[-4.019, 5.120, 0., 0., -2.082, 0., 0., 0., 0.870],
           [-0.346, 0.986, 0., 0., -2.340, 0., 0., 0., 0.970],
           [-7.909, 15.407, -4.069, 0., -6.450, 0., 0., 0., 2.680],
           [-21.816, 35.606, -0.339, -3.870, -17.800, 0., 0., 0., 7.390],
           [-60.196, 98.188, -7.907, 0.340, -53.008, 0., 0., 0., 20.400],
           [0, 0, 0, 0, 94.000, -147.200, 0., 53.200, 0.],
           [0, 0, 0, 0, 0, 94.000, -147.200, 0, 0],
           [0, 0, 0, 0, 0, 12.800, 0.000, -31.600, 0],
           [0, 0, 0, 0, 12.800, 0.000, 0.000, 18.800, -31.600]]),
         np.array([[0.010, -0.011, -0.151],
                   [0.003, -0.021, 0.000],
                   [0.009, -0.059, 0.000],
                   [0.024, -0.162, 0.000],
                   [0.068, -0.445, 0.000],
                   [0.000, 0.000, 0.000],
                   [0.000, 0.000, 0.000],
                   [0.000, 0.000, 0.000],
                   [0.000, 0.000, 0.000]]),
         np.eye(9),
         np.eye(3),
         None),
        # Test Case 5: carex #6
        (mat6['A'], mat6['B'], mat6['Q'], mat6['R'], None),
        # Test Case 6: carex #7
        (np.array([[1, 0], [0, -2.]]),
         np.array([[1e-6], [0]]),
         np.ones((2, 2)),
         1.,
         'Bad residual accuracy'),
        # Test Case 7: carex #8
        (block_diag(-0.1, -0.02),
         np.array([[0.100, 0.000], [0.001, 0.010]]),
         np.array([[100, 1000], [1000, 10000]]),
         np.ones((2, 2)) + block_diag(1e-6, 0),
         None),
        # Test Case 8: carex #9
        (np.array([[0, 1e6], [0, 0]]),
         np.array([[0], [1.]]),
         np.eye(2),
         1.,
         None),
        # Test Case 9: carex #10
        (np.array([[1.0000001, 1], [1., 1.0000001]]),
         np.eye(2),
         np.eye(2),
         np.eye(2),
         None),
        # Test Case 10: carex #11
        (np.array([[3, 1.], [4, 2]]),
         np.array([[1], [1]]),
         np.array([[-11, -5], [-5, -2.]]),
         1.,
         None),
        # Test Case 11: carex #12
        (np.array([[7000000., 2000000., -0.],
                   [2000000., 6000000., -2000000.],
                   [0., -2000000., 5000000.]]) / 3,
         np.eye(3),
         np.array([[1., -2., -2.], [-2., 1., -2.], [-2., -2., 1.]]).dot(
                np.diag([1e-6, 1, 1e6])).dot(
            np.array([[1., -2., -2.], [-2., 1., -2.], [-2., -2., 1.]])) / 9,
         np.eye(3) * 1e6,
         'Bad Residual Accuracy'),
        # Test Case 12: carex #13
        (np.array([[0, 0.4, 0, 0],
                   [0, 0, 0.345, 0],
                   [0, -0.524e6, -0.465e6, 0.262e6],
                   [0, 0, 0, -1e6]]),
         np.array([[0, 0, 0, 1e6]]).T,
         np.diag([1, 0, 1, 0]),
         1.,
         None),
        # Test Case 13: carex #14
        (np.array([[-1e-6, 1, 0, 0],
                   [-1, -1e-6, 0, 0],
                   [0, 0, 1e-6, 1],
                   [0, 0, -1, 1e-6]]),
         np.ones((4, 1)),
         np.ones((4, 4)),
         1.,
         None),
        # Test Case 14: carex #15
        (mat15['A'], mat15['B'], mat15['Q'], mat15['R'], None),
        # Test Case 15: carex #16
        (np.eye(64, 64, k=-1) + np.eye(64, 64)*(-2.) + np.rot90(
                 block_diag(1, np.zeros((62, 62)), 1)) + np.eye(64, 64, k=1),
         np.eye(64),
         np.eye(64),
         np.eye(64),
         None),
        # Test Case 16: carex #17
        (np.diag(np.ones((20, )), 1),
         np.flipud(np.eye(21, 1)),
         np.eye(21, 1) * np.eye(21, 1).T,
         1,
         'Bad Residual Accuracy'),
        # Test Case 17: carex #18
        (mat18['A'], mat18['B'], mat18['Q'], mat18['R'], None),
        # Test Case 18: carex #19
        (mat19['A'], mat19['B'], mat19['Q'], mat19['R'],
         'Bad Residual Accuracy'),
        # Test Case 19: carex #20
        (mat20['A'], mat20['B'], mat20['Q'], mat20['R'],
         'Bad Residual Accuracy')
        ]
    # Makes the minimum precision requirements customized to the test.
    # Here numbers represent the number of decimals that agrees with zero
    # matrix when the solution x is plugged in to the equation.
    #
    # res = array([[8e-3,1e-16],[1e-16,1e-20]]) --> min_decimal[k] = 2
    #
    # If the test is failing use "None" for that entry.
    #
    min_decimal = (14, 12, 14, 14, 11, 7, None, 5, 7, 14, 14,
                   None, 10, 14, 13, 14, None, 12, None, None)

    def _test_factory(case, dec):
        """Checks if 0 = XA + A'X - XB(R)^{-1} B'X + Q is true"""
        a, b, q, r, knownfailure = case
        if knownfailure:
            raise KnownFailureTest(knownfailure)

        x = solve_continuous_are(a, b, q, r)
        res = x.dot(a) + a.conj().T.dot(x) + q
        out_fact = x.dot(b)
        res -= out_fact.dot(solve(np.atleast_2d(r), out_fact.conj().T))
        assert_array_almost_equal(res, np.zeros_like(res), decimal=dec)

    for ind, case in enumerate(cases):
        yield _test_factory, case, min_decimal[ind]

Example 5

Project: Arelle Source File: CntlrGenVersReports.py
    def runFromExcel(self, options):
        #testGenFileName = options.excelfilename
        testGenFileName = r"C:\Users\Herm Fischer\Documents\mvsl\projects\XBRL.org\conformance-versioning\trunk\versioningReport\conf\creation-index.xls"
        testGenDir = os.path.dirname(testGenFileName)
        schemaDir = os.path.dirname(testGenDir) + os.sep + "schema"
        timeNow = XmlUtil.dateunionValue(datetime.datetime.now())
        if options.testfiledate:
            today = options.testfiledate
        else:
            today = XmlUtil.dateunionValue(datetime.date.today())
        startedAt = time.time()
        
        LogHandler(self) # start logger

        self.logMessages = []
        logMessagesFile = testGenDir + os.sep + 'log-generation-messages.txt'

        modelTestcases = ModelXbrl.create(self.modelManager, url=testGenFileName, isEntry=True)
        testcaseIndexBook = xlrd.open_workbook(testGenFileName)
        testcaseIndexSheet = testcaseIndexBook.sheet_by_index(0)
        self.addToLog(_("[info] xls loaded in {0:.2} secs at {1}").format(time.time() - startedAt, timeNow))
        
        # start index file
        indexFiles = [testGenDir + os.sep + 'creation-testcases-index.xml',
                      testGenDir + os.sep + 'consumption-testcases-index.xml']
        indexDocs = []
        testcasesElements = []
        for purpose in ("Creation","Consumption"):
            file = io.StringIO(
                #'<?xml version="1.0" encoding="UTF-8"?>'
                '<!-- XBRL Versioning 1.0 {0} Tests -->'
                '<!-- Copyright 2011 XBRL International.  All Rights Reserved. -->'
                '<?xml-stylesheet type="text/xsl" href="infrastructure/testcases-index.xsl"?>'
                '<testcases name="XBRL Versioning 1.0 {0} Tests" '
                ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
                ' xsi:noNamespaceSchemaLocation="infrastructure/testcases-index.xsd">'
                '</testcases>'.format(purpose, today)
                )
            doc = etree.parse(file)
            file.close()
            indexDocs.append(doc)
            testcasesElements.append(doc.getroot())
        priorTestcasesDir = None
        testcaseFiles = None
        testcaseDocs = None
        for iRow in range(1, testcaseIndexSheet.nrows):
            try:
                row = testcaseIndexSheet.row(iRow)
                if (row[0].ctype == xlrd.XL_CELL_EMPTY or # must have directory
                    row[1].ctype == xlrd.XL_CELL_EMPTY or # from
                    row[2].ctype == xlrd.XL_CELL_EMPTY):  # to
                    continue
                testDir = row[0].value
                uriFrom = row[1].value
                uriTo = row[2].value
                overrideReport = row[3].value
                description = row[4].value
                if description is None or len(description) == 0:
                    continue # test not ready to run
                assignment = row[5].value
                expectedEvents = row[6].value # comma space separated if multiple
                note = row[7].value
                useCase = row[8].value
                base = os.path.join(os.path.dirname(testGenFileName),testDir) + os.sep
                self.addToLog(_("[info] testcase uriFrom {0}").format(uriFrom))
                if uriFrom and uriTo and assignment.lower() not in ("n.a.", "error") and expectedEvents != "N.A.":
                    modelDTSfrom = modelDTSto = None
                    for URIs, msg, isFrom in ((uriFrom, _("loading from DTS"), True), (uriTo, _("loading to DTS"), False)):
                        if ',' not in URIs:
                            modelDTS = ModelXbrl.load(self.modelManager, URIs, msg, base=base)
                        else:
                            modelDTS = ModelXbrl.create(self.modelManager,
                                         ModelDocument.Type.DTSENTRIES,
                                         self.webCache.normalizeUrl(URIs.replace(", ","_") + ".dts",
                                                                    base),
                                         isEntry=True)
                            DTSdoc = modelDTS.modelDocument
                            DTSdoc.inDTS = True
                            for uri in URIs.split(','):
                                doc = ModelDocument.load(modelDTS, uri.strip(), base=base)
                                if doc is not None:
                                    DTSdoc.referencesDocument[doc] = "import"  # fake import
                                    doc.inDTS = True
                        if isFrom: modelDTSfrom = modelDTS
                        else: modelDTSto = modelDTS
                    if modelDTSfrom is not None and modelDTSto is not None:
                        # generate differences report
                        reportUri = uriFrom.partition(',')[0]  # first file
                        reportDir = os.path.dirname(reportUri)
                        if reportDir: reportDir += os.sep
                        reportName = os.path.basename(reportUri).replace("from.xsd","report.xml")
                        reportFile = reportDir + "out" + os.sep + reportName
                        #reportFile = reportDir + "report" + os.sep + reportName
                        reportFullPath = self.webCache.normalizeUrl(
                                            reportFile, 
                                            base)
                        testcasesDir = os.path.dirname(os.path.dirname(reportFullPath))
                        if testcasesDir != priorTestcasesDir:
                            # close prior report
                            if priorTestcasesDir:
                                for i,testcaseFile in enumerate(testcaseFiles):
                                    with open(testcaseFile, "w", encoding="utf-8") as fh:
                                        XmlUtil.writexml(fh, testcaseDocs[i], encoding="utf-8")
                            testcaseName = os.path.basename(testcasesDir)
                            testcaseFiles = [testcasesDir + os.sep + testcaseName + "-creation-testcase.xml",
                                             testcasesDir + os.sep + testcaseName + "-consumption-testcase.xml"]
                            for i,testcaseFile in enumerate(testcaseFiles):
                                etree.SubElement(testcasesElements[i], "testcase", 
                                                 attrib={"uri": 
                                                         testcaseFile[len(testGenDir)+1:].replace("\\","/")} )
                            
                            # start testcase file
                            testcaseDocs = []
                            testcaseElements = []
                            testcaseNumber = testcaseName[0:4]
                            if testcaseNumber.isnumeric():
                                testcaseNumberElement = "<number>{0}</number>".format(testcaseNumber)
                                testcaseName = testcaseName[5:]
                            else:
                                testcaseNumberElement = ""
                            testDirSegments = testDir.split('/')
                            if len(testDirSegments) >= 2 and '-' in testDirSegments[1]:
                                testedModule = testDirSegments[1][testDirSegments[1].index('-') + 1:]
                            else:
                                testedModule = ''
                            for purpose in ("Creation","Consumption"):
                                file = io.StringIO(
                                    #'<?xml version="1.0" encoding="UTF-8"?>'
                                    '<!-- Copyright 2011 XBRL International.  All Rights Reserved. -->'
                                    '<?xml-stylesheet type="text/xsl" href="../../../infrastructure/test.xsl"?>'
                                    '<testcase '
                                    ' xmlns="http://xbrl.org/2008/conformance"'
                                    ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
                                    ' xsi:schemaLocation="http://xbrl.org/2008/conformance ../../../infrastructure/test.xsd">'
                                    '<creator>'
                                    '<name>Roland Hommes</name>'
                                    '<email>[email protected]</email>'
                                    '</creator>'
                                    '{0}'
                                    '<name>{1}</name>'
                                    # '<description>{0}</description>'
                                    '<reference>'
                                    '{2}'
                                    '{3}'
                                    '</reference>'
                                    '</testcase>'.format(testcaseNumberElement,
                                                         testcaseName,
                                                         '<name>{0}</name>'.format(testedModule) if testedModule else '',
                                                         '<id>{0}</id>'.format(useCase) if useCase else '')
                                    
                                    )
                                doc = etree.parse(file)
                                file.close()
                                testcaseDocs.append(doc)
                                testcaseElements.append(doc.getroot())
                            priorTestcasesDir = testcasesDir
                            variationSeq = 1
                        try:
                            os.makedirs(os.path.dirname(reportFullPath))
                        except OSError:  # WindowsError exists only on Windows
                            pass # dir already exists
                        modelVersReport = ModelVersReport.ModelVersReport(modelTestcases)
                        modelVersReport.diffDTSes(reportFullPath,modelDTSfrom, modelDTSto, 
                                                  assignment=assignment,
                                                  schemaDir=schemaDir)
                        
                        # check for expected elements
                        if expectedEvents:
                            for expectedEvent in expectedEvents.split(","):
                                if expectedEvent not in ("No change", "N.A."):
                                    prefix, sep, localName = expectedEvent.partition(':')
                                    if sep and len(modelVersReport.xmlDocument.findall(
                                                        '//{{{0}}}{1}'.format(
                                                            XbrlConst.verPrefixNS.get(prefix),
                                                            localName))) == 0:
                                        modelTestcases.warning("warning",
                                            "Generated test case %(reportName)s missing expected event %(event)s",
                                            reportName=reportName, 
                                            event=expectedEvent)
                        
                        modelVersReport.close()
                        uriFromParts = uriFrom.split('_')
                        if len(uriFromParts) >= 2:
                            variationId = uriFromParts[1]
                        else:
                            variationId = "_{0:02n}".format(variationSeq)
                        for i,testcaseElt in enumerate(testcaseElements):
                            variationElement = etree.SubElement(testcaseElt, "{http://xbrl.org/2008/conformance}variation", 
                                                                attrib={"id": variationId})
                            nameElement = etree.SubElement(variationElement, "{http://xbrl.org/2008/conformance}description")
                            nameElement.text = description
                            ''' (removed per RH 2011/10/04
                            if note:
                                paramElement = etree.SubElement(variationElement, "{http://xbrl.org/2008/conformance}description")
                                paramElement.text = "Note: " + note
                            if useCase:
                                paramElement = etree.SubElement(variationElement, "{http://xbrl.org/2008/conformance}reference")
                                paramElement.set("specification", "versioning-requirements")
                                paramElement.set("useCase", useCase)
                            '''
                            dataElement = etree.SubElement(variationElement, "{http://xbrl.org/2008/conformance}data")
                            if i == 0:  # result is report
                                if expectedEvents:
                                    paramElement = etree.SubElement(dataElement, "{http://xbrl.org/2008/conformance}parameter",
                                                                    attrib={"name":"expectedEvent",
                                                                            "value":expectedEvents.replace(',',' ')},
                                                                    nsmap={"conf":"http://xbrl.org/2008/conformance",
                                                                           None:""})
                                if assignment:
                                    paramElement = etree.SubElement(dataElement, "{http://xbrl.org/2008/conformance}parameter",
                                                                    attrib={"name":"assignment",
                                                                            "value":assignment},
                                                                    nsmap={"conf":"http://xbrl.org/2008/conformance",
                                                                           None:""})
                            for schemaURIs, dtsAttr in ((uriFrom,"from"), (uriTo,"to")):
                                for schemaURI in schemaURIs.split(","): 
                                    schemaElement = etree.SubElement(dataElement, "{http://xbrl.org/2008/conformance}schema")
                                    schemaElement.set("dts",dtsAttr)
                                    if i == 0:
                                        schemaElement.set("readMeFirst","true")
                                    schemaElement.text=os.path.basename(schemaURI.strip())
                            resultElement = etree.SubElement(variationElement, "{http://xbrl.org/2008/conformance}result")
                            reportElement = etree.SubElement(resultElement if i == 0 else dataElement, 
                                             "{http://xbrl.org/2008/conformance}versioningReport")
                            if i == 1:
                                reportElement.set("readMeFirst","true")
                            reportElement.text = "report/" + reportName
                        variationSeq += 1
            except Exception as err:
                modelTestcases.error("exception",
                    _("Exception: %(error)s, Excel row: %(excelRow)s"),
                    error=err,
                    excelRow=iRow, 
                    exc_info=True)
        
        # add tests-error-code index files to consumption
        for testcaseFile in self.testcaseFiles(testGenDir + os.sep + "tests-error-code"):
            etree.SubElement(testcasesElements[1], "testcase", 
                             attrib={"uri": 
                             testcaseFile[len(testGenDir)+1:].replace("\\","/")} )

        with open(logMessagesFile, "w") as fh:
            fh.writelines(self.logMessages)

        if priorTestcasesDir:
            for i,testcaseFile in enumerate(testcaseFiles):
                with open(testcaseFile, "w", encoding="utf-8") as fh:
                    XmlUtil.writexml(fh, testcaseDocs[i], encoding="utf-8")
        for i,indexFile in enumerate(indexFiles):
            with open(indexFile, "w", encoding="utf-8") as fh:
                XmlUtil.writexml(fh, indexDocs[i], encoding="utf-8")
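
Aside from the hard-coded input path, the setup lines at the top of runFromExcel show a common os.path.dirname pattern: start from one known file, take its directory, then derive a sibling directory one level up. A minimal sketch with hypothetical paths; os.path.join is the safer spelling of the `+ os.sep +` concatenation used in the excerpt:

import os

testGenFileName = "/project/conf/creation-index.xls"              # hypothetical
testGenDir = os.path.dirname(testGenFileName)                     # /project/conf
schemaDir = os.path.join(os.path.dirname(testGenDir), "schema")   # /project/schema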

Example 6

Project: eofs Source File: plot_directive.py
Function: run
def run(arguments, content, options, state_machine, state, lineno):
    # The user may provide a filename *or* Python code content, but not both
    if arguments and content:
        raise RuntimeError("plot:: directive can't have both args and content")

    document = state_machine.document
    config = document.settings.env.config
    nofigs = options.has_key('nofigs')

    options.setdefault('include-source', config.plot_include_source)
    context = options.has_key('context')

    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if len(arguments):
        if not config.plot_basedir:
            source_file_name = os.path.join(setup.app.builder.srcdir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))

        # If there is content, it will be passed as a caption.
        caption = '\n'.join(content)

        # If the optional function name is provided, use it
        if len(arguments) == 2:
            function_name = arguments[1]
        else:
            function_name = None

        with open(source_file_name, 'r') as fd:
            code = fd.read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)
        function_name = None
        caption = ''

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if options.has_key('format'):
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    # get rid of .. in paths, also changes pathsep
    # see note in Python docs for warning about symbolic links on Windows.
    # need to compare source and dest paths at end
    build_dir = os.path.normpath(build_dir)

    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir) # no problem here for me, but just use built-ins

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = render_figures(code, source_file_name, build_dir, output_base,
                                 context, function_name, config)
        errors = []
    except PlotError, err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
                                                source_file_name, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # Properly indent the caption
    caption = '\n'.join('      ' + line.strip()
                        for line in caption.split('\n'))

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        if nofigs:
            images = []

        opts = [':%s: %s' % (key, val) for key, val in options.items()
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        only_html = ".. only:: html"
        only_latex = ".. only:: latex"

        if j == 0:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            config.plot_template or TEMPLATE,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats,
            caption=caption)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory, if necessary
    if not os.path.exists(dest_dir):
        cbook.mkdirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                destimg = os.path.join(dest_dir, os.path.basename(fn))
                if fn != destimg:
                    shutil.copyfile(fn, destimg)

    # copy script (if necessary)
    target_name = os.path.join(dest_dir, output_base + source_ext)
    with open(target_name, 'w') as f:
        if source_file_name == rst_file:
            code_escaped = unescape_doctest(code)
        else:
            code_escaped = code
        f.write(code_escaped)

    return errors
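
The build_dir computation is the central os.path.dirname usage here: take the document's path relative to the Sphinx confdir, keep only the directory fragment, and mirror it under a scratch directory next to the doctrees. A standalone sketch with hypothetical paths, using stdlib os.path.relpath in place of the excerpt's local relpath helper:

import os

confdir = "/project/docs"                              # hypothetical Sphinx confdir
source_file_name = "/project/docs/gallery/demo.rst"
doctreedir = "/project/docs/_build/doctrees"

source_rel_dir = os.path.dirname(os.path.relpath(source_file_name, confdir))
build_dir = os.path.normpath(os.path.join(os.path.dirname(doctreedir),
                                          'plot_directive', source_rel_dir))
print(build_dir)   # /project/docs/_build/plot_directive/gallery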

Example 7

Project: django-ddp Source File: views.py
    def __init__(self, **kwargs):
        """
        Initialisation for Django DDP server view.

        The following items populate `Meteor.settings` (later items take precedence):
          1. django.conf.settings.METEOR_SETTINGS
          2. os.environ['METEOR_SETTINGS']
          3. MeteorView.meteor_settings (class attribute) or empty dict
          4. MeteorView.as_view(meteor_settings=...)

        Additionally, `Meteor.settings.public` is updated with values from
        environment variables specified by the following sources:
          1. django.conf.settings.METEOR_PUBLIC_ENVS
          2. os.environ['METEOR_PUBLIC_ENVS']
          3. MeteorView.meteor_public_envs (class attribute) or empty dict
          4. MeteorView.as_view(meteor_public_envs=...)
        """
        self.runtime_config = {}
        self.meteor_settings = {}
        for other in [
                getattr(settings, 'METEOR_SETTINGS', {}),
                loads(os.environ.get('METEOR_SETTINGS', '{}')),
                self.meteor_settings or {},
                kwargs.pop('meteor_settings', {}),
        ]:
            self.meteor_settings = dict_merge(self.meteor_settings, other)
        self.meteor_public_envs = set()
        self.meteor_public_envs.update(
            getattr(settings, 'METEOR_PUBLIC_ENVS', []),
            os.environ.get('METEOR_PUBLIC_ENVS', '').replace(',', ' ').split(),
            self.meteor_public_envs or [],
            kwargs.pop('meteor_public_envs', []),
        )
        public = self.meteor_settings.setdefault('public', {})
        for env_name in self.meteor_public_envs:
            try:
                public[env_name] = os.environ[env_name]
            except KeyError:
                pass  # environment variable not set
        # super(...).__init__ assigns kwargs to instance.
        super(MeteorView, self).__init__(**kwargs)

        # read and process /etc/mime.types
        mimetypes.init()

        self.url_map = {}

        # process `star_json`
        self.star_json = read_json(self.json_path)
        star_format = self.star_json['format']
        if star_format != 'site-archive-pre1':
            raise ValueError(
                'Unknown Meteor star format: %r' % star_format,
            )
        programs = {
            program['name']: program
            for program in self.star_json['programs']
        }

        # process `bundle/programs/server/program.json` from build dir
        server_json_path = os.path.join(
            os.path.dirname(self.json_path),
            os.path.dirname(programs['server']['path']),
            'program.json',
        )
        server_json = read_json(server_json_path)
        server_format = server_json['format']
        if server_format != 'javascript-image-pre1':
            raise ValueError(
                'Unknown Meteor server format: %r' % server_format,
            )
        self.server_load_map = {}
        for item in server_json['load']:
            item['path_full'] = os.path.join(
                os.path.dirname(server_json_path),
                item['path'],
            )
            self.server_load_map[item['path']] = item
            self.url_map[item['path']] = (
                item['path_full'], 'text/javascript'
            )
            try:
                item['source_map_full'] = os.path.join(
                    os.path.dirname(server_json_path),
                    item['sourceMap'],
                )
                self.url_map[item['sourceMap']] = (
                    item['source_map_full'], 'text/plain'
                )
            except KeyError:
                pass
        self.template_path = os.path.join(
            os.path.dirname(server_json_path),
            self.server_load_map[
                'packages/boilerplate-generator.js'
            ][
                'assets'
            ][
                'boilerplate_web.browser.html'
            ],
        )

        # process `bundle/programs/web.browser/program.json` from build dir
        web_browser_json_path = os.path.join(
            os.path.dirname(self.json_path),
            programs['web.browser']['path'],
        )
        web_browser_json = read_json(web_browser_json_path)
        web_browser_format = web_browser_json['format']
        if web_browser_format != 'web-program-pre1':
            raise ValueError(
                'Unknown Meteor web.browser format: %r' % (
                    web_browser_format,
                ),
            )
        self.client_map = {}
        self.internal_map = {}
        for item in web_browser_json['manifest']:
            item['path_full'] = os.path.join(
                os.path.dirname(web_browser_json_path),
                item['path'],
            )
            if item['where'] == 'client':
                if '?' in item['url']:
                    item['url'] = item['url'].split('?', 1)[0]
                if item['url'].startswith('/'):
                    item['url'] = item['url'][1:]
                self.client_map[item['url']] = item
                self.url_map[item['url']] = (
                    item['path_full'],
                    mimetypes.guess_type(
                        item['path_full'],
                    )[0] or 'application/octet-stream',
                )
            elif item['where'] == 'internal':
                self.internal_map[item['type']] = item

        config = {
            'css': [
                {'url': item['path']}
                for item in web_browser_json['manifest']
                if item['type'] == 'css' and item['where'] == 'client'
            ],
            'js': [
                {'url': item['path']}
                for item in web_browser_json['manifest']
                if item['type'] == 'js' and item['where'] == 'client'
            ],
            'meteorRuntimeConfig': '"%s"' % (
                dumps(self.runtime_config)
            ),
            'rootUrlPathPrefix': self.root_url_path_prefix,
            'bundledJsCssPrefix': self.bundled_js_css_prefix,
            'inlineScriptsAllowed': False,
            'inline': None,
            'head': read(
                self.internal_map.get('head', {}).get('path_full', None),
                default=u'',
            ),
            'body': read(
                self.internal_map.get('body', {}).get('path_full', None),
                default=u'',
            ),
        }
        tmpl_raw = read(self.template_path, encoding='utf8')
        compiler = pybars.Compiler()
        tmpl = compiler.compile(tmpl_raw)
        self.html = '<!DOCTYPE html>\n%s' % tmpl(config)
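
Every entry in the Meteor star archive is stored relative to the JSON manifest that names it, which is why the view keeps re-anchoring paths with os.path.dirname(self.json_path) and os.path.dirname(server_json_path). The underlying idiom, reduced to a sketch with hypothetical file names:

import os

json_path = "/srv/meteor/bundle/star.json"      # hypothetical manifest location
relative_entry = "programs/server/boot.js"      # path as stored in the manifest
full_path = os.path.join(os.path.dirname(json_path), relative_entry)
# -> /srv/meteor/bundle/programs/server/boot.js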

Example 8

Project: Subterfuge Source File: views.py
Function: conf
def conf(request, module):
      # Read in subterfuge.conf
   with open(str(os.path.dirname(__file__)).rstrip("abcdefghijklmnnnopqrstruvwxyz") + 'subterfuge.conf', 'r') as file:
      conf = file.readlines()

      # Subterfuge Settings Configuration
      # Edit subterfuge.conf
   if module == "settings":
         #Attack Setup
      try:
         setup.objects.update(iface = request.POST["iface"])
         conf[15] = request.POST["iface"] + "\n"
         print "Using Interface => " + request.POST["iface"]
      except:
         pass
      
      try:
         if request.POST["auto"] == "true":
            setup.objects.update(autoconf = "yes")
            conf[20] = "yes" + "\n"
            print "Auto Configure  => yes"
         else:
            setup.objects.update(autoconf = "no")
            conf[20] = "no" + "\n"
            print "Auto Configure  => no"
      except:
         pass
         
      try:
         setup.objects.update(gateway = request.POST["agw"])
         conf[17] = request.POST["agw"] + "\n"
         print "Using Gateway   => " + request.POST["agw"]
      except:
         pass

      try:
         setup.objects.update(proxymode = request.POST["proxymode"])
         print "Using Gateway   => " + request.POST["proxymode"]
      except:
         pass
         
      try:
         setup.objects.update(gateway = request.POST["mgw"])
         conf[17] = request.POST["mgw"] + "\n"
         print "Using Gateway   => " + request.POST["mgw"]
      except:
         pass

         #Get the Local IP Address
      try:
         f = os.popen("ifconfig " + request.POST["iface"] + " | grep \"inet addr\" | sed -e \'s/.*addr://;s/ .*//\'")
         temp2 = ''
         temp3 = ''
         temp = f.readline().rstrip('\n')
   
         ipaddress = re.findall(r'\d+\.\d+\.\d+\.\d+', temp)[0]  # escape dots to match literal separators
         conf[26] = ipaddress + "\n"
         setup.objects.update(ip = ipaddress)
      except:
         pass
         
         
         #Configuration
      try:
         setup.objects.update(ploadrate = request.POST["ploadrate"])
         setup.objects.update(injectrate = request.POST["injectrate"])
         if request.POST["smartarp"] == "true":
            setup.objects.update(smartarp = "yes")
         elif request.POST["smartarp"] == "false":
            setup.objects.update(smartarp = "no")
         setup.objects.update(arprate = request.POST["arprate"])
      except:
         pass
      
      
         #Vectors
      try:
         if request.POST["active"] == "true":
            vectors.objects.filter(name = request.POST["vector"]).update(active = "yes")
         else:
            vectors.objects.filter(name = request.POST["vector"]).update(active = "no")
            
            #Wireless AP Generator Settings 
         if request.POST["vector"] == "ARP Cache Poisoning":
            arppoison.objects.update(target = request.POST["target"])
            arppoison.objects.update(method = request.POST["arpmethod"])
         
            #Wireless AP Generator Settings
         if request.POST["vector"] == "Wireless AP Generator":
            apgen.objects.update(essid = request.POST["essid"])
            apgen.objects.update(channel = request.POST["channel"])
            apgen.objects.update(atknic = request.POST["atknic"])
            apgen.objects.update(netnic = request.POST["netnic"])
      except:
         pass
         
         
         #Advanced
      try:
         scanip = request.POST["scantargetip"]
         print "Importing Nmap scan for: " + scanip
         
            #Get/Write Files
         if request.FILES['scanresults']:
            scanresults = request.FILES['scanresults']
            dest = open(str(os.path.dirname(__file__)).rstrip("abcdefghijklmnnnopqrstruvwxyz") + 'utilities/scans/' + scanip + '.xml', 'wb+')
            for chunk in scanresults.chunks():
               dest.write(chunk)
            dest.close()
               #Execute Scan
            os.system('python ' + str(os.path.dirname(__file__)).rstrip("abcdefghijklmnnnopqrstruvwxyz") + 'utilities/scan.py ' + scanip)
            
               #Relay Template Variables
         return render_to_response("settings.ext", {
            "config"    :   config,
            "conf"      :   str(config[20]).rstrip('\n'),
            "iface"	   :   result,
            "gateway"   :   gw,
            "status"    :   status,
            "setup"     :   currentsetup,
         })    
      except:
         pass
         

   if module == "update":
      os.system('apt-get install subterfuge')

   if module == "exportcreds":
      os.system('python ' + str(os.path.dirname(__file__)).rstrip("abcdefghijklmnnnopqrstruvwxyz") + 'exportcreds.py')

      #################################
      #Subterfuge Module Configurations
      #################################
      
   if module == "httpinjection":   
      httpcodeinjection(request, conf)
      
   elif module == "tunnelblock":   
      tunnelblock()
      
   else:
      for mod in installed.objects.all():
         if module == mod.name:
           os.system('python ' + str(os.path.dirname(__file__)).rstrip("abcdefghijklmnnnopqrstruvwxyz") + 'modules/' + module + '/' + module + '.py &')
   
              
      #################################
      #  END MODULE CONFIGURATION
      #################################
   
      # Write to subterfuge.conf
   with open(str(os.path.dirname(__file__)).rstrip("abcdefghijklmnnnopqrstruvwxyz") + 'subterfuge.conf', 'w') as file:
      file.writelines(conf)
      
      
      # Call Index Page
      # Check Arpspoof status
   command = "ps -A 1 | sed -e '/arpmitm/!d;/sed -e/d;s/^ //;s/ pts.*//'"
   a = os.popen(command)
   reply = a.read()
   if len(reply) > 1:
      status = "on"
   else:
      status = "off"
   
   if module == "httpinjection" or module == "tunnelblock":
            #Relay Template Variables
        modules = installed.objects.all()
        return render_to_response("plugins.ext", {
            "modules"   :   modules,
        })
   else:
         #Relay Template Variables
      return render_to_response(request.META['HTTP_REFERER'].split('/')[3] + ".ext", {
         "status"    :   status,
      })
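
The repeated rstrip("abcdefghijklmnnnopqrstruvwxyz") call is a fragile way to drop the last component of os.path.dirname(__file__): it strips trailing lowercase letters until it hits the path separator, so it only works when the final directory name consists solely of lowercase letters. A sketch of the conventional spelling of the same intent, one extra dirname plus os.path.join:

import os

base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
conf_path = os.path.join(base_dir, 'subterfuge.conf')
with open(conf_path) as fh:
    conf = fh.readlines()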

Example 9

Project: fb2mobi Source File: config.py
Function: load
    def _load(self):
        config = etree.parse(self.config_file)
        for e in config.getroot():
            if e.tag == 'debug':
                self.debug = e.text.lower() == 'true'

            elif e.tag == 'logFile':
                if e.text:
                    self.original_log_file = e.text
                    self.log_file = os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(self.config_file)), e.text))

            elif e.tag == 'logLevel':
                self.log_level = e.text

            elif e.tag == 'consoleLevel':
                self.console_level = e.text

            elif e.tag == 'outputFormat':
                self.output_format = e.text

            elif e.tag == 'kindleCompressionLevel':
                self.kindle_compression_level = int(e.text)

            elif e.tag == 'noDropcapsSymbols':
                self.no_dropcaps_symbols = e.text

            elif e.tag == 'transliterate':
                self.transliterate = e.text.lower() == 'true'

            elif e.tag == 'screenWidth':
                self.screen_width = int(e.text)

            elif e.tag == 'screenHeight':
                self.screen_height = int(e.text)

            elif e.tag == 'defaultProfile':
                self.default_profile = e.text

            elif e.tag == 'noMOBIoptimization':
                self.noMOBIoptimization = e.text.lower() == 'true'

            elif e.tag == 'sendToKindle':
                for s in e:
                    if s.tag == 'send':
                        self.send_to_kindle['send'] = s.text.lower() == 'true'

                    elif s.tag == 'deleteSendedBook':
                        self.send_to_kindle['deleteSendedBook'] = s.text.lower() == 'true'

                    elif s.tag == 'smtpServer':
                        self.send_to_kindle['smtpServer'] = s.text

                    elif s.tag == 'smtpPort':
                        self.send_to_kindle['smtpPort'] = int(s.text)

                    elif s.tag == 'smtpLogin':
                        self.send_to_kindle['smtpLogin'] = s.text

                    elif s.tag == 'smtpPassword':
                        self.send_to_kindle['smtpPassword'] = s.text

                    elif s.tag == 'fromUserEmail':
                        self.send_to_kindle['fromUserEmail'] = s.text

                    elif s.tag == 'toKindleEmail':
                        self.send_to_kindle['toKindleEmail'] = s.text

            elif e.tag == 'profiles':
                self.profiles = {}
                for prof in e:
                    prof_name = prof.attrib['name']
                    self.profiles[prof_name] = {}
                    self.profiles[prof_name]['name'] = prof.attrib['name']
                    self.profiles[prof_name]['description'] = prof.attrib['description']
                    self.profiles[prof_name]['vignettes'] = {}

                    self.profiles[prof_name]['generateTOCPage'] = True
                    self.profiles[prof_name]['generateAnnotationPage'] = True
                    self.profiles[prof_name]['generateOPFGuide'] = True
                    self.profiles[prof_name]['flatTOC'] = True
                    self.profiles[prof_name]['kindleRemovePersonalLabel'] = True
                    self.profiles[prof_name]['removePngTransparency'] = False

                    for p in prof:
                        if p.tag == 'hyphens':
                            self.profiles[prof_name]['hyphens'] = p.text.lower() == 'true'

                        elif p.tag == 'hyphensReplaceNBSP':
                            self.profiles[prof_name]['hyphensReplaceNBSP'] = p.text.lower() == 'true'

                        elif p.tag == 'dropcaps':
                            self.profiles[prof_name]['dropcaps'] = p.text

                        elif p.tag == 'outputFormat':
                            self.profiles[prof_name]['outputFormat'] = p.text

                        elif p.tag == 'transliterate':
                            self.profiles[prof_name]['transliterate'] = p.text.lower() == 'true'

                        elif p.tag == 'screenWidth':
                            self.profiles[prof_name]['screenWidth'] = int(p.text)

                        elif p.tag == 'screenHeight':
                            self.profiles[prof_name]['screenHeight'] = int(p.text)

                        elif p.tag == 'transliterateAuthorAndTitle':
                            self.profiles[prof_name]['transliterateAuthorAndTitle'] = p.text.lower() == 'true'

                        elif p.tag == 'tocMaxLevel':
                            self.profiles[prof_name]['tocMaxLevel'] = int(p.text)

                        elif p.tag == 'generateTOCPage':
                            self.profiles[prof_name]['generateTOCPage'] = p.text.lower() == 'true'

                        elif p.tag == 'flatTOC':
                            self.profiles[prof_name]['flatTOC'] = p.text.lower() == 'true'

                        elif p.tag == 'kindleRemovePersonalLabel':
                            self.profiles[prof_name]['kindleRemovePersonalLabel'] = p.text.lower() == 'true'

                        elif p.tag == 'removePngTransparency':
                            self.profiles[prof_name]['removePngTransparency'] = p.text.lower() == 'true'

                        elif p.tag == 'generateAnnotationPage':
                            self.profiles[prof_name]['generateAnnotationPage'] = p.text.lower() == 'true'

                        elif p.tag == 'generateOPFGuide':
                            self.profiles[prof_name]['generateOPFGuide'] = p.text.lower() == 'true'

                        elif p.tag == 'tocBeforeBody':
                            self.profiles[prof_name]['tocBeforeBody'] = p.text.lower() == 'true'

                        elif p.tag == 'css':
                            self.profiles[prof_name]['originalcss'] = p.text
                            self.profiles[prof_name]['css'] = os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(self.config_file)), p.text))
                            if 'parse' in p.attrib:
                                self.profiles[prof_name]['parse_css'] = p.attrib['parse'].lower() == 'true'
                            else:
                                self.profiles[prof_name]['parse_css'] = True

                        elif p.tag == 'xslt':
                            self.profiles[prof_name]['xslt'] = os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(self.config_file)), p.text))

                        elif p.tag == 'chapterOnNewPage':
                            self.profiles[prof_name]['chapterOnNewPage'] = p.text.lower() == 'true'

                        elif p.tag == 'authorFormat':
                            self.profiles[prof_name]['authorFormat'] = p.text

                        elif p.tag == 'bookTitleFormat':
                            self.profiles[prof_name]['bookTitleFormat'] = p.text

                        elif p.tag == 'annotationTitle':
                            self.profiles[prof_name]['annotationTitle'] = p.text

                        elif p.tag == 'tocTitle':
                            self.profiles[prof_name]['tocTitle'] = p.text

                        elif p.tag == 'notesMode':
                            self.profiles[prof_name]['notesMode'] = p.text

                        elif p.tag == 'notesBodies':
                            self.profiles[prof_name]['notesBodies'] = p.text

                        elif p.tag == 'vignettes':
                            self.profiles[prof_name]['vignettes'] = {}
                            self.profiles[prof_name]['vignettes_save'] = {}

                            for vignettes in p:
                                vignettes_level = vignettes.attrib['level']
                                vign_arr = {}
                                vign_arr_save = {}

                                for v in vignettes:
                                    vign_arr[v.tag] = None if v.text.lower() == 'none' else os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(self.config_file)), v.text))
                                    vign_arr_save[v.tag] = None if v.text.lower() == 'none' else v.text

                                self.profiles[prof_name]['vignettes'][vignettes_level] = vign_arr
                                self.profiles[prof_name]['vignettes_save'][vignettes_level] = vign_arr_save
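
All the file-valued settings above (logFile, css, xslt, vignettes) resolve the same way: relative to the directory containing the config file, never the process's working directory. Factored into a sketch; the helper name is ours, not fb2mobi's:

import os

def resolve_against_config(config_file, value):
    # Anchor a relative setting at the config file's directory.
    return os.path.abspath(
        os.path.join(os.path.dirname(os.path.abspath(config_file)), value))

# resolve_against_config('/home/user/fb2mobi.config', 'profiles/default.css')
# -> '/home/user/profiles/default.css'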

Example 10

Project: pywcsgrid2 Source File: plot_directive_v3.py
def run(arguments, content, options, state_machine, state, lineno):
    # The user may provide a filename *or* Python code content, but not both
    if arguments and content:
        raise RuntimeError("plot:: directive can't have both args and content")

    document = state_machine.document
    config = document.settings.env.config
    nofigs = 'nofigs' in options

    options.setdefault('include-source', config.plot_include_source)
    context = 'context' in options

    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if len(arguments):
        if not config.plot_basedir:
            source_file_name = os.path.join(setup.app.builder.srcdir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))

        # If there is content, it will be passed as a caption.
        caption = '\n'.join(content)

        # If the optional function name is provided, use it
        if len(arguments) == 2:
            function_name = arguments[1]
        else:
            function_name = None

        with open(source_file_name, 'r') as fd:
            code = fd.read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)
        function_name = None
        caption = ''

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if 'format' in options:
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    # get rid of .. in paths, also changes pathsep
    # see note in Python docs for warning about symbolic links on Windows.
    # need to compare source and dest paths at end
    build_dir = os.path.normpath(build_dir)

    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir) # no problem here for me, but just use built-ins

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = render_figures(code, source_file_name, build_dir, output_base,
                                 context, function_name, config)
        errors = []
    except PlotError as err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
                                                source_file_name, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # Properly indent the caption
    caption = '\n'.join('      ' + line.strip()
                        for line in caption.split('\n'))

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        if nofigs:
            images = []

        opts = [':%s: %s' % (key, val) for key, val in list(options.items())
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        only_html = ".. only:: html"
        only_latex = ".. only:: latex"
        only_texinfo = ".. only:: texinfo"

        if j == 0:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            config.plot_template or TEMPLATE,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            only_texinfo=only_texinfo,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats,
            caption=caption)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory, if necessary
    if not os.path.exists(dest_dir):
        cbook.mkdirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                destimg = os.path.join(dest_dir, os.path.basename(fn))
                if fn != destimg:
                    shutil.copyfile(fn, destimg)

    # copy script (if necessary)
    target_name = os.path.join(dest_dir, output_base + source_ext)
    with open(target_name, 'w') as f:
        if source_file_name == rst_file:
            code_escaped = unescape_doctest(code)
        else:
            code_escaped = code
        f.write(code_escaped)

    return errors
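
Beyond the Python 3 syntax, note how dest_dir_link and build_dir_link turn filesystem paths into URL fragments: compute a relative path, then swap os.path.sep for '/'. A sketch of just that conversion, with hypothetical paths:

import os

rst_dir = "/project/docs/gallery"
build_dir = "/project/docs/_build/plot_directive/gallery"
build_dir_link = os.path.relpath(build_dir, rst_dir).replace(os.path.sep, '/')
# POSIX already uses '/'; on Windows this rewrites '..\\_build\\...' into '../_build/...'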

Example 11

Project: dipy Source File: gqsampling_stats.py
def test_gqiodf():

    #read bvals,gradients and data
    bvals=np.load(opj(os.path.dirname(__file__), \
                          'data','small_64D.bvals.npy'))
    gradients=np.load(opj(os.path.dirname(__file__), \
                              'data','small_64D.gradients.npy'))    
    img =ni.load(os.path.join(os.path.dirname(__file__),\
                                  'data','small_64D.nii'))
    data=img.get_data()    

    #print(bvals.shape)
    #print(gradients.shape)
    #print(data.shape)


    t1=time.clock()
    
    gqs = gq.GeneralizedQSampling(data,bvals,gradients)
    ten = dt.Tensor(data,bvals,gradients,thresh=50)

    
    fa=ten.fa()

    x,y,z,a,b=ten.evecs.shape
    evecs=ten.evecs
    xyz=x*y*z
    evecs = evecs.reshape(xyz,3,3)
    #vs = np.sign(evecs[:,2,:])
    #print vs.shape
    #print np.hstack((vs,vs,vs)).reshape(1000,3,3).shape
    #evecs = np.hstack((vs,vs,vs)).reshape(1000,3,3)
    #print evecs.shape
    evals=ten.evals
    evals = evals.reshape(xyz,3)
    #print evals.shape

    

    t2=time.clock()
    #print('GQS in %d' %(t2-t1))
        
    eds=np.load(opj(os.path.dirname(__file__),\
                        '..','matrices',\
                        'evenly_distributed_sphere_362.npz'))

    
    odf_vertices=eds['vertices']
    odf_faces=eds['faces']

    # Yeh et al., IEEE TMI, 2010
    # calculate the odf using GQI

    scaling = np.sqrt(bvals * 0.01506)
    # 0.01506 = 6*D where D is the free water diffusion coefficient;
    # l_values = sqrt(6*D*tau); D and tau are folded into the b-value

    tmp=np.tile(scaling,(3,1))
    b_vector=gradients.T*tmp
    Lambda = 1.2 # smoothing parameter - diffusion sampling length
    
    q2odf_params=np.sinc(np.dot(b_vector.T, odf_vertices.T) * Lambda/np.pi)
    # implements equation no. 9 from Yeh et al.

    S=data.copy()

    x,y,z,g=S.shape
    S=S.reshape(x*y*z,g)
    QA = np.zeros((x*y*z,5))
    IN = np.zeros((x*y*z,5))

    fwd = 0
    
    #Calculate Quantitative Anisotropy and find the peaks and the indices
    #for every voxel

    summary = {}

    summary['vertices'] = odf_vertices
    v = odf_vertices.shape[0]
    summary['faces'] = odf_faces
    f = odf_faces.shape[0]

    '''
    If e = number of edges,
    the Euler formula for a mesh on the sphere says f - e + v = 2.
    Here, assuming we have a healthy triangulation,
    every face is a triangle and each of its 3 edges belongs to
    exactly two faces, so 2*e = 3*f.
    To avoid division we test whether 2*f - 3*f + 2*v == 4,
    or equivalently 2*v - f == 4.
    '''

    assert_equal(2*v-f, 4,'Direct Euler test fails')
    assert_true(meshes.euler_characteristic_check(odf_vertices, odf_faces,chi=2),'euler_characteristic_check fails')
    
    coarse = meshes.coarseness(odf_faces)
    print 'coarseness: ', coarse

    for (i,s) in enumerate(S):

        #print 'Volume %d' % i

        istr = str(i)

        summary[istr] = {}

        odf = Q2odf(s,q2odf_params)
        peaks,inds=rp.peak_finding(odf,odf_faces)
        fwd=max(np.max(odf),fwd)
        peaks = peaks - np.min(odf)
        l=min(len(peaks),5)
        QA[i][:l] = peaks[:l]
        IN[i][:l] = inds[:l]

        summary[istr]['odf'] = odf
        summary[istr]['peaks'] = peaks
        summary[istr]['inds'] = inds
        summary[istr]['evecs'] = evecs[i,:,:]
        summary[istr]['evals'] = evals[i,:]
   
    QA/=fwd
    QA=QA.reshape(x,y,z,5)    
    IN=IN.reshape(x,y,z,5)
    
    #print('Old %d secs' %(time.clock() - t2))
    # assert_equal((gqs.QA-QA).max(),0.,'Frank QA different than our QA')

    # assert_equal((gqs.QA.shape),QA.shape, 'Frank QA shape is different')
       
    # assert_equal((gqs.QA-QA).max(), 0.)

    #import dipy.core.track_propagation as tp

    #tp.FACT_Delta(QA,IN)

    #return tp.FACT_Delta(QA,IN,seeds_no=10000).tracks

    peaks_1 = [i for i in range(1000) if len(summary[str(i)]['inds'])==1]
    peaks_2 = [i for i in range(1000) if len(summary[str(i)]['inds'])==2]
    peaks_3 = [i for i in range(1000) if len(summary[str(i)]['inds'])==3]

    # correct numbers of voxels with respectively 1,2,3 ODF/QA peaks
    assert_array_equal((len(peaks_1),len(peaks_2),len(peaks_3)), (790,196,14),
                       'error in numbers of QA/ODF peaks')

    # correct indices of odf directions for voxels 0,10,44
    # with respectively 1,2,3 ODF/QA peaks
    assert_array_equal(summary['0']['inds'],[116],
                       'wrong peak indices for voxel 0')
    assert_array_equal(summary['10']['inds'],[105, 78],
                       'wrong peak indices for voxel 10')
    assert_array_equal(summary['44']['inds'],[95, 84, 108],
                       'wrong peak indices for voxel 44')

    assert_equal(np.argmax(summary['0']['odf']), 116)
    assert_equal(np.argmax(summary['10']['odf']), 105)
    assert_equal(np.argmax(summary['44']['odf']), 95)

    pole_1 = summary['vertices'][116]
    #print 'pole_1', pole_1
    pole_2 = summary['vertices'][105]
    #print 'pole_2', pole_2
    pole_3 = summary['vertices'][95]
    #print 'pole_3', pole_3

    vertices = summary['vertices']

    width = 0.02  # earlier values tried: 0.3, 0.05
    
    '''
    print 'pole_1 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_1)) < width])
    print 'pole_2 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_2)) < width])
    print 'pole_3 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_3)) < width])
    '''
    
    #print 'pole_1 equator contains:', len(meshes.equatorial_vertices(vertices,pole_1,width))
    #print 'pole_2 equator contains:', len(meshes.equatorial_vertices(vertices,pole_2,width))
    #print 'pole_3 equator contains:', len(meshes.equatorial_vertices(vertices,pole_3,width))

    #print triple_odf_maxima(vertices,summary['0']['odf'],width)
    #print triple_odf_maxima(vertices,summary['10']['odf'],width)
    #print triple_odf_maxima(vertices,summary['44']['odf'],width)
    #print summary['0']['evals']
    '''

    pole=np.array([0,0,1])

    from dipy.viz import fos
    r=fos.ren()
    fos.add(r,fos.point(pole,fos.green))
    for i,ev in enumerate(vertices):        
        if np.abs(np.dot(ev,pole))<width:
            fos.add(r,fos.point(ev,fos.red))
    fos.show(r)

    '''

    triple = triple_odf_maxima(vertices, summary['10']['odf'], width)
    
    indmax1, odfmax1 = triple[0]
    indmax2, odfmax2 = triple[1]
    indmax3, odfmax3 = triple[2] 

    '''
    from dipy.viz import fos
    r=fos.ren()
    for v in vertices:
        fos.add(r,fos.point(v,fos.cyan))
    fos.add(r,fos.sphere(upper_hemi_map(vertices[indmax1]),radius=0.1,color=fos.red))
    #fos.add(r,fos.line(np.array([0,0,0]),vertices[indmax1]))
    fos.add(r,fos.sphere(upper_hemi_map(vertices[indmax2]),radius=0.05,color=fos.green))
    fos.add(r,fos.sphere(upper_hemi_map(vertices[indmax3]),radius=0.025,color=fos.blue))
    fos.add(r,fos.sphere(upper_hemi_map(summary['0']['evecs'][:,0]),radius=0.1,color=fos.red,opacity=0.7))
    fos.add(r,fos.sphere(upper_hemi_map(summary['0']['evecs'][:,1]),radius=0.05,color=fos.green,opacity=0.7))
    fos.add(r,fos.sphere(upper_hemi_map(summary['0']['evecs'][:,2]),radius=0.025,color=fos.blue,opacity=0.7))
    fos.add(r,fos.sphere([0,0,0],radius=0.01,color=fos.white))
    fos.show(r)
    '''
    
    mat = np.vstack([vertices[indmax1],vertices[indmax2],vertices[indmax3]])

    print np.dot(mat,np.transpose(mat))
    # this is to assess how orthogonal the triple is
    print np.dot(summary['0']['evecs'],np.transpose(mat))
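
The data loading at the top of this test is the standard idiom for fixtures shipped inside a package: anchor every file at os.path.dirname(__file__) (opj here is an alias for os.path.join). A sketch of just that part, assuming the same data layout as the excerpt:

import os
import numpy as np

here = os.path.dirname(__file__)
bvals = np.load(os.path.join(here, 'data', 'small_64D.bvals.npy'))
eds = np.load(os.path.join(here, '..', 'matrices',
                           'evenly_distributed_sphere_362.npz'))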

Example 12

Project: OSCAAR Source File: calculateEphemerides.py
def calculateEphemerides(parFile):
    '''
        :INPUTS:
        parFile	 --	  path to the parameter file
        '''

    #parFile = 'umo.par'

    '''Parse the observatory .par file'''
    parFileText = open(os.path.join(os.path.dirname(oscaar.__file__),'extras','eph','observatories',parFile),'r').read().splitlines()

    def returnBool(value):
        '''Return booleans from strings'''
        if value.upper().strip() == 'TRUE': return True
        elif value.upper().strip() == 'FALSE': return False
    show_lt = 0.0   # default to UT output; overridden by a show_lt entry in the .par file
    for line in parFileText:
        parameter = line.split(':')[0]
        if len(line.split(':')) > 1:
            value = line.split(':')[1].strip()
            if parameter == 'name': observatory_name = value
            elif parameter == 'latitude': observatory_latitude = value
            elif parameter == 'longitude': observatory_longitude = value
            elif parameter == 'elevation': observatory_elevation = float(value)
            elif parameter == 'temperature': observatory_temperature = float(value)
            elif parameter == 'min_horizon': observatory_minHorizon = value
            elif parameter == 'start_date': startSem = gd2jd(eval(value))
            elif parameter == 'end_date': endSem = gd2jd(eval(value))
            elif parameter == 'mag_limit': mag_limit = float(value)
            elif parameter == 'band': band = value
            elif parameter == 'depth_limit': depth_limit = float(value)
            elif parameter == 'calc_transits': calcTransits = returnBool(value)
            elif parameter == 'calc_eclipses': calcEclipses = returnBool(value)
            elif parameter == 'html_out': htmlOut = returnBool(value)
            elif parameter == 'text_out': textOut = returnBool(value)
            elif parameter == 'twilight': twilightType = value
            elif parameter == 'show_lt': show_lt = float(value)
    from oscaar.extras.knownSystemParameters import getLatestParams
    exoplanetDB = getLatestParams.downloadAndPickle()

    ''' Set up observatory parameters '''
    observatory = ephem.Observer()
    observatory.lat = observatory_latitude          ## e.g. '38:58:50.16'; input format: deg:min:sec (str)
    observatory.long = observatory_longitude        ## e.g. '-76:56:13.92'; input format: deg:min:sec (str)
    observatory.elevation = observatory_elevation   ## meters
    observatory.temp = observatory_temperature      ## Celsius
    observatory.horizon = observatory_minHorizon    ## input format: deg:min:sec (str)

    def trunc(f, n):
        '''Truncates a float f to n decimal places without rounding'''
        slen = len('%.*f' % (n, f))
        return str(f)[:slen]

    def RA(planet):
        '''Type: str, Units:  hours:min:sec'''
        return exoplanetDB[planet]['RA_STRING']
    def dec(planet):
        '''Type: str, Units:  deg:min:sec'''
        return exoplanetDB[planet]['DEC_STRING']
    def period(planet):
        '''Units:  days'''
        return np.float64(exoplanetDB[planet]['PER'])
    def epoch(planet):
        '''Tc at mid-transit. Units:  days'''
        if exoplanetDB[planet]['TT'] == '': return 0.0
        else: return np.float64(exoplanetDB[planet]['TT'])
    def duration(planet):
        '''Transit/eclipse duration. Units:  days'''
        if exoplanetDB[planet]['T14'] == '': return 0.0
        else: return float(exoplanetDB[planet]['T14'])
    def V(planet):
        '''V mag'''
        if exoplanetDB[planet]['V'] == '': return 0.0
        else: return float(exoplanetDB[planet]['V'])
    def KS(planet):
        '''KS mag'''
        if exoplanetDB[planet]['KS'] == '': return 0.0
        else: return float(exoplanetDB[planet]['KS'])
    
    def bandMagnitude(planet):
        if band.upper() == 'V':
            return V(planet)
        elif band.upper() == 'K':
            return KS(planet)
    def depth(planet):
        '''Transit depth'''
        if exoplanetDB[planet]['DEPTH'] == '': return 0.0
        else: return float(exoplanetDB[planet]['DEPTH'])

    def transitBool(planet):
        '''True if exoplanet is transiting, False if detected by other means'''
        if exoplanetDB[planet]['TRANSIT'] == '0': return 0
        elif exoplanetDB[planet]['TRANSIT'] == '1': return 1
    ########################################################################################
    ########################################################################################

    def datestr2list(datestr):
        ''' Take strings of the form: "2013/1/18 20:08:18" and return them as a
            tuple of the same parameters'''
        year,month,others = datestr.split('/')
        day, time = others.split(' ')
        hour,minute,sec = time.split(':')
        return (int(year),int(month),int(day),int(hour),int(minute),int(sec))

    def list2datestr(inList):
        '''Inverse of datestr2list'''
        inList = map(str,inList)
        return inList[0]+'/'+inList[1]+'/'+inList[2]+' '+inList[3].zfill(2)+':'+inList[4].zfill(2)+':'+inList[5].zfill(2)

    def list2datestrCSV(inList):
        '''Like list2datestr, but comma-separates the date and time fields'''
        inList = map(str,inList)
        return inList[0]+'/'+inList[1]+'/'+inList[2]+','+inList[3].zfill(2)+':'+inList[4].zfill(2)+':'+inList[5].zfill(2)


    def list2datestrHTML(inList,alt,direction):
        '''Format a datestr2list-style tuple as an HTML table cell (UT), with altitude and direction'''
        inList = map(str,inList)
        return inList[1].zfill(2)+'/<strong>'+inList[2].zfill(2)+'</strong>, '+inList[3].zfill(2)+':'+inList[4].split('.')[0].zfill(2)+'<br /> '+alt+'&deg; '+direction

    def list2datestrHTML_UTnoaltdir(inList,alt,direction):
        '''Like list2datestrHTML, but UT only, without altitude/direction'''
        inList = map(str,inList)
        return inList[1].zfill(2)+'/<strong>'+inList[2].zfill(2)+'</strong>, '+inList[3].zfill(2)+':'+inList[4].split('.')[0].zfill(2)

    def list2datestrHTML_LT(inList,alt,direction):
        '''Like list2datestrHTML, but converts the time to the observatory's local time zone first'''
        tempDate = ephem.Date(inList)
        inList = ephem.Date(ephem.localtime(tempDate)).tuple()
        inList = map(str,inList)
        return inList[1].zfill(2)+'/<strong>'+inList[2].zfill(2)+'</strong>, '+inList[3].zfill(2)+':'+inList[4].split('.')[0].zfill(2)+'<br /> '+alt+'&deg; '+direction

    def simbadURL(planet):
        if exoplanetDB[planet]['SIMBADURL'] == '': return 'http://simbad.harvard.edu/simbad/'
        else: return exoplanetDB[planet]['SIMBADURL']

    def RADecHTML(planet):
        return '<a href="'+simbadURL(planet)+'">'+RA(planet).split('.')[0]+'<br />'+dec(planet).split('.')[0]+'</a>'

    def constellation(planet):
        return exoplanetDB[planet]['Constellation']

    def orbitReference(planet):
        return exoplanetDB[planet]['TRANSITURL']

    def orbitReferenceYear(planet):
        '''ORBREF returns the citation in the format "<first author> <year>", so parse and return just the year'''
        return exoplanetDB[planet]['ORBREF'].split()[1]

    def nameWithLink(planet):
        return '<a href="'+orbitReference(planet)+'">'+planet+'</a>'

    def mass(planet):
        if exoplanetDB[planet]['MASS'] == '': return '---'
        else: return trunc(float(exoplanetDB[planet]['MASS']),2)

    def semimajorAxis(planet):
        #return trunc(0.004649*float(exoplanetDB[planet]['AR'])*float(exoplanetDB[planet]['RSTAR']),3)   ## Convert from solar radii to AU
        return trunc(float(exoplanetDB[planet]['SEP']),3)

    def radius(planet):
        if exoplanetDB[planet]['R'] == '': return '---'
        else: return trunc(float(exoplanetDB[planet]['R']),2) ## value is in Jupiter radii; no conversion needed

    def midTransit(Tc, P, start, end):
        '''Calculate mid-transit times between Julian Dates start and end, using a
            kernel of 2500 orbital phases after T_c (for a 2-day period, 2500
            phases spans about 14 years)'''
        Nepochs = np.arange(0,2500,dtype=np.float64)
        transitTimes = Tc + P*Nepochs
        transitTimesInSem = transitTimes[(transitTimes < end)*(transitTimes > start)]
        return transitTimesInSem

    def midEclipse(Tc, P, start, end):
        '''Calculate mid-eclipse times between Julian Dates start and end, using a
            kernel of 2500 orbital phases after T_c (for a 2-day period, 2500
            phases spans about 14 years)'''
        Nepochs = np.arange(0,2500,dtype=np.float64)
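        # mid-eclipse falls half an orbital period after mid-transit
        # (assuming a circular orbit)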
        transitTimes = Tc + P*(0.5 + Nepochs)
        transitTimesInSem = transitTimes[(transitTimes < end)*(transitTimes > start)]
        return transitTimesInSem

    '''Choose which planets from the database to include in the search, 
        assemble a list of them.'''
    planets = []
    for planet in exoplanetDB:
        if bandMagnitude(planet) != 0.0 and depth(planet) != 0.0 and float(bandMagnitude(planet)) <= mag_limit and \
           float(depth(planet)) >= depth_limit and transitBool(planet):
            planets.append(planet)

    if calcTransits: transits = {}
    if calcEclipses: eclipses = {}
    for day in np.arange(startSem,endSem+1):
        if calcTransits: transits[str(day)] = []
        if calcEclipses: eclipses[str(day)] = []
    planetsNeverUp = []


    def azToDirection(az):
        az = float(az)
        if (az >= 0 and az < 22.5) or (az >= 337.5 and az < 360): return 'N'
        elif az >= 22.5 and az < 67.5:  return 'NE'
        elif az >= 67.5 and az < 112.5:  return 'E'
        elif az >= 112.5 and az < 157.5:  return 'SE'
        elif az >= 157.5 and az < 202.5:  return 'S'
        elif az >= 202.5 and az < 247.5:  return 'SW'
        elif az >= 247.5 and az < 292.5:  return 'W'	
        elif az >= 292.5 and az < 337.5:  return 'NW'

    def ingressEgressAltAz(planet,observatory,ingress,egress):
        altitudes = []
        directions = []
        for time in [ingress,egress]:
            observatory.date = list2datestr(jd2gd(time))
            star = ephem.FixedBody()
            star._ra = ephem.hours(RA(planet))
            star._dec = ephem.degrees(dec(planet))
            star.compute(observatory)
            altitudes.append(str(ephem.degrees(star.alt)).split(":")[0])
            directions.append(azToDirection(str(ephem.degrees(star.az)).split(":")[0]))
        ingressAlt,egressAlt = altitudes
        ingressDir,egressDir = directions
        return ingressAlt,ingressDir,egressAlt,egressDir

    def aboveHorizonForEvent(planet,observatory,ingress,egress):
        altitudes = []
        for time in [ingress,egress]:
            observatory.date = list2datestr(jd2gd(time))
            star = ephem.FixedBody()
            star._ra = ephem.hours(RA(planet))
            star._dec = ephem.degrees(dec(planet))
            star.compute(observatory)
            #altitudes.append(str(ephem.degrees(star.alt)).split(":")[0])
            altitudes.append(float(repr(star.alt))/(2*np.pi) * 360)	## Convert altitudes to degrees
        #if altitudes[0] > 0 and altitudes[1] > 0: return True
        if altitudes[0] > float(ephem.degrees(observatory_minHorizon))*(180/np.pi) and altitudes[1] > float(ephem.degrees(observatory_minHorizon))*(180/np.pi): return True
        else: return False

    def eventAfterTwilight(planet,observatory,ingress,egress,twilightType):
        altitudes = []
        for time in [ingress,egress]:
            observatory.date = list2datestr(jd2gd(time))
            sun = ephem.Sun()
            sun.compute(observatory)
            altitudes.append(float(repr(sun.alt))/(2*np.pi) * 360)	## Convert altitudes to degrees
        if altitudes[0] < float(twilightType) and altitudes[1] < float(twilightType): return True
        else: return False

    for planet in planets:		
        '''Compute all of the coming transits and eclipses for a long time out'''
        allTransitEpochs = midTransit(epoch(planet),period(planet),startSem,endSem)
        allEclipseEpochs = midEclipse(epoch(planet),period(planet),startSem,endSem)
        for day in np.arange(startSem,endSem+1,1.0):
            try:
                '''For each day, gather the transits and eclipses that happen'''
                transitEpochs = allTransitEpochs[(allTransitEpochs <= day+0.5)*(allTransitEpochs > day-0.5)]
                eclipseEpochs = allEclipseEpochs[(allEclipseEpochs <= day+0.5)*(allEclipseEpochs > day-0.5)]
                if calcTransits and len(transitEpochs) != 0:
                    transitEpoch = transitEpochs[0]
                    ingress = transitEpoch-duration(planet)/2
                    egress = transitEpoch+duration(planet)/2
                    
                    ''' Calculate positions of host stars'''
                    star = ephem.FixedBody()
                    star._ra = ephem.hours(RA(planet))
                    star._dec = ephem.degrees(dec(planet))
                    star.compute(observatory)
                    exoplanetDB[planet]['Constellation'] = ephem.constellation(star)[0]
                    
                    '''If star is above horizon and sun is below horizon during transit/eclipse:'''		
                    if aboveHorizonForEvent(planet,observatory,ingress,egress) and eventAfterTwilight(planet,observatory,ingress,egress,twilightType):
                        ingressAlt,ingressDir,egressAlt,egressDir = ingressEgressAltAz(planet,observatory,ingress,egress)
                        transitInfo = [planet,transitEpoch,duration(planet)/2,'transit',ingressAlt,ingressDir,egressAlt,egressDir]
                        transits[str(day)].append(transitInfo)		
                if calcEclipses and len(eclipseEpochs) != 0:
                    eclipseEpoch = eclipseEpochs[0]
                    ingress = eclipseEpoch-duration(planet)/2
                    egress = eclipseEpoch+duration(planet)/2
                    
                    ''' Calculate positions of host stars'''
                    star = ephem.FixedBody()
                    star._ra = ephem.hours(RA(planet))
                    star._dec = ephem.degrees(dec(planet))
                    star.compute(observatory)
                    exoplanetDB[planet]['Constellation'] = ephem.constellation(star)[0]
                    
                    if aboveHorizonForEvent(planet,observatory,ingress,egress) and eventAfterTwilight(planet,observatory,ingress,egress,twilightType):
                        ingressAlt,ingressDir,egressAlt,egressDir = ingressEgressAltAz(planet,observatory,ingress,egress)
                        eclipseInfo = [planet,eclipseEpoch,duration(planet)/2,'eclipse',ingressAlt,ingressDir,egressAlt,egressDir]
                        eclipses[str(day)].append(eclipseInfo)	
            
            except ephem.NeverUpError:
                if str(planet) not in planetsNeverUp:
                    print 'Note: planet %s is never above the horizon at this observing location.' % (planet)
                    planetsNeverUp.append(str(planet))

    def removeEmptySets(dictionary):
        '''Remove days with no transits/eclipses from the transit/eclipse dictionary.
            We can't delete keys while iterating over the dictionary with a for loop,
            so step through days with a while loop until no empty entries remain'''
        dayCounter = startSem
        while any(dictionary[day] == [] for day in dictionary):	
            if dictionary[str(dayCounter)] == []:
                del dictionary[str(dayCounter)]
            dayCounter += 1

    if calcTransits: removeEmptySets(transits)
    if calcEclipses: removeEmptySets(eclipses)

    events = {}
    def mergeDictionaries(dict):
        for key in dict:
            if key not in events:	## If key does not exist in events,
                if np.shape(dict[key])[0] == 1:	## If new event is the only one on that night, add only it
                    events[key] = [dict[key][0]]
                else:			## If there were multiple events that night, add them each
                    events[key] = []
                    for event in dict[key]:
                        events[key].append(event)
            else:
                if np.shape(dict[key])[0] > 1: ## If there are multiple entries to append,
                    for event in dict[key]:
                        events[key].append(event)
                else:							## If there is only one to add,
                    events[key].append(dict[key][0])
    if calcTransits: mergeDictionaries(transits)
    if calcEclipses: mergeDictionaries(eclipses)

    if textOut: 
        allKeys = events.keys()
        allKeys = np.array(allKeys)[np.argsort(allKeys)]
        report = open(os.path.join(os.path.dirname(oscaar.__file__),'extras','eph','ephOutputs','eventReport.csv'),'w')
        firstLine = 'Planet,Event,Ingress Date, Ingress Time (UT) ,Altitude at Ingress,Azimuth at Ingress,Egress Date, Egress Time (UT) ,Altitude at Egress,Azimuth at Egress,V mag,Depth,Duration,RA,Dec,Const.,Mass,Semimajor Axis (AU),Radius (R_J)\n'
        report.write(firstLine)
        
        for key in allKeys:
            def writeCSVtransit():
                middle = ','.join([planet[0],str(planet[3]),list2datestrCSV(jd2gd(float(planet[1]-planet[2]))),planet[4],planet[5],\
                                   list2datestrCSV(jd2gd(float(planet[1]+planet[2]))),planet[6],planet[7],trunc(bandMagnitude(str(planet[0])),2),\
                                   trunc(depth(planet[0]),4),trunc(24.0*duration(planet[0]),2),RA(planet[0]),dec(planet[0]),constellation(planet[0]),\
                                   mass(planet[0]),semimajorAxis(planet[0]),radius(planet[0])])
                line = middle+'\n'
                report.write(line)
            
            def writeCSVeclipse():
                middle = ','.join([planet[0],str(planet[3]),list2datestrCSV(jd2gd(float(planet[1]-planet[2]))),planet[4],planet[5],\
                                   list2datestrCSV(jd2gd(float(planet[1]+planet[2]))),planet[6],planet[7],trunc(bandMagnitude(str(planet[0])),2),\
                                   trunc(depth(planet[0]),4),trunc(24.0*duration(planet[0]),2),RA(planet[0]),dec(planet[0]),constellation(planet[0]),\
                                   mass(planet[0]),semimajorAxis(planet[0]),radius(planet[0])])
                line = middle+'\n'
                report.write(line)
            
            if np.shape(events[key])[0] > 1:
                elapsedTime = []
                
                for i in range(1,len(events[key])):
                    nextPlanet = events[key][i]
                    planet = events[key][i-1]
                    double = False
                    '''If the other planet's ingress is before this one's egress, then'''
                    if ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]-nextPlanet[2])))) -\
                        ephem.Date(list2datestr(jd2gd(float(planet[1]+planet[2])))) > 0.0:
                            double = True
                            elapsedTime.append(ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]-nextPlanet[2])))) - \
                                               ephem.Date(list2datestr(jd2gd(float(planet[1]+planet[2])))))
                    
                    if ephem.Date(list2datestr(jd2gd(float(planet[1]-planet[2])))) - \
                        ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]+nextPlanet[2])))) > 0.0:
                            '''If the other planet's egress is before this one's ingress, then'''
                            double = True
                            elapsedTime.append(ephem.Date(list2datestr(jd2gd(float(planet[1]-planet[2])))) - \
                                               ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]+nextPlanet[2])))))
                
                for planet in events[key]:
                    if calcTransits and planet[3] == 'transit':
                        writeCSVtransit()
                    if calcEclipses and planet[3] == 'eclipse':
                        writeCSVeclipse()		  
            
            elif np.shape(events[key])[0] == 1:
                planet = events[key][0]
                if calcTransits and planet[3] == 'transit':
                    writeCSVtransit()
                if calcEclipses and planet[3] == 'eclipse':
                    writeCSVeclipse()
        # report.write('\n')
        
        report.close()
    #print exoplanetDB['HD 209458 b']
    print 'calculateEphemerides.py: Done'


    if htmlOut: 
        '''Write out an HTML report with the transits/eclipses: the times of
            ingress and egress, whether each event is a transit or eclipse, and the
            elapsed time between ingress/egress of the temporally isolated events'''
        report = open(os.path.join(os.path.dirname(oscaar.__file__),'extras','eph','ephOutputs','eventReport.html'),'w')
        allKeys = events.keys()
        ## http://www.kryogenix.org/code/browser/sorttable/
        htmlheader = '\n'.join([
                                '<!doctype html>',\
                                '<html>',\
                                '	<head>',\
                                '		<meta http-equiv="content-type" content="text/html; charset=UTF-8" />',\
                                '		<title>Ephemeris</title>',\
                                '		<link rel="stylesheet" href="stylesheetEphem.css" type="text/css" />',\
                                '		 <script type="text/javascript">',\
                                '		  function changeCSS(cssFile, cssLinkIndex) {',\
                                '			var oldlink = document.getElementsByTagName("link").item(cssLinkIndex);',\
                                '			var newlink = document.createElement("link");',\
                                '			newlink.setAttribute("rel", "stylesheet");',\
                                '			newlink.setAttribute("type", "text/css");',\
                                '			newlink.setAttribute("href", cssFile);',\
                                '			document.getElementsByTagName("head").item(0).replaceChild(newlink, oldlink);',\
                                '		  }',\
                                '		</script>',\
                                '	   <script src="./sorttable.js"></script>',\
                                '	</head>',\
                                '	<body>',\
                                '		<div id="textDiv">',\
                                '		<h1>Ephemerides for: '+observatory_name+'</h1>',\
                                '		<h2>Observing dates (UT): '+list2datestr(jd2gd(startSem)).split(' ')[0]+' - '+list2datestr(jd2gd(endSem)).split(' ')[0]+'</h2>',\
                                '	   Click the column headers to sort. ',\
                                '		<table class="daynight" id="eph">',\
                                '		<tr><th colspan=2>Toggle Color Scheme</th></tr>',\
                                '		<tr><td><a href="#" onclick="changeCSS(\'stylesheetEphem.css\', 0);">Day</a></td><td><a href="#" onclick="changeCSS(\'stylesheetEphemDark.css\', 0);">Night</a></td></tr>',\
                                '		</table>'])
        
        if show_lt == 0:
            tableheader = '\n'.join([
                                     '\n		<table class="sortable" id="eph">',\
                                     '		<tr> <th>Planet<br /><span class="small">[Link: Orbit ref.]</span></th>	  <th>Event<br /><span class="small">[Transit/<br />Eclipse]</span></th>	<th>Ingress <br /><span class="small">(MM/DD<br />HH:MM (UT), Alt., Dir.)</span></th> <th>Egress <br /><span class="small">(MM/DD<br />HH:MM (UT), Alt., Dir.)</span></th>'+\
                                     '<th>'+band.upper()+'</th> <th>Depth<br />(mag)</th> <th>Duration<br />(hrs)</th> <th>RA/Dec<br /><span class="small">[Link: Simbad ref.]</span></th> <th>Const.</th> <th>Mass<br />(M<sub>J</sub>)</th>'+\
                                     '<th>Radius<br />(R<sub>J</sub>)</th> <th>Ref. Year</th></tr>'])
        else:
            tableheader = '\n'.join([
                                     '\n        <table class="sortable" id="eph">',\
                                     '        <tr> <th>Planet<br /><span class="small">[Link: Orbit ref.]</span></th>      <th>Event<br /><span class="small">[Transit/<br />Eclipse]</span></th> <th>Ingress <br /><span class="small">(MM/DD<br />HH:MM (LT), Alt., Dir.)</span></th> <th>Egress <br /><span class="small">(MM/DD<br />HH:MM (LT), Alt., Dir.)</span></th>   '+\
                                     '<th>'+band.upper()+'</th> <th>Depth<br />(mag)</th> <th>Duration<br />(hrs)</th> <th>RA/Dec<br /><span class="small">[Link: Simbad ref.]</span></th> <th>Const.</th> <th>Mass<br />(M<sub>J</sub>)</th>'+\
                                     ' <th>Radius<br />(R<sub>J</sub>)</th> <th>Ref. Year</th> <th>Ingress <br /><span class="small">(MM/DD<br />HH:MM (UT))</span></th> <th>Egress <br /><span class="small">(MM/DD<br />HH:MM, (UT))</span></th></tr>'])
    
        
        tablefooter = '\n'.join([
                                 '\n		</table>',\
                                 '		<br /><br />',])
        htmlfooter = '\n'.join([
                                '\n		<p class="headinfo">',\
                                '		Developed by Brett Morris with great gratitude for the help of <a href="http://rhodesmill.org/pyephem/">PyEphem</a>,<br/>',\
                                '		and for up-to-date exoplanet parameters from <a href="http://www.exoplanets.org/">exoplanets.org</a> (<a href="http://adsabs.harvard.edu/abs/2011PASP..123..412W">Wright et al. 2011</a>).<br />',\
                                '		</p>',\
                                '		</div>',\
                                '	</body>',\
                                '</html>'])
        report.write(htmlheader)
        report.write(tableheader)
        
        allKeys = np.array(allKeys)[np.argsort(allKeys)]
        for key in allKeys:
            def writeHTMLtransit():
                indentation = '		'
                if show_lt != 0: 
                    middle = '</td><td>'.join([nameWithLink(planet[0]),str(planet[3]),list2datestrHTML_LT(jd2gd(float(planet[1]-planet[2])),planet[4],planet[5]),\
                                               list2datestrHTML_LT(jd2gd(float(planet[1]+planet[2])),planet[6],planet[7]),trunc(bandMagnitude(str(planet[0])),2),\
                                               trunc(depth(planet[0]),4),trunc(24.0*duration(planet[0]),2),RADecHTML(planet[0]),constellation(planet[0]),\
                                               mass(planet[0]),radius(planet[0]),orbitReferenceYear(planet[0]),list2datestrHTML_UTnoaltdir(jd2gd(float(planet[1]-planet[2])),planet[4],planet[5]),\
                                               list2datestrHTML_UTnoaltdir(jd2gd(float(planet[1]+planet[2])),planet[6],planet[7])])
                else:
                    middle = '</td><td>'.join([nameWithLink(planet[0]),str(planet[3]),list2datestrHTML(jd2gd(float(planet[1]-planet[2])),planet[4],planet[5]),\
                                               list2datestrHTML(jd2gd(float(planet[1]+planet[2])),planet[6],planet[7]),trunc(bandMagnitude(str(planet[0])),2),\
                                               trunc(depth(planet[0]),4),trunc(24.0*duration(planet[0]),2),RADecHTML(planet[0]),constellation(planet[0]),\
                                               mass(planet[0]),radius(planet[0]),orbitReferenceYear(planet[0])])
                line = indentation+'<tr><td>'+middle+'</td></tr>\n'
                report.write(line)
            
            def writeHTMLeclipse():
                indentation = '		'
                if show_lt != 0:
                    middle = '</td><td>'.join([nameWithLink(planet[0]),str(planet[3]),list2datestrHTML_LT(jd2gd(float(planet[1]-planet[2])),planet[4],planet[5]),\
                                               list2datestrHTML_LT(jd2gd(float(planet[1]+planet[2])),planet[6],planet[7]),trunc(bandMagnitude(str(planet[0])),2),\
                                               '---',trunc(24.0*duration(planet[0]),2),RADecHTML(planet[0]),constellation(planet[0]),\
                                               mass(planet[0]),radius(planet[0]),orbitReferenceYear(planet[0]),list2datestrHTML_UTnoaltdir(jd2gd(float(planet[1]-planet[2])),planet[4],planet[5]),\
                                               list2datestrHTML_UTnoaltdir(jd2gd(float(planet[1]+planet[2])),planet[6],planet[7])])
                else: 
                    middle = '</td><td>'.join([nameWithLink(planet[0]),str(planet[3]),list2datestrHTML(jd2gd(float(planet[1]-planet[2])),planet[4],planet[5]),\
                                               list2datestrHTML(jd2gd(float(planet[1]+planet[2])),planet[6],planet[7]),trunc(bandMagnitude(str(planet[0])),2),\
                                               '---',trunc(24.0*duration(planet[0]),2),RADecHTML(planet[0]),constellation(planet[0]),\
                                               mass(planet[0]),radius(planet[0]),orbitReferenceYear(planet[0])])

                line = indentation+'<tr><td>'+middle+'</td></tr>\n'
                report.write(line)
            
            
            if np.shape(events[key])[0] > 1:
                elapsedTime = []
                
                for i in range(1,len(events[key])):
                    nextPlanet = events[key][i]
                    planet = events[key][i-1]
                    double = False
                    '''If the other planet's ingress is before this one's egress, then'''
                    if ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]-nextPlanet[2])))) -\
                        ephem.Date(list2datestr(jd2gd(float(planet[1]+planet[2])))) > 0.0:
                            double = True
                            elapsedTime.append(ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]-nextPlanet[2])))) - \
                                               ephem.Date(list2datestr(jd2gd(float(planet[1]+planet[2])))))
                    
                    if ephem.Date(list2datestr(jd2gd(float(planet[1]-planet[2])))) - \
                        ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]+nextPlanet[2])))) > 0.0:
                            '''If the other planet's egress is before this one's ingress, then'''
                            double = True
                            elapsedTime.append(ephem.Date(list2datestr(jd2gd(float(planet[1]-planet[2])))) - \
                                               ephem.Date(list2datestr(jd2gd(float(nextPlanet[1]+nextPlanet[2])))))
                
                for planet in events[key]:
                    if calcTransits and planet[3] == 'transit':
                        writeHTMLtransit()
                    if calcEclipses and planet[3] == 'eclipse':
                        writeHTMLeclipse()		  
            elif np.shape(events[key])[0] == 1:
                planet = events[key][0]
                if calcTransits and planet[3] == 'transit':
                    writeHTMLtransit()
                if calcEclipses and planet[3] == 'eclipse':
                    writeHTMLeclipse()
        report.write(tablefooter)
        report.write(htmlfooter)
        report.close()
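
Example 12 repeatedly resolves its data files with os.path.join(os.path.dirname(oscaar.__file__), ...), which anchors paths to the installed package rather than the current working directory. A minimal sketch of the same pattern, using the standard-library json package as a stand-in (the 'extras' subpath is hypothetical):

import os
import json  # any imported package works the same way

pkg_dir = os.path.dirname(json.__file__)       # directory of the package
data_path = os.path.join(pkg_dir, 'extras')    # hypothetical data subfolder
print(data_path)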

Example 13

Project: AfterQC Source File: preprocesser.py
    def run(self):
        if self.options.debubble:
            self.loadBubbleCircles()

        #read1_file is required
        read1_file = fastq.Reader(self.options.read1_file)

        #no front trim if sequence is barcoded
        if self.options.barcode:
            self.options.trim_front = 0

        reporter = QCReporter()

        self.r1qc_prefilter = QualityControl(self.options.qc_sample, self.options.qc_kmer)
        self.r2qc_prefilter = QualityControl(self.options.qc_sample, self.options.qc_kmer)
        self.r1qc_prefilter.statFile(self.options.read1_file)
        if self.options.read2_file != None:
            self.r2qc_prefilter.statFile(self.options.read2_file)

        self.r1qc_postfilter = QualityControl(self.options.qc_sample, self.options.qc_kmer)
        self.r2qc_postfilter = QualityControl(self.options.qc_sample, self.options.qc_kmer)

        readLen = self.r1qc_prefilter.readLen
        overlap_histgram = [0 for x in xrange(readLen+1)]
        distance_histgram = [0 for x in xrange(readLen+1)]

        #auto detect trim front and trim tail
        if self.options.trim_front == -1 or self.options.trim_tail == -1:
            #auto trim for read1
            trimFront, trimTail = self.r1qc_prefilter.autoTrim()
            if self.options.trim_front == -1:
                self.options.trim_front = trimFront
            if self.options.trim_tail == -1:
                self.options.trim_tail = trimTail
            #auto trim for read2
            if self.options.read2_file != None:
                # check if we should keep same trimming for read1/read2 to keep their length identical
                # this option is on by default because lots of dedup algorithms require this feature
                if self.options.trim_pair_same:
                    self.options.trim_front2 = self.options.trim_front
                    self.options.trim_tail2 = self.options.trim_tail
                else:
                    trimFront2, trimTail2 = self.r2qc_prefilter.autoTrim()
                    if self.options.trim_front2 == -1:
                        self.options.trim_front2 = trimFront2
                    if self.options.trim_tail2 == -1:
                        self.options.trim_tail2 = trimTail2
                
        print(self.options.read1_file + " options:")
        print(self.options)
        
        #if good output folder not specified, set it as the same folder of read1 file
        good_dir = self.options.good_output_folder
        if good_dir == None:
            good_dir = os.path.dirname(self.options.read1_file)

        #if bad output folder not specified, create a sibling "bad" folder next to the good folder
        bad_dir = self.options.bad_output_folder
        if bad_dir == None:
            bad_dir = os.path.join(os.path.dirname(os.path.dirname(good_dir+"/")), "bad")

        #if overlap output folder not specified, create a sibling "overlap" folder next to the good folder
        overlap_dir = self.options.overlap_output_folder
        if overlap_dir == None:
            overlap_dir = os.path.join(os.path.dirname(os.path.dirname(good_dir+"/")), "overlap")

        #save QC results in a sibling "QC" folder next to the good folder
        qc_base_folder = os.path.join(os.path.dirname(os.path.dirname(good_dir+"/")), "QC")
        if not os.path.exists(qc_base_folder):
            os.makedirs(qc_base_folder)
        qc_dir =  os.path.join(qc_base_folder, os.path.basename(self.options.read1_file))
        if not os.path.exists(qc_dir):
            os.makedirs(qc_dir)
            
        if not os.path.exists(good_dir):
            os.makedirs(good_dir)
            
        if not os.path.exists(bad_dir):
            os.makedirs(bad_dir)

        if self.options.store_overlap and self.options.read2_file != None and (not os.path.exists(overlap_dir)):
            os.makedirs(overlap_dir)
        
        good_read1_file = None
        bad_read1_file = None
        overlap_read1_file = None
        if not self.options.qc_only:
            good_read1_file = fastq.Writer(os.path.join(good_dir, getMainName(self.options.read1_file)+".good.fq"))
            bad_read1_file = fastq.Writer(os.path.join(bad_dir, getMainName(self.options.read1_file)+".bad.fq"))

            overlap_read1_file = None
            if self.options.store_overlap:
                overlap_read1_file = fastq.Writer(os.path.join(overlap_dir, getMainName(self.options.read1_file)+".overlap.fq"))
        
        #other files are optional
        read2_file = None
        good_read2_file = None
        bad_read2_file = None
        overlap_read2_file = None

        index1_file = None
        good_index1_file = None
        bad_index1_file = None
        overlap_index1_file = None

        index2_file = None
        good_index2_file = None
        bad_index2_file = None
        overlap_index2_file = None
        
        #if other files are specified, then read them
        if self.options.read2_file != None:
            read2_file = fastq.Reader(self.options.read2_file)
            if not self.options.qc_only:
                good_read2_file = fastq.Writer(os.path.join(good_dir, getMainName(self.options.read2_file)+".good.fq"))
                bad_read2_file = fastq.Writer(os.path.join(bad_dir, getMainName(self.options.read2_file)+".bad.fq"))
                if self.options.store_overlap and self.options.read2_file != None:
                    overlap_read2_file = fastq.Writer(os.path.join(overlap_dir, getMainName(self.options.read2_file)+".overlap.fq"))
        if self.options.index1_file != None:
            index1_file = fastq.Reader(self.options.index1_file)
            if not self.options.qc_only:
                good_index1_file = fastq.Writer(os.path.join(good_dir, getMainName(self.options.index1_file)+".good.fq"))
                bad_index1_file = fastq.Writer(os.path.join(bad_dir, getMainName(self.options.index1_file)+".bad.fq"))
                if self.options.store_overlap and self.options.read2_file != None:
                    overlap_index1_file = fastq.Writer(os.path.join(overlap_dir, getMainName(self.options.index1_file)+".overlap.fq"))
        if self.options.index2_file != None:
            index2_file = fastq.Reader(self.options.index2_file)
            if not self.options.qc_only:
                good_index2_file = fastq.Writer(os.path.join(good_dir, getMainName(self.options.index2_file)+".good.fq"))
                bad_index2_file = fastq.Writer(os.path.join(bad_dir, getMainName(self.options.index2_file)+".bad.fq"))
                if self.options.store_overlap and self.options.read2_file != None:
                    overlap_index2_file = fastq.Writer(os.path.join(overlap_dir, getMainName(self.options.index2_file)+".overlap.fq"))
            
        r1 = None
        r2 = None
        i1 = None
        i2 = None

        # stat numbers
        TOTAL_BASES = 0
        GOOD_BASES = 0
        TOTAL_READS = 0
        GOOD_READS = 0
        BAD_READS = 0
        BADBCD1 = 0
        BADBCD2 = 0
        BADTRIM1 = 0
        BADTRIM2 = 0
        BADBBL = 0
        BADLEN = 0
        BADPOL = 0
        BADLQC = 0
        BADNCT = 0
        BADINDEL = 0
        BADMISMATCH = 0
        READ_CORRECTED = 0
        BASE_CORRECTED = 0
        BASE_ZERO_QUAL_MASKED = 0
        OVERLAPPED = 0
        OVERLAP_LEN_SUM = 0
        OVERLAP_BASE_SUM = 0
        # error profiling by overlap analysis
        OVERLAP_BASE_ERR = 0
        OVERLAP_ERR_MATRIX = init_error_matrix()

        while True:
            r1 = read1_file.nextRead()
            if r1==None:
                break
            else:
                TOTAL_BASES += len(r1[1])
                
            if read2_file != None:
                r2 = read2_file.nextRead()
                if r2==None:
                    break
                else:
                    TOTAL_BASES += len(r2[1])
            if index1_file != None:
                i1 = index1_file.nextRead()
                if i1==None:
                    break
            if index2_file != None:
                i2 = index2_file.nextRead()
                if i2==None:
                    break

            TOTAL_READS += 1
                    
            #barcode processing
            if self.options.barcode:
                barcodeLen1 = barcodeprocesser.detectBarcode(r1[1], self.options.barcode_length, self.options.barcode_verify)
                if barcodeLen1 == 0:
                    self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADBCD1")
                    BADBCD1 += 1
                    continue
                else:
                    if r2 == None:
                        barcodeprocesser.moveBarcodeToName(r1, self.options.barcode_length, self.options.barcode_verify)
                    else:
                        barcodeLen2 = barcodeprocesser.detectBarcode(r2[1], self.options.barcode_length, self.options.barcode_verify)
                        if barcodeLen2 == 0:
                            self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADBCD2")
                            BADBCD2 += 1
                            continue
                        else:
                            barcodeprocesser.moveAndTrimPair(r1, r2, barcodeLen1, barcodeLen2, self.options.barcode_verify)
            
            #trim
            if self.options.trim_front > 0 or self.options.trim_tail > 0:
                r1 = trim(r1, self.options.trim_front, self.options.trim_tail)
                if len(r1[1]) < 5:
                    self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADTRIM1")
                    BADTRIM1 += 1
                    continue
                if r2 != None:
                    r2 = trim(r2, self.options.trim_front2, self.options.trim_tail2)
                    if len(r2[1]) < 5:
                        self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADTRIM2")
                        BADTRIM2 += 1
                        continue

            #filter debubble
            if self.options.debubble:
                if self.isInBubble(r1[0]):
                    self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADBBL")
                    BADBBL += 1
                    continue
            
            #filter sequence length
            if len(r1[1])<self.options.seq_len_req:
                self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADLEN")
                BADLEN += 1
                continue
                    
            #check polyX
            if self.options.poly_size_limit > 0:
                poly1 = hasPolyX(r1[1], self.options.poly_size_limit, self.options.allow_mismatch_in_poly)
                poly2 = None
                if r2!=None:
                    poly2 = hasPolyX(r2[1], self.options.poly_size_limit, self.options.allow_mismatch_in_poly)
                if poly1!=None or poly2!=None:
                    self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADPOL")
                    BADPOL += 1
                    continue
            
            #check low quality count
            if self.options.unqualified_base_limit > 0:
                lowQual1 = lowQualityNum(r1, self.options.qualified_quality_phred)
                lowQual2 = 0
                if r2!=None:
                    lowQual2 = lowQualityNum(r2, self.options.qualified_quality_phred)
                if lowQual1 > self.options.unqualified_base_limit or lowQual2 > self.options.unqualified_base_limit:
                    self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADLQC")
                    BADLQC += 1
                    continue
            
            #check N number
            if self.options.n_base_limit > 0:
                nNum1 = nNumber(r1)
                nNum2 = 0
                if r2!=None:
                    nNum2 = nNumber(r2)
                if nNum1 > self.options.n_base_limit or nNum2 > self.options.n_base_limit:
                    self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADNCT")
                    BADNCT += 1
                    continue

            #check overlap and do error correction
            if r2!=None and (not self.options.no_overlap):
                (offset, overlap_len, distance) = util.overlap(r1[1], r2[1])
                overlap_histgram[overlap_len] += 1
                # deal with the case where the insert DNA is shorter than the
                # read length, which makes the offset negative
                if offset < 0 and overlap_len > 30:
                    # shift the junk bases
                    r1[1] = r1[1][0:overlap_len]
                    r1[3] = r1[3][0:overlap_len]
                    r2[1] = r2[1][-offset:-offset+overlap_len]
                    r2[3] = r2[3][-offset:-offset+overlap_len]
                    # then calc overlap again
                    (offset, overlap_len, distance) = util.overlap(r1[1], r2[1])
                if overlap_len>30:
                    OVERLAPPED += 1
                    distance_histgram[distance] += 1
                    OVERLAP_LEN_SUM += overlap_len
                    # we assume the distance is caused by sequencing errors
                    OVERLAP_BASE_SUM += overlap_len * 2
                    OVERLAP_BASE_ERR += distance
                    corrected = 0
                    zero_qual_masked = 0
                    if distance>0:
                        #try to fix low quality base
                        hamming = util.hammingDistance(r1[1][len(r1[1]) - overlap_len:], util.reverseComplement(r2[1][len(r2[1]) - overlap_len:]))
                        if hamming != distance:
                            self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADINDEL")
                            BADINDEL += 1
                            continue
                        #print(r1[1][len(r1[1]) - overlap_len:])
                        #print(util.reverseComplement(r2[1][len(r2[1]) - overlap_len:]))
                        #print(r1[3][len(r1[1]) - overlap_len:])
                        #print(util.reverse(r2[3][len(r2[1]) - overlap_len:]))
                        err_mtx = init_error_matrix()
                        for o in xrange(overlap_len):
                            b1 = r1[1][len(r1[1]) - overlap_len + o]
                            b2 = util.complement(r2[1][-o-1])
                            q1 = r1[3][len(r1[3]) - overlap_len + o]
                            q2 = r2[3][-o-1]
                            if b1 != b2:
                                # print(TOTAL_READS, o, b1, b2, q1, q2)
                                this_is_corrected = False
                                if util.qualNum(q1) >= 30 and util.qualNum(q2) <= 14:
                                    if b1!='N' and b2!='N':
                                        err_mtx[util.complement(b1)][util.complement(b2)] += 1
                                    if not self.options.no_correction:
                                        r2[1] = util.changeString(r2[1], -o-1, util.complement(b1))
                                        r2[3] = util.changeString(r2[3], -o-1, q1)
                                        corrected += 1
                                        this_is_corrected = True
                                elif util.qualNum(q2) >= 30 and util.qualNum(q1) <= 14:
                                    if b1!='N' and b2!='N':
                                        err_mtx[b2][b1] += 1
                                    if not self.options.no_correction:
                                        r1[1]= util.changeString(r1[1], len(r1[1]) - overlap_len + o, b2)
                                        r1[3] = util.changeString(r1[3], len(r1[3]) - overlap_len + o, q2)
                                        corrected += 1
                                        this_is_corrected = True
                                if not this_is_corrected:
                                    # mask both bases with zero quality if the mismatch was not corrected
                                    zero_qual = '!'
                                    r2[3] = util.changeString(r2[3], -o-1, zero_qual)
                                    r1[3] = util.changeString(r1[3], len(r1[3]) - overlap_len + o, zero_qual)
                                    zero_qual_masked += 1

                                if corrected + zero_qual_masked>= distance:
                                    break
                        #print(r1[1][len(r1[1]) - overlap_len:])
                        #print(util.reverseComplement(r2[1][len(r2[1]) - overlap_len:]))
                        #print(r1[3][len(r1[1]) - overlap_len:])
                        #print(util.reverse(r2[3][len(r2[1]) - overlap_len:]))
                        if corrected + zero_qual_masked == distance:
                            merge_error_matrix(OVERLAP_ERR_MATRIX, err_mtx)
                            READ_CORRECTED += 1
                            BASE_CORRECTED += corrected
                            # multiply by 2 since we mask bases by pair
                            BASE_ZERO_QUAL_MASKED += zero_qual_masked * 2
                        else:
                            self.writeReads(r1, r2, i1, i2, bad_read1_file, bad_read2_file, bad_index1_file, bad_index2_file, "BADMISMATCH")
                            BADMISMATCH += 1
                            continue
                    if distance == 0 or distance == corrected:
                        if self.options.store_overlap:
                            self.writeReads(getOverlap(r1, overlap_len), getOverlap(r2, overlap_len), i1, i2, overlap_read1_file, overlap_read2_file, overlap_index1_file, overlap_index2_file, None)

            #write to good       
            self.writeReads(r1, r2, i1, i2, good_read1_file, good_read2_file, good_index1_file, good_index2_file, None)
            GOOD_BASES += len(r1[1])
            if r2 != None:
                GOOD_BASES += len(r2[1])
            if self.options.qc_sample <=0 or TOTAL_READS < self.options.qc_sample:
                self.r1qc_postfilter.statRead(r1)
                if r2 != None:
                    self.r2qc_postfilter.statRead(r2)

            GOOD_READS += 1
            if self.options.qc_only and TOTAL_READS >= self.options.qc_sample:
                break

        self.r1qc_postfilter.qc()
        #self.r1qc_postfilter.plot(qc_dir, "R1-postfilter")
        if self.options.read2_file != None:
            self.r2qc_postfilter.qc()
            #self.r2qc_postfilter.plot(qc_dir, "R2-postfilter")
        
        #close all files
        if not self.options.qc_only:
            good_read1_file.flush()
            bad_read1_file.flush()
            if self.options.read2_file != None:
                good_read2_file.flush()
                bad_read2_file.flush()
            if self.options.index1_file != None:
                good_index1_file.flush()
                bad_index1_file.flush()
            if self.options.index2_file != None:
                good_index2_file.flush()
                bad_index2_file.flush()

        # print stat numbers
        BAD_READS = TOTAL_READS - GOOD_READS
        result = {}
        result['total_bases']=TOTAL_BASES
        result['good_bases']=GOOD_BASES
        result['total_reads']=TOTAL_READS
        result['good_reads']=GOOD_READS
        result['bad_reads']=BAD_READS
        result['bad_reads_with_bad_barcode']= BADBCD1 + BADBCD2
        result['bad_reads_with_reads_in_bubble']= BADBBL
        result['bad_reads_with_bad_read_length']= BADLEN + BADTRIM1 + BADTRIM2
        result['bad_reads_with_polyX']= BADPOL
        result['bad_reads_with_low_quality']=BADLQC
        result['bad_reads_with_too_many_N']= BADNCT
        result['bad_reads_with_bad_overlap']= BADMISMATCH + BADINDEL
        result['readlen'] = readLen

        # plot result bar figure
        labels = ['good reads', 'has_polyX', 'low_quality', 'too_short', 'too_many_N']
        counts = [GOOD_READS, BADPOL, BADLQC, BADLEN + BADTRIM1 + BADTRIM2, BADNCT]
        colors = ['#66BB11', '#FF33AF', '#FFD3F2', '#FFA322', '#FF8899']
        if self.options.read2_file != None:
            labels.append('bad_overlap')
            counts.append(BADMISMATCH + BADINDEL)
            colors.append('#FF6600')
        if self.options.debubble:
            labels.append('in_bubble')
            counts.append(BADBBL)
            colors.append('#EEBB00')
        if self.options.barcode:
            labels.append('bad_barcode')
            counts.append(BADBCD1 + BADBCD2)
            colors.append('#CCDD22')

        for i in xrange(len(counts)):
            type_percent = 0.0
            if TOTAL_READS > 0:
                type_percent = 100.0 * float(counts[i])/TOTAL_READS
            labels[i] = labels[i] + ": " + str(counts[i]) + "(" + str(type_percent) + "%)"

        reporter.addFigure('Good reads and bad reads after filtering', self.r1qc_prefilter.statPlotly(labels, counts, TOTAL_READS, 'filter_stat'), 'filter_stat', "")
        #self.r1qc_prefilter.plotFilterStats(labels, counts, colors, TOTAL_READS, os.path.join(qc_dir, "filter-stat.png"))

        stat={}
        # stat["options"]=self.options
        stat["summary"]=result
        stat["command"]=makeDict(self.options)
        stat["kmer_content"] = {}
        stat["kmer_content"]["read1_prefilter"] = self.r1qc_prefilter.topKmerCount[0:10]
        stat["kmer_content"]["read1_postfilter"] = self.r1qc_postfilter.topKmerCount[0:10]
        if self.options.read2_file != None:
            stat["kmer_content"]["read2_prefilter"] = self.r2qc_prefilter.topKmerCount[0:10]
            stat["kmer_content"]["read2_postfilter"] = self.r2qc_postfilter.topKmerCount[0:10]
            stat["overlap"]={}
            stat["overlap"]['overlapped_pairs']=OVERLAPPED
            if OVERLAPPED > 0:
                stat["overlap"]['average_overlap_length']=float(OVERLAP_LEN_SUM/OVERLAPPED)
            else:
                stat["overlap"]['average_overlap_length']=0.0
            stat["overlap"]['bad_mismatch_reads']=BADMISMATCH
            stat["overlap"]['bad_indel_reads']=BADINDEL
            stat["overlap"]['corrected_reads']=READ_CORRECTED
            stat["overlap"]['corrected_bases']=BASE_CORRECTED
            stat["overlap"]['zero_qual_masked']=BASE_ZERO_QUAL_MASKED
            if OVERLAP_BASE_SUM > 0:
                stat["overlap"]['error_rate']=float(OVERLAP_BASE_ERR)/float(OVERLAP_BASE_SUM)
            else:
                stat["overlap"]['error_rate']=0.0
            stat["overlap"]['error_matrix']=OVERLAP_ERR_MATRIX
            stat["overlap"]['edit_distance_histogram']=distance_histgram[0:10]
            reporter.addFigure('Sequence error distribution', self.r1qc_prefilter.errorPlotly(OVERLAP_ERR_MATRIX, 'error_matrix'), 'error_matrix', "")
            reporter.addFigure('Overlap length distribution', self.r1qc_prefilter.overlapPlotly(overlap_histgram, readLen, TOTAL_READS, 'overlap_stat'), 'overlap_stat', "")
            #self.r1qc_prefilter.plotOverlapHistgram(overlap_histgram, readLen, TOTAL_READS, os.path.join(qc_dir, "overlap.png"))

        stat_file = open(os.path.join(qc_dir, "after.json"), "w")
        stat_json = json.dumps(stat, sort_keys=True,indent=4, separators=(',', ': '))
        stat_file.write(stat_json)
        stat_file.close()

        self.addFiguresToReport(reporter)
        reporter.setStat(stat)
        reporter.output(os.path.join(qc_dir, "report.html"))
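
Example 13 derives its "bad", "overlap", and "QC" folders as siblings of the good-output folder via a nested os.path.dirname call. A small sketch of why the appended "/" matters (the paths are hypothetical):

import os

good_dir = '/data/run1/good'
# appending '/' guarantees a trailing separator, so the first dirname()
# strips only that and returns good_dir itself; the second dirname()
# then climbs to the parent directory
parent = os.path.dirname(os.path.dirname(good_dir + '/'))
print(os.path.join(parent, 'bad'))      # /data/run1/bad
print(os.path.join(parent, 'overlap'))  # /data/run1/overlap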

Example 14

Project: trackpy Source File: plot_directive.py
def run(arguments, content, options, state_machine, state, lineno):
    if arguments and content:
        raise RuntimeError("plot:: directive can't have both args and content")

    document = state_machine.document
    config = document.settings.env.config

    options.setdefault('include-source', config.plot_include_source)

    # determine input
    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if arguments:
        if not config.plot_basedir:
            source_file_name = os.path.join(rst_dir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))
        code = open(source_file_name, 'r').read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke on foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if 'format' in options:
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = makefig(code, source_file_name, build_dir, output_base,
                          config)
        errors = []
    except PlotError as err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s: %s" % (output_base, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        opts = [':%s: %s' % (key, val) for key, val in options.items()
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        if sphinx.__version__ >= "0.6":
            only_html = ".. only:: html"
            only_latex = ".. only:: latex"
        else:
            only_html = ".. htmlonly::"
            only_latex = ".. latexonly::"

        if j == 0:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            TEMPLATE,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                shutil.copyfile(fn, os.path.join(dest_dir,
                                                 os.path.basename(fn)))

    # copy script (if necessary)
    if source_file_name == rst_file:
        target_name = os.path.join(dest_dir, output_base + source_ext)
        f = open(target_name, 'w')
        f.write(unescape_doctest(code))
        f.close()

    return errors
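
The directory arithmetic in this directive, a relpath against the confdir followed by os.path.dirname and a join under a sibling of the doctree directory, can be isolated as below. A minimal sketch assuming POSIX-style paths; all names are illustrative:

import os

def build_dir_for(source_file, conf_dir, doctree_dir):
    # Fragment of the source path relative to the Sphinx confdir,
    # with the filename stripped off by os.path.dirname.
    rel_dir = os.path.dirname(os.path.relpath(source_file, conf_dir))
    rel_dir = rel_dir.lstrip(os.path.sep)  # same effect as the while loop above
    # Mirror that fragment under a 'plot_directive' sibling of the doctrees.
    return os.path.join(os.path.dirname(doctree_dir), 'plot_directive', rel_dir)

print(build_dir_for('/docs/api/intro.rst', '/docs', '/docs/_build/doctrees'))
# prints /docs/_build/plot_directive/api on POSIX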

Example 15

Project: fdroidserver Source File: init.py
Function: main
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("-d", "--distinguished-name", default=None,
                        help="X.509 'Distiguished Name' used when generating keys")
    parser.add_argument("--keystore", default=None,
                        help="Path to the keystore for the repo signing key")
    parser.add_argument("--repo-keyalias", default=None,
                        help="Alias of the repo signing key in the keystore")
    parser.add_argument("--android-home", default=None,
                        help="Path to the Android SDK (sometimes set in ANDROID_HOME)")
    parser.add_argument("--no-prompt", action="store_true", default=False,
                        help="Do not prompt for Android SDK path, just fail")
    options = parser.parse_args()

    # find root install prefix
    tmp = os.path.dirname(sys.argv[0])
    examplesdir = None
    if os.path.basename(tmp) == 'bin':
        egg_link = os.path.join(tmp, '..', 'local/lib/python2.7/site-packages/fdroidserver.egg-link')
        if os.path.exists(egg_link):
            # installed from local git repo
            examplesdir = os.path.join(open(egg_link).readline().rstrip(), 'examples')
        else:
            # try .egg layout
            examplesdir = os.path.dirname(os.path.dirname(__file__)) + '/share/doc/fdroidserver/examples'
            if not os.path.exists(examplesdir):  # use UNIX layout
                examplesdir = os.path.dirname(tmp) + '/share/doc/fdroidserver/examples'
    else:
        # we're running straight out of the git repo
        prefix = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
        examplesdir = prefix + '/examples'

    aapt = None
    fdroiddir = os.getcwd()
    test_config = dict()
    common.fill_config_defaults(test_config)

    # track down where the Android SDK is, the default is to use the path set
    # in ANDROID_HOME if that exists, otherwise None
    if options.android_home is not None:
        test_config['sdk_path'] = options.android_home
    elif not common.test_sdk_exists(test_config):
        if os.path.isfile('/usr/bin/aapt'):
            # remove sdk_path and build_tools, they are not required
            test_config.pop('sdk_path', None)
            test_config.pop('build_tools', None)
            # make sure at least aapt is found, since this can't do anything without it
            test_config['aapt'] = common.find_sdk_tools_cmd('aapt')
        else:
            # if neither --android-home nor the default sdk_path exist, prompt the user
            default_sdk_path = '/opt/android-sdk'
            if sys.platform == 'win32' or sys.platform == 'cygwin':
                default_sdk_path = os.path.join(os.getenv('USERPROFILE'),
                                                'AppData', 'Local', 'Android', 'android-sdk')
            while not options.no_prompt:
                try:
                    s = input('Enter the path to the Android SDK ('
                              + default_sdk_path + ') here:\n> ')
                except KeyboardInterrupt:
                    print('')
                    sys.exit(1)
                if re.match(r'^\s*$', s) is not None:
                    test_config['sdk_path'] = default_sdk_path
                else:
                    test_config['sdk_path'] = s
                if common.test_sdk_exists(test_config):
                    break
    if not common.test_sdk_exists(test_config):
        sys.exit(3)

    if not os.path.exists('config.py'):
        # 'metadata' and 'tmp' are created in fdroid
        if not os.path.exists('repo'):
            os.mkdir('repo')
        shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir)
        shutil.copyfile(os.path.join(examplesdir, 'config.py'), 'config.py')
        os.chmod('config.py', 0o0600)
        # If android_home is None, test_config['sdk_path'] will be used and
        # "$ANDROID_HOME" may be used if the env var is set up correctly.
        # If android_home is not None, the path given from the command line
        # will be directly written in the config.
        if 'sdk_path' in test_config:
            common.write_to_config(test_config, 'sdk_path', options.android_home)
    else:
        logging.warn('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...')
        logging.info('Try running `fdroid init` in an empty directory.')
        sys.exit()

    if 'aapt' not in test_config or not os.path.isfile(test_config['aapt']):
        # try to find a working aapt, in all the recent possible paths
        build_tools = os.path.join(test_config['sdk_path'], 'build-tools')
        aaptdirs = []
        aaptdirs.append(os.path.join(build_tools, test_config['build_tools']))
        aaptdirs.append(build_tools)
        for f in os.listdir(build_tools):
            if os.path.isdir(os.path.join(build_tools, f)):
                aaptdirs.append(os.path.join(build_tools, f))
        for d in sorted(aaptdirs, reverse=True):
            if os.path.isfile(os.path.join(d, 'aapt')):
                aapt = os.path.join(d, 'aapt')
                break
        if aapt and os.path.isfile(aapt):  # aapt stays None if nothing was found
            dirname = os.path.basename(os.path.dirname(aapt))
            if dirname == 'build-tools':
                # this is the old layout, before versioned build-tools
                test_config['build_tools'] = ''
            else:
                test_config['build_tools'] = dirname
            common.write_to_config(test_config, 'build_tools')
        common.ensure_build_tools_exists(test_config)

    # now that we have a local config.py, read configuration...
    config = common.read_config(options)

    # the NDK is optional and there may be multiple versions of it, so it's
    # left for the user to configure

    # find or generate the keystore for the repo signing key. First try the
    # path written in the default config.py.  Then check if the user has
    # specified a path from the command line, which will trump all others.
    # Otherwise, create ~/.local/share/fdroidserver and stick it in there.  If
    # keystore is set to NONE, that means that Java will look for keys in a
    # Hardware Security Module aka Smartcard.
    keystore = config['keystore']
    if options.keystore:
        if options.keystore == 'NONE':
            keystore = options.keystore
        else:
            keystore = os.path.abspath(options.keystore)
            if not os.path.exists(keystore):
                logging.info('"' + keystore
                             + '" does not exist, creating a new keystore there.')
    common.write_to_config(test_config, 'keystore', keystore)
    repo_keyalias = None
    if options.repo_keyalias:
        repo_keyalias = options.repo_keyalias
        common.write_to_config(test_config, 'repo_keyalias', repo_keyalias)
    if options.distinguished_name:
        keydname = options.distinguished_name
        common.write_to_config(test_config, 'keydname', keydname)
    if keystore == 'NONE':  # we're using a smartcard
        common.write_to_config(test_config, 'repo_keyalias', '1')  # seems to be the default
        disable_in_config('keypass', 'never used with smartcard')
        common.write_to_config(test_config, 'smartcardoptions',
                               ('-storetype PKCS11 -providerName SunPKCS11-OpenSC '
                                + '-providerClass sun.security.pkcs11.SunPKCS11 '
                                + '-providerArg opensc-fdroid.cfg'))
        # find opensc-pkcs11.so
        if not os.path.exists('opensc-fdroid.cfg'):
            if os.path.exists('/usr/lib/opensc-pkcs11.so'):
                opensc_so = '/usr/lib/opensc-pkcs11.so'
            elif os.path.exists('/usr/lib64/opensc-pkcs11.so'):
                opensc_so = '/usr/lib64/opensc-pkcs11.so'
            else:
                files = glob.glob('/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so')
                if len(files) > 0:
                    opensc_so = files[0]
                else:
                    opensc_so = '/usr/lib/opensc-pkcs11.so'
                    logging.warn('No OpenSC PKCS#11 module found, ' +
                                 'install OpenSC then edit "opensc-fdroid.cfg"!')
            with open(os.path.join(examplesdir, 'opensc-fdroid.cfg'), 'r') as f:
                opensc_fdroid = f.read()
            opensc_fdroid = re.sub('^library.*', 'library = ' + opensc_so, opensc_fdroid,
                                   flags=re.MULTILINE)
            with open('opensc-fdroid.cfg', 'w') as f:
                f.write(opensc_fdroid)
    elif not os.path.exists(keystore):
        password = common.genpassword()
        c = dict(test_config)
        c['keystorepass'] = password
        c['keypass'] = password
        c['repo_keyalias'] = socket.getfqdn()
        c['keydname'] = 'CN=' + c['repo_keyalias'] + ', OU=F-Droid'
        common.write_to_config(test_config, 'keystorepass', password)
        common.write_to_config(test_config, 'keypass', password)
        common.write_to_config(test_config, 'repo_keyalias', c['repo_keyalias'])
        common.write_to_config(test_config, 'keydname', c['keydname'])
        common.genkeystore(c)

    logging.info('Built repo based in "' + fdroiddir + '"')
    logging.info('with this config:')
    logging.info('  Android SDK:\t\t\t' + config['sdk_path'])
    if aapt:
        logging.info('  Android SDK Build Tools:\t' + os.path.dirname(aapt))
    logging.info('  Android NDK r12b (optional):\t$ANDROID_NDK')
    logging.info('  Keystore for signing key:\t' + keystore)
    if repo_keyalias is not None:
        logging.info('  Alias for key in store:\t' + repo_keyalias)
    logging.info('\nTo complete the setup, add your APKs to "' +
                 os.path.join(fdroiddir, 'repo') + '"' + '''
then run "fdroid update -c; fdroid update".  You might also want to edit
"config.py" to set the URL, repo name, and more.  You should also set up
a signing key (a temporary one might have been automatically generated).

For more info: https://f-droid.org/manual/fdroid.html#Simple-Binary-Repository
and https://f-droid.org/manual/fdroid.html#Signing
''')
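
The install-prefix detection at the top of main() is a common os.path.dirname idiom: start from the entry point's location and climb toward a shared data directory. A stripped-down sketch of the idea, with purely illustrative paths rather than fdroidserver's actual layout:

import os
import sys

def find_examples_dir():
    # Directory containing the running script; sys.argv[0] may be
    # relative, so normalize with abspath before taking dirname.
    script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
    if os.path.basename(script_dir) == 'bin':
        # Installed layout: strip the trailing 'bin' to get the prefix.
        prefix = os.path.dirname(script_dir)
        return os.path.join(prefix, 'share', 'doc', 'examples')
    # Source checkout: assume examples/ sits next to this module.
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'examples')

print(find_examples_dir())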

Example 16

Project: fontlab-scripts Source File: convertToTTF.py
def processFonts(fontsList):
    totalFonts = len(fontsList)

    print "%d fonts found:\n%s\n" % (totalFonts, '\n'.join(fontsList))

    setType1openPrefs()
    setTTgeneratePrefs()
    setTTautohintPrefs()

    fontIndex = 1
    for pfaPath in fontsList:

        # Make temporary encoding file from GOADB file. This step needs to
        # be done per font, because the directory tree selected may contain
        # more than one family, or because the glyph set of a given family
        # may not be the same for both Roman/Upright and Italic/Sloped.
        encPath = None
        goadbPath = None

        # The GOADB can be located in the same folder or up to two
        # levels above in the directory tree
        sameLevel = os.path.join(os.path.dirname(pfaPath), kGOADBfileName)
        oneUp = os.path.join(
            os.path.dirname(os.path.dirname(pfaPath)), kGOADBfileName)
        twoUp = os.path.join(
            os.path.dirname(os.path.dirname(os.path.dirname(pfaPath))), kGOADBfileName)

        if os.path.exists(sameLevel):
            goadbPath = sameLevel
        elif os.path.exists(oneUp):
            goadbPath = oneUp
        elif os.path.exists(twoUp):
            goadbPath = twoUp

        if goadbPath:
            encPath = makeTempEncFileFromGOADB(goadbPath)
        else:
            print "Could not find %s file." % kGOADBfileName
            print "Skipping %s" % pfaPath
            print

        if not encPath:
            continue

        # Checking if a derivedchars file exists.
        # If not, the dvInput step is skipped.
        makeDV = False

        for file in os.listdir(os.path.split(pfaPath)[0]):
            if re.search(r'derivedchars(.+?)?$', file) and dvModuleFound:
                makeDV = True

        fontIsTXT = False
        fontIsUFO = False

        if kFontTXT in pfaPath:
            fontIsTXT = True
            pfaPath = convertTXTfontToPFA(pfaPath)

        elif kFontUFO in pfaPath or (pfaPath[-4:].lower() in [".ufo"]):
            # Support more than just files named "font.ufo"
            fontIsUFO = True
            pfaPath = convertUFOfontToPFA(pfaPath)

        fl.Open(pfaPath)
        print "\nProcessing %s ... (%d/%d)" % (
            fl.font.font_name, fontIndex, totalFonts)
        fontIndex += 1

        fontZonesWereReplaced = replaceFontZonesByFamilyZones()
        baselineZonesWereRemoved = removeBottomZonesAboveBaseline()

        # NOTE: After making changes to the PostScript alignment zones, the TT
        # equivalents have to be updated as well, but I couldn't find a way
        # to do it via scripting (because TTH.top_zones and TTH.bottom_zones
        # are read-only, and despite that functionality being available in
        # the UI, there's no native function to update TT zones from T1 zones).
        # So the solution is to generate a new T1 font and open it back.
        pfaPathTemp = pfaPath.replace('.pfa', '_TEMP_.pfa')
        infPathTemp = pfaPathTemp.replace('.pfa', '.inf')
        if baselineZonesWereRemoved or fontZonesWereReplaced:
            fl.GenerateFont(eval("ftTYPE1ASCII"), pfaPathTemp)
            fl[fl.ifont].modified = 0
            fl.Close(fl.ifont)
            fl.Open(pfaPathTemp)
            if os.path.exists(infPathTemp):
                # Delete the .INF file (bug in FL v5.1.x)
                os.remove(infPathTemp)

        # Load encoding file
        fl.font.encoding.Load(encPath)

        # Make sure the Font window is in 'Names mode'
        fl.CallCommand(fl_cmd.FontModeNames)

        # Sort glyphs by encoding
        fl.CallCommand(fl_cmd.FontSortByCodepage)

        # read derivedchars file, make components
        if makeDV:
            dvInput_module.run(verbose=False)

        convertT1toTT()
        changeTTfontSettings()

        # Switch the Font window to 'Index mode'
        fl.CallCommand(fl_cmd.FontModeIndex)

        # path to the folder containing the font, and the font's file name
        folderPath, fontFileName = os.path.split(pfaPath)
        ppmsFilePath = os.path.join(folderPath, kPPMsFileName)
        if os.path.exists(ppmsFilePath):
            hPPMs, vPPMs = readPPMsFile(ppmsFilePath)
            replaceStemsAndPPMs(hPPMs, vPPMs)

        tthintsFilePath = os.path.join(folderPath, kTTHintsFileName)
        if os.path.exists(tthintsFilePath):
            inputTTHints.run(folderPath)
            # readTTHintsFile(tthintsFilePath)
            # replaceTTHints()

        # FontLab 5.1.5 Mac Build 5714 does NOT respect the unchecked
        # option "Automatically add .null, CR and space characters"
        for gName in ["NULL", "CR"]:
            gIndex = fl.font.FindGlyph(gName)
            if gIndex != -1:
                del fl.font.glyphs[gIndex]

        vfbPath = pfaPath.replace('.pfa', '.vfb')
        fl.Save(vfbPath)

        # The filename of the TT output is hardcoded
        ttfPath = os.path.join(folderPath, kFontTTF)
        fl.GenerateFont(eval("ftTRUETYPE"), ttfPath)

        fl[fl.ifont].modified = 0
        fl.Close(fl.ifont)

        # The TT font generated with FontLab ends up with a few glyph names
        # changed. Fix the glyph names so that makeOTF does not fail.
        postProccessTTF(ttfPath)

        # Delete temporary Encoding file:
        if os.path.exists(encPath):
            os.remove(encPath)

        # Delete temp PFA:
        if os.path.exists(pfaPathTemp):
            os.remove(pfaPathTemp)

        # Cleanup after processing from TXT type1 font or UFO font
        if fontIsTXT or fontIsUFO:
            if os.path.exists(pfaPath):
                os.remove(pfaPath)
            if os.path.exists(ttfPath):
                finalTTFpath = ttfPath.replace('_TEMP_.ttf', '.ttf')
                if finalTTFpath != ttfPath:
                    if PC:
                        os.remove(finalTTFpath)
                    os.rename(ttfPath, finalTTFpath)

            if os.path.exists(vfbPath):
                finalVFBpath = vfbPath.replace('_TEMP_.vfb', '.vfb')
                if finalVFBpath != vfbPath:
                    if PC and os.path.exists(finalVFBpath):
                        os.remove(finalVFBpath)
                    os.rename(vfbPath, finalVFBpath)

            # remove FontLab leftovers
            pfmPath = pfaPathTemp.replace('.pfa', '.pfm')
            afmPath = pfaPathTemp.replace('.pfa', '.afm')
            if os.path.exists(pfmPath):
                os.remove(pfmPath)
            if os.path.exists(afmPath):
                os.remove(afmPath)
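
The sameLevel/oneUp/twoUp lookup above generalizes to a loop in which each os.path.dirname call climbs one directory level. A minimal sketch of that search; the function name and default depth are invented for illustration:

import os

def find_upwards(start_file, name, max_levels=3):
    # Start from the directory containing start_file and walk toward
    # the root, checking for `name` at each level.
    directory = os.path.dirname(os.path.abspath(start_file))
    for _ in range(max_levels):
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
        parent = os.path.dirname(directory)
        if parent == directory:  # dirname is a fixed point at the root
            break
        directory = parent
    return None

print(find_upwards(__file__, 'GlyphOrderAndAliasDB'))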

Example 17

Project: icaro Source File: icaro.py
Function: init
    def __init__(self, icaro_dir):

        # this is the list the values for the icaro
        # buttons are taken from
        self.icaro_dir = icaro_dir
        arch = open(sys.path[0] + "/version", "r")
        version = arch.readline()

        creditos.Info.version = version
        self.carga_conf_ventana()
        # declare the main window
        # this is the toolbar that holds the buttons for loading data
        # and compiling
        # declare the table that holds the buttons for the block menu
        # box1 is the main container after the window
        self.window1 = gtk.Window()

        titulo = "icaro " + version.strip("\n\r")
        self.window1.set_title(titulo)
        toolbar = gtk.Toolbar()
        self.area = gtk.DrawingArea()
        scrolled_window = gtk.ScrolledWindow()
        scrolled_window2 = gtk.ScrolledWindow()
        scrolled_window3 = gtk.ScrolledWindow()
        table = gtk.VBox(False, len(self.lista))
        notebook = gtk.Notebook()
        self.notebook2 = gtk.Notebook()
        hp = gtk.HPaned()
        box2 = gtk.HBox(False, 3)
        box1 = gtk.VBox(False, 3)
        menu_bar = gtk.MenuBar()

        # pack everything
        # the idea of using an hpaned is to be able to shrink the window;
        # on netbooks not everything fits on screen
        self.window1.add(box1)
        box1.pack_start(menu_bar, False, True, 1)
        box1.pack_start(box2, True, True, 1)
        scrolled_window.add_with_viewport(self.area)
        scrolled_window3.add_with_viewport(toolbar)
        scrolled_window2.add_with_viewport(notebook)
        self.notebook2.append_page(scrolled_window, gtk.Label("bloques"))
        box2.pack_start(scrolled_window3, False, False, 1)
        box2.pack_start(hp, True, True, 1)
        hp.pack1(self.notebook2, True, True)
        hp.pack2(scrolled_window2, True, True)
        self.ver = visor.visor_codigo(self, self.notebook2)

        hp.set_position(500)
        self.window1.connect('delete-event', gtk.main_quit)
        self.window1.set_icon_from_file(
            sys.path[0] +
            "/imagenes/icaro.png"
        )
        self.area.set_app_paintable(True)
        self.area.set_size_request(800, 800)
        menu1 = [_("File"), _("Edit"), "herramientas"]
        menu_general = [
            (_("New"), _("Open"), _("Save"), _("Save as"),
             _("Save as function"), _("Examples"), _("Exit")),
            (_("Background"), _("Color"), _("About"), _("Config")),
            ("graficador", "clemente bulk", "clemente cdc"
             ,  _("Log"), "firmware",)
        ]
        menu_bar.show()
        # declare the buttons of the 'menu' and 'edit' menus
        for a in range(len(menu_general)):
            menu = gtk.Menu()
        # buf is where all the menu buttons are loaded
            for i in menu_general[a]:
                menu_items = gtk.MenuItem(i)
                menu.append(menu_items)
                menu_items.connect("activate", self.menuitem_response, i)
                menu_items.show()
            root_menu = gtk.MenuItem(menu1[a])
            root_menu.show()
            root_menu.set_submenu(menu)
            menu_bar.append(root_menu)

        # toolbar.append_item
        toolbar.set_style(gtk.TOOLBAR_BOTH_HORIZ)
        toolbar.set_orientation(gtk.ORIENTATION_VERTICAL)
        toolbar.show()

        # create the toolbar buttons
        botones_toolbar = [
            [1, toolbar, gtk.STOCK_NEW, "New",
             self.tooltip["nuevo"], self.nuevo, None],
            [1, toolbar, gtk.STOCK_OPEN, "Open",
             self.tooltip["abrir"], self.abrir, None],
            [1, toolbar, gtk.STOCK_SAVE, "Save",
             self.tooltip["guardar"], self.guardar, 0],
            [1, toolbar, gtk.STOCK_QUIT, "Quit",
             self.tooltip["salir"], self.salir, None],
            [3],
            [2, toolbar, sys.path[0] + "/imagenes/icaro.png",
             "Compile", self.tooltip["compilar"], self.compilar, None],
            [2, toolbar, sys.path[0] + "/imagenes/compilar.png",
             "Load", self.tooltip["cargar"], self.upload, None],
            [3],
            [2, toolbar, sys.path[0] + "/imagenes/tortucaro.png",
             "Tortucaro", self.tooltip["tortucaro"], self.comp_esp,
             "tortucaro/tortucaro"],

            [2, toolbar, sys.path[0] + "/imagenes/pilas.png",
             "pilas", self.tooltip["pilas"], self.comp_esp,
             "pilas/pilas-engine"],
            [2, toolbar, sys.path[0] + "/imagenes/icaroblue.png",
             "icaroblue", self.tooltip["icaroblue"], self.comp_esp,
             "icaroblue/icaroblue"],
            [3],
            [1, toolbar, gtk.STOCK_HELP, "Help",
             self.tooltip["ayuda"], self.ayuda, None],
            [3],
            [1, toolbar, gtk.STOCK_ADD, "Pen",
             self.tooltip["lapiz"], self.dibujo, 1],
            [1, toolbar, gtk.STOCK_SELECT_COLOR, "Move",
             self.tooltip["mover"], self.dibujo, 2],
            [1, toolbar, gtk.STOCK_DELETE, "Erase",
             self.tooltip["borrar"], self.dibujo, 3],
            [1, toolbar, gtk.STOCK_EDIT,
             "Edit", "", self.dibujo, 4],
            [3],
            [1, toolbar, gtk.STOCK_ZOOM_IN, "agrandar",
             "", self.menuitem_response, "zoomas"],
            [1, toolbar, gtk.STOCK_ZOOM_OUT, "achicar",
             "", self.menuitem_response, "zoomenos"],
            [1, toolbar, gtk.STOCK_ZOOM_100, "zoom 1:1",
             "", self.menuitem_response, "zoomcero"],
        ]

        # create the toolbar buttons based on the botones_toolbar list
        for dat in botones_toolbar:
            if dat[0] == 3:
                toolbar.append_space()
            if dat[0] == 1 or dat[0] == 2:
                self.crear_toolbuttons(
                    dat[0], dat[1], dat[2], dat[3], dat[4], dat[5], dat[6])

        scrolled_window.set_size_request(300, 300)
        scrolled_window.set_policy(gtk.POLICY_ALWAYS, gtk.POLICY_ALWAYS)
        scrolled_window.show()
        scrolled_window2.set_border_width(1)
        scrolled_window2.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        scrolled_window2.show()
        scrolled_window3.set_border_width(1)
        scrolled_window3.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        scrolled_window3.show()
        notebook.set_tab_pos(gtk.POS_RIGHT)
        label = gtk.Label(self.diccionario[self.lista[0]][1])
        notebook.append_page(table, label)
        button = gtk.RadioButton()
        ## load the data for each block here ##
        for i in range(1, len(self.lista)):
            if self.diccionario[self.lista[i]][0] == "notebook":
                table = gtk.VBox(False, len(self.lista))
                label = gtk.Label(self.diccionario[self.lista[i]][1])
                notebook.append_page(table, label)
            else:
                self.diccionario[self.lista[i]][0]
                caja = self.imagen_boton(
                    self.diccionario[self.lista[i]][0],
                    self.diccionario[self.lista[i]][0]
                )
                button = gtk.RadioButton(button)
                button.set_tooltip_text(self.diccionario[self.lista[i]][6])
                button.add(caja)
                button.connect("clicked", self.botones, self.lista[i])
                table.pack_start(button, False, True, 0)
                button.show()

        # capture the drawing area events,
        # except for the keyboard, which is captured from the main window
        self.area.add_events(gtk.gdk.BUTTON_PRESS_MASK)
        self.area.add_events(gtk.gdk.BUTTON_RELEASE_MASK)
        self.area.add_events(gtk.gdk.POINTER_MOTION_MASK)
        self.window1.add_events(gtk.gdk.KEY_PRESS_MASK)
        self.window1.add_events(gtk.gdk.KEY_RELEASE_MASK)
        self.area.connect("button-press-event", self.buttonpress_cb)
        self.area.connect("button-release-event", self.buttonrelease_cb)
        self.area.connect("motion-notify-event", self.move_cb)
        self.area.connect("expose-event", self.expose)
        self.window1.connect("key_press_event", self.keypress_cb)
        self.window1.connect("key_release_event", self.keyrelease_cb)
        self.area.realize()
        display = self.area.window.get_display()
        pixbuf = gtk.gdk.pixbuf_new_from_file(
            os.path.abspath(os.path.dirname(__file__)) + "/imagenes/mouse/lapiz.png")
        lapiz = gtk.gdk.Cursor(display, pixbuf, 6, 18)
        self.cursores.append(lapiz)
        pixbuf = gtk.gdk.pixbuf_new_from_file(
            os.path.abspath(os.path.dirname(__file__)) + "/imagenes/mouse/puntero.png")
        puntero = gtk.gdk.Cursor(display, pixbuf, 6, 18)
        self.cursores.append(puntero)
        pixbuf = gtk.gdk.pixbuf_new_from_file(
            os.path.abspath(os.path.dirname(__file__)) + "/imagenes/mouse/borrar.png")
        borrar = gtk.gdk.Cursor(display, pixbuf, 6, 18)
        self.cursores.append(borrar)
        pixbuf = gtk.gdk.pixbuf_new_from_file(
            os.path.abspath(os.path.dirname(__file__)) + "/imagenes/mouse/edicion.png")
        edicion = gtk.gdk.Cursor(display, pixbuf, 6, 18)
        self.cursores.append(edicion)
        self.definir_cursor(1)
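
The cursor-loading code repeats one pattern four times: resolve an image path relative to the module itself via os.path.abspath(os.path.dirname(__file__)). A small sketch factoring that out; the helper and constant names are hypothetical:

import os

# Anchor resource lookups to the module's own location rather than the
# current working directory; __file__ can be relative, so normalize first.
IMAGES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "imagenes")

def cursor_image(name):
    return os.path.join(IMAGES_DIR, "mouse", name)

print(cursor_image("lapiz.png"))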

Example 18

Project: python-documentcloud Source File: test_all.py
    def test_private_actions(self):
        """
        Test all the stuff that requires a login.
        """
        # Get an editable document
        obj_id = self.get_editable_document(self.version)
        obj = self.private_client.documents.get(obj_id)

        # Make sure `data` attribute will only accept a dictionary.
        obj.data = dict(foo='bar')
        self.assertRaises(TypeError, obj.set_data, "string")
        self.assertRaises(TypeError, obj.set_data, 666)
        self.assertRaises(TypeError, obj.set_data, obj)

        # Test whether we can put random noise to all the editable fields.
        title = get_random_string()
        source = get_random_string()
        description = get_random_string()
        data = {get_random_string(): get_random_string()}
        if obj.resources.related_article == 'http://documents.latimes.com':
            related_article = 'http://documentcloud.org'
        else:
            related_article = 'http://documents.latimes.com'
        if obj.resources.published_url == 'http://documents.latimes.com':
            published_url = 'http://documentcloud.org'
        else:
            published_url = 'http://documents.latimes.com'
        obj.title = title
        obj.source = source
        obj.description = description
        obj.data = data
        obj.resources.related_article = related_article
        obj.resources.published_url = published_url

        # Save the changes up to DocumentCloud
        obj.put()

        # Pull the object again and verify the changes stuck
        obj = self.private_client.documents.get(obj_id)
        self.assertEqual(obj.title, title)
        self.assertEqual(obj.source, source)
        self.assertEqual(obj.description, description)
        self.assertEqual(obj.data, data)
        self.assertEqual(obj.resources.related_article, related_article)
        self.assertEqual(obj.resources.published_url, published_url)

        # Test reserved namespaces to make sure they're protected
        black_list = [
            'person', 'organization', 'place', 'term', 'email', 'phone',
            'city', 'state', 'country', 'title', 'description', 'source',
            'account', 'group', 'project', 'projectid', 'document', 'access',
            'filter',
        ]
        for key in black_list:
            self.assertRaises(ValueError, setattr, obj, "data", {key: 'foo'})
        obj.data = dict(boom='bap')

        # Test to make sure non-strings can't get into the data dictionary
        with self.assertRaises(TypeError):
            obj.data = dict(a=1)

        with self.assertRaises(TypeError):
            obj.data = {1: 'a'}

        obj.data = dict(boom='bap')
        with self.assertRaises(TypeError):
            obj.data[1] = 2

        # Resources
        self.assertEqual(obj.published_url, obj.resources.published_url)
        self.assertEqual(obj.related_article, obj.resources.related_article)

        # And their shortcuts
        obj.published_url = 'http://latimes.com'
        obj.related_article = 'http://palewi.re'
        self.assertEqual(obj.published_url, obj.resources.published_url)
        self.assertEqual(obj.related_article, obj.resources.related_article)

        # Test whether the save method properly aliases `put`.
        title = get_random_string()
        obj.title = title
        obj.save()
        obj = self.private_client.documents.get(obj_id)
        self.assertEqual(obj.title, title)

        # Test whether you can save an attribute with some weird encoding
        before_title = copy(obj.title)
        before_description = copy(obj.description)
        obj.title = random.choice(list(PANGRAMS.keys()))
        obj.description = random.choice(list(PANGRAMS.keys()))
        obj.put()
        obj.title = before_title
        obj.description = before_description
        obj.put()

        # Upload
        title = get_random_string()
        obj = self.private_client.documents.upload(
            os.path.join(os.path.dirname(__file__), "test.pdf"),
            title,
            description='Blah blah',
            related_article='http://www.latimes.com',
            data=dict(like_this='like+that', boom='bap'),
        )
        self.assertTrue(isinstance(obj, Document))
        self.assertEqual(obj.title, title)
        self.assertEqual(obj.description, 'Blah blah')
        self.assertEqual(obj.related_article, 'http://www.latimes.com')
        self.assertEqual(
            obj.data,
            {u'like_this': u'like+that', u'boom': u'bap'}
        )

        # Delete
        obj.delete()
        self.assertRaises(
            DoesNotExistError,
            self.private_client.documents.get,
            obj.id
        )

        # Test upload with bad keyword
        title = '001 - Test upload (%s)' % get_random_string()
        self.assertRaises(
            ValueError,
            self.private_client.documents.upload,
            os.path.join(os.path.dirname(__file__), "test.pdf"),
            title,
            description='Blah blah',
            related_article='http://www.latimes.com',
            # Access is a reserved keyword so this should fail
            data=dict(access='this', boom='bap'),
        )

        # Upload with a file object, not a path
        title = get_random_string()
        obj = self.private_client.documents.upload(
            open(os.path.join(os.path.dirname(__file__), "test.pdf"), "rb"),
            title,
        )
        self.assertTrue(isinstance(obj, Document))
        self.assertEqual(obj.title, title)
        obj.delete()

        # Ensure that documents with non-English characters can be uploaded
        pdf = os.path.join(os.path.dirname(__file__), "español.pdf")
        obj = self.private_client.documents.upload(open(pdf, 'rb'))
        self.assertTrue(isinstance(obj, Document))
        obj.delete()

        # Test virtual file upload and delete
        path = os.path.join(os.path.dirname(__file__), "español.pdf")
        real_file = open(path, 'rb')
        if six.PY3:
            virtual_file = io.BytesIO(real_file.read())
        else:
            virtual_file = io.StringIO(real_file.read())
        obj = self.private_client.documents.upload(
            virtual_file,
            title='Espanola!'
        )
        self.assertTrue(isinstance(obj, Document))
        obj.delete()

        # Test secure upload
        title = get_random_string()
        obj = self.private_client.documents.upload(
            os.path.join(os.path.dirname(__file__), "test.pdf"),
            title,
            secure=True,
        )
        self.assertTrue(isinstance(obj, Document))
        obj.delete()

        # Upload everything in this directory.
        obj_list = self.private_client.documents.upload_directory(
            './',
            source='Los Angeles Times',
            published_url='http://www.latimes.com',
        )
        self.assertEqual(len(obj_list), 2)
        self.assertTrue(isinstance(obj_list[0], Document))
        self.assertEqual(obj_list[0].source, 'Los Angeles Times')
        self.assertEqual(obj_list[0].published_url, 'http://www.latimes.com')
        [i.delete() for i in obj_list]

        # Test URL upload
        url = 'http://ord.legistar.com/Chicago/attachments/e3a0cbcb-044d-4ec3-9848-23c5692b1943.pdf'
        obj = self.private_client.documents.upload(url)
        obj.delete()
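
Every upload in this test resolves its fixture the same way: os.path.dirname(__file__) yields the directory of the test module, so assets travel with the tests no matter where the suite is launched from. A minimal sketch of that helper; the names are invented:

import os

THIS_DIR = os.path.dirname(os.path.abspath(__file__))

def fixture(name):
    # Resolve a test asset relative to this module, not the CWD.
    return os.path.join(THIS_DIR, name)

print(fixture("test.pdf"))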

Example 19

Project: neural-network-animation Source File: plot_directive.py
def run(arguments, content, options, state_machine, state, lineno):
    # The user may provide a filename *or* Python code content, but not both
    if arguments and content:
        raise RuntimeError("plot:: directive can't have both args and content")

    document = state_machine.document
    config = document.settings.env.config
    nofigs = 'nofigs' in options

    options.setdefault('include-source', config.plot_include_source)
    context = 'context' in options
    context_reset = True if (context and options['context'] == 'reset') else False

    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if len(arguments):
        if not config.plot_basedir:
            source_file_name = os.path.join(setup.app.builder.srcdir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))

        # If there is content, it will be passed as a caption.
        caption = '\n'.join(content)

        # If the optional function name is provided, use it
        if len(arguments) == 2:
            function_name = arguments[1]
        else:
            function_name = None

        with io.open(source_file_name, 'r', encoding='utf-8') as fd:
            code = fd.read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)
        function_name = None
        caption = ''

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if 'format' in options:
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    # get rid of .. in paths, also changes pathsep
    # see note in Python docs for warning about symbolic links on Windows.
    # need to compare source and dest paths at end
    build_dir = os.path.normpath(build_dir)

    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir) # no problem here for me, but just use built-ins

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = render_figures(code, source_file_name, build_dir, output_base,
                                 context, function_name, config,
                                 context_reset=context_reset)
        errors = []
    except PlotError as err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
                                                source_file_name, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # Properly indent the caption
    caption = '\n'.join('      ' + line.strip()
                        for line in caption.split('\n'))

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        if nofigs:
            images = []

        opts = [':%s: %s' % (key, val) for key, val in six.iteritems(options)
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        only_html = ".. only:: html"
        only_latex = ".. only:: latex"
        only_texinfo = ".. only:: texinfo"

        # Not-None src_link signals the need for a source link in the generated
        # html
        if j == 0 and config.plot_html_show_source_link:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            config.plot_template or TEMPLATE,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            only_texinfo=only_texinfo,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats and not nofigs,
            caption=caption)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory, if necessary
    if not os.path.exists(dest_dir):
        cbook.mkdirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                destimg = os.path.join(dest_dir, os.path.basename(fn))
                if fn != destimg:
                    shutil.copyfile(fn, destimg)

    # copy script (if necessary)
    target_name = os.path.join(dest_dir, output_base + source_ext)
    with io.open(target_name, 'w', encoding="utf-8") as f:
        if source_file_name == rst_file:
            code_escaped = unescape_doctest(code)
        else:
            code_escaped = code
        f.write(code_escaped)

    return errors
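
One detail worth isolating from this version is how filesystem paths become RST-friendly links: a relpath computed from the RST file's directory (obtained with os.path.dirname), with separators normalized to forward slashes. A minimal sketch under POSIX-path assumptions:

import os

def link_fragment(conf_dir, rst_file, source_rel_dir):
    # Directory holding the RST file; links in the generated output
    # are written relative to it.
    rst_dir = os.path.dirname(rst_file)
    link = os.path.join(os.path.relpath(conf_dir, rst_dir), source_rel_dir)
    # Generated HTML needs forward slashes even on Windows.
    return link.replace(os.path.sep, '/')

print(link_fragment('/docs', '/docs/api/intro.rst', 'api'))
# prints ../api on POSIX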

Example 20

Project: robothon Source File: setup.py
Function: configuration
def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration,dot_join
    from numpy.distutils.system_info import get_info, default_lib_dirs

    config = Configuration('core',parent_package,top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir,'code_generators')

    generate_umath_py = join(codegen_dir,'generate_umath.py')
    n = dot_join(config.name,'generate_umath')
    generate_umath = imp.load_module('_'.join(n.split('.')),
                                     open(generate_umath_py,'U'),generate_umath_py,
                                     ('.py','U',1))

    header_dir = 'include/numpy' # this is relative to config.path_in_package

    def generate_config_h(ext, build_dir):
        target = join(build_dir,header_dir,'config.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)
            tc = generate_testcode(target)
            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)
            result = config_cmd.try_run(tc,include_dirs=[python_include],
                                        library_dirs = default_lib_dirs)
            if not result:
                raise SystemError,"Failed to test configuration. "\
                      "See previous error messages for more information."

            moredefs = []
            #
            mathlibs = []
            tc = testcode_mathlib()
            mathlibs_choices = [[],['m'],['cpml']]
            mathlib = os.environ.get('MATHLIB')
            if mathlib:
                mathlibs_choices.insert(0,mathlib.split(','))
            for libs in mathlibs_choices:
                if config_cmd.try_run(tc,libraries=libs):
                    mathlibs = libs
                    break
            else:
                raise EnvironmentError("math library missing; rerun "
                                       "setup.py after setting the "
                                       "MATHLIB env variable")
            ext.libraries.extend(mathlibs)
            moredefs.append(('MATHLIB',','.join(mathlibs)))

            def check_func(func_name):
                return config_cmd.check_func(func_name,
                                             libraries=mathlibs, decl=False,
                                             headers=['math.h'])

            for func_name, defsymbol in FUNCTIONS_TO_CHECK:
                if check_func(func_name):
                    moredefs.append(defsymbol)

            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            if sys.platform=='win32' or os.name=='nt':
                from numpy.distutils.misc_util import get_build_architecture
                a = get_build_architecture()
                print 'BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' % (a, os.name, sys.platform)
                if a == 'AMD64':
                    moredefs.append('DISTUTILS_USE_SDK')

            if sys.version[:3] < '2.4':
                if config_cmd.check_func('strtod', decl=False,
                                         headers=['stdlib.h']):
                    moredefs.append(('PyOS_ascii_strtod', 'strtod'))

            target_f = open(target,'a')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))
            target_f.close()
            cmd_ = 'ed - %s < /SourceCache/python_modules/python_modules-21/numpy/config.h.ed' % target
            print cmd_
            os.system(cmd_)
            print 'File:',target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        else:
            mathlibs = []
            target_f = open(target)
            for line in target_f.readlines():
                s = '#define MATHLIB'
                if line.startswith(s):
                    value = line[len(s):].strip()
                    if value:
                        mathlibs.extend(value.split(','))
            target_f.close()

        ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        target = join(build_dir,header_dir,'numpyconfig.h')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        if newer(__file__,target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s',target)
            testcode = generate_numpyconfig_code(target)

            from distutils import sysconfig
            python_include = sysconfig.get_python_inc()
            python_h = join(python_include, 'Python.h')
            if not os.path.isfile(python_h):
                raise SystemError,\
                      "Non-existing %s. Perhaps you need to install"\
                      " python-dev|python-devel." % (python_h)

            config.numpy_include_dirs
            result = config_cmd.try_run(testcode,
                                include_dirs = [python_include] + \
                                                       config.numpy_include_dirs,
                                        library_dirs = default_lib_dirs)

            if not result:
                raise SystemError,"Failed to generate numpy configuration. "\
                      "See previous error messages for more information."

            moredefs = []

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers = ['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))
            else:
                moredefs.append(('NPY_USE_C99_FORMATS', 0))

            # Add moredefs to header
            target_f = open(target,'a')
            for d in moredefs:
                if isinstance(d,str):
                    target_f.write('#define %s\n' % (d))
                else:
                    target_f.write('#define %s %s\n' % (d[0],d[1]))

            # Define __STDC_FORMAT_MACROS
            target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
            target_f.close()

            # Dump the numpyconfig.h header to stdout
            cmd_ = 'ed - %s < /SourceCache/python_modules/python_modules-21/numpy/numpyconfig.h.ed' % target
            print cmd_
            os.system(cmd_)
            print 'File: %s' % target
            target_f = open(target)
            print target_f.read()
            target_f.close()
            print 'EOF'
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    def generate_umath_c(ext,build_dir):
        target = join(build_dir,header_dir,'__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script,target):
            f = open(target,'w')
            f.write(generate_umath.make_code(generate_umath.defdict,
                                             generate_umath.__file__))
            f.close()
        return []

    config.add_data_files('include/numpy/*.h')
    config.add_include_dirs('src')

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src','arrayobject.c'),
            join('src','arraymethods.c'),
            join('src','scalartypes.inc.src'),
            join('src','arraytypes.inc.src'),
            join('src','_signbit.c'),
            join('src','_isnan.c'),
            join('src','ucsnarrow.c'),
            join('include','numpy','*object.h'),
            'include/numpy/fenv/fenv.c',
            'include/numpy/fenv/fenv.h',
            join(codegen_dir,'genapi.py'),
            join(codegen_dir,'*.txt')
            ]

    # Don't install fenv unless we need them.
    if sys.platform == 'cygwin':
        config.add_data_dir('include/numpy/fenv')

    config.add_extension('multiarray',
                         sources = [join('src','multiarraymodule.c'),
                                    generate_config_h,
                                    generate_numpyconfig_h,
                                    generate_numpy_api,
                                    join('src','scalartypes.inc.src'),
                                    join('src','arraytypes.inc.src'),
                                    join(codegen_dir,'generate_numpy_api.py'),
                                    join('*.py')
                                    ],
                         depends = deps,
                         )

    config.add_extension('umath',
                         sources = [generate_config_h,
                                    generate_numpyconfig_h,
                                    join('src','umathmodule.c.src'),
                                    generate_umath_c,
                                    generate_ufunc_api,
                                    join('src','scalartypes.inc.src'),
                                    join('src','arraytypes.inc.src'),
                                    ],
                         depends = [join('src','ufuncobject.c'),
                                    generate_umath_py,
                                    join(codegen_dir,'generate_ufunc_api.py'),
                                    ]+deps,
                         )

    config.add_extension('_sort',
                         sources=[join('src','_sortmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  ],
                         )

    config.add_extension('scalarmath',
                         sources=[join('src','scalarmathmodule.c.src'),
                                  generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  generate_ufunc_api],
                         )

    # Configure blasdot
    blas_info = get_info('blas_opt',0)
    #blas_info = {}
    def get_dotblas_sources(ext, build_dir):
        if blas_info:
            if ('NO_ATLAS_INFO',1) in blas_info.get('define_macros',[]):
                return None # dotblas needs ATLAS, Fortran compiled blas will not be sufficient.
            return ext.depends[:1]
        return None # no extension module will be built

    config.add_extension('_dotblas',
                         sources = [get_dotblas_sources],
                         depends=[join('blasdot','_dotblas.c'),
                                  join('blasdot','cblas.h'),
                                  ],
                         include_dirs = ['blasdot'],
                         extra_info = blas_info
                         )


    config.add_data_dir('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config
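
The source-generating callables referenced above (generate_config_h, generate_umath_c, and friends) are typically defined earlier in the same setup script, with their paths anchored to the script's own directory via os.path.dirname(__file__). A minimal sketch of that pattern; the helper name and directory layout are illustrative, not taken from numpy's actual setup.py:

import os

def local_path(*parts):
    # Resolve a path relative to this setup script, so the build works
    # regardless of the current working directory (illustrative helper).
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), *parts)

codegen_dir = local_path('code_generators')  # e.g. where genapi.py might live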

Example 21

Project: ganga Source File: AthenaLocalRTHandler.py
    def master_prepare( self, app, appconfig ):
        """Prepare the master job"""
        
        job = app._getParent() # Returns job or subjob object

        logger.debug("AthenaLocalRTHandler master_prepare called, %s", job.id)

        if job._getRoot().subjobs:
            jobid = "%d" % (job._getRoot().id)
        else:
            jobid = "%d" % job.id

        # Generate output dataset name
        if job.outputdata:
            if job.outputdata._name=='DQ2OutputDataset':
                dq2_datasetname = job.outputdata.datasetname
                dq2_isGroupDS = job.outputdata.isGroupDS
                dq2_groupname = job.outputdata.groupname
            else:
                dq2_datasetname = ''
                dq2_isGroupDS = False
                dq2_groupname = ''
            self.output_datasetname, self.output_lfn = dq2outputdatasetname(dq2_datasetname, jobid, dq2_isGroupDS, dq2_groupname)

        # Expand Athena jobOptions
        if not app.option_file and not app.command_line:
            raise ConfigError("j.application.option_file='' - No Athena jobOptions files specified.")

        athena_options = ''
        inputbox = [File(os.path.join(os.path.dirname(__file__),'athena-utility.sh'))]
        if app.atlas_exetype in ['PYARA','ARES','ROOT','EXE']:

            for option_file in app.option_file:
                athena_options += ' ' + os.path.basename(option_file.name)
                inputbox += [ File(option_file.name) ]

            athena_options += ' %s ' % app.options

        else:
            for option_file in app.option_file:
                athena_option = os.path.basename(option_file.name)
                athena_options += ' ' + athena_option
                if app.options:
                    athena_options =  app.options + ' ' + athena_options
                inputbox += [ File(option_file.name) ]


            if app.command_line:
                athena_options = app.command_line

        athena_usersetupfile = os.path.basename(app.user_setupfile.name)

#       prepare input sandbox

        if app.user_setupfile.name: inputbox += [ File(app.user_setupfile.name) ]
        #CN: added extra test for TNTJobSplitter
        if job.inputdata and job.inputdata._name in [ 'DQ2Dataset', 'ATLASTier3Dataset'] or (job._getRoot().splitter and job._getRoot().splitter._name == 'TNTJobSplitter'):
            _append_files(inputbox,'ganga-stage-in-out-dq2.py')
            _append_files(inputbox,'dq2_get')
            _append_files(inputbox,'dq2info.tar.gz')
            _append_files(inputbox,'libdcap.so')

        if job.inputdata and job.inputdata._name == 'ATLASDataset':
            if job.inputdata.lfc:
                _append_files(inputbox,'ganga-stagein-lfc.py')
            else:
                _append_files(inputbox,'ganga-stagein.py')

        ## insert more scripts to inputsandbox for FileStager
        if job.inputdata and job.inputdata._name in [ 'DQ2Dataset' ] and job.inputdata.type in ['FILE_STAGER']:
            _append_files(inputbox,'make_filestager_joption.py','dm_util.py','fs-copy.py')

        if not 'getstats.py' in [ os.path.basename(file.name) for file in inputbox ]:
            _append_files(inputbox, 'getstats.py')

        if job.outputdata and job.outputdata._name == 'DQ2OutputDataset':
            if not job.outputdata.location:
                raise ApplicationConfigurationError(None,'j.outputdata.location is empty - Please specify a DQ2 output location - job not submitted!')
            if not File(os.path.join(os.path.dirname(__file__),'ganga-stage-in-out-dq2.py')) in inputbox:
                _append_files(inputbox,'ganga-stage-in-out-dq2.py')
                _append_files(inputbox,'dq2info.tar.gz')
                _append_files(inputbox,'libdcap.so')
            _append_files(inputbox,'ganga-joboption-parse.py')

        if job.inputsandbox:
            for file in job.inputsandbox:
                inputbox += [ file ]
        if app.user_area.name:
            if app.is_prepared is True:
                inputbox += [ File(app.user_area.name) ] 
            else:
                inputbox += [ File(os.path.join(os.path.join(shared_path,app.is_prepared.name),os.path.basename(app.user_area.name))) ]
        if app.group_area.name and string.find(app.group_area.name,"http")<0:
            if app.is_prepared is True:
                inputbox += [ File(app.group_area.name) ] 
            else:
                inputbox += [ File(os.path.join(os.path.join(shared_path,app.is_prepared.name),os.path.basename(app.group_area.name))) ]
   
#       prepare environment

        try:
            atlas_software = config['ATLAS_SOFTWARE']
        except ConfigError:
            raise ConfigError('No default location of ATLAS_SOFTWARE specified in the configuration.')

        if app.atlas_release=='' and app.atlas_project != "AthAnalysisBase":
            raise ApplicationConfigurationError(None,'j.application.atlas_release is empty - No ATLAS release version found. Run prepare() or specify a version explicitly.')
      
        environment={ 
            'ATLAS_RELEASE' : app.atlas_release,
            'ATHENA_OPTIONS' : athena_options,
            'ATLAS_SOFTWARE' : atlas_software,
            'ATHENA_USERSETUPFILE' : athena_usersetupfile,
            'ATLAS_PROJECT' : app.atlas_project,
            'ATLAS_EXETYPE' : app.atlas_exetype,
            'GANGA_VERSION' : configSystem['GANGA_VERSION'],
            'DQ2_SETUP_SCRIPT': configDQ2['setupScript']
        }

        # Set athena architecture: 32 or 64 bit
        environment['ATLAS_ARCH'] = '32'
        cmtconfig = app.atlas_cmtconfig
        if cmtconfig.find('x86_64')>=0:
            environment['ATLAS_ARCH'] = '64'

        environment['ATLAS_CMTCONFIG'] = app.atlas_cmtconfig
        environment['DCACHE_RA_BUFFER'] = str(config['DCACHE_RA_BUFFER'])
        
        if app.atlas_environment:
            for var in app.atlas_environment:
                vars=var.split('=')
                if len(vars)==2:
                    environment[vars[0]]=vars[1]

        if app.atlas_production and (app.atlas_project == 'AtlasPoint1' or app.atlas_release.find('12.')<=0):
            environment['ATLAS_PRODUCTION'] = app.atlas_production 

        if app.user_area.name: 
            environment['USER_AREA'] = os.path.basename(app.user_area.name)
        if app.group_area.name:
            if string.find(app.group_area.name,"http")>=0:
                environment['GROUP_AREA_REMOTE'] = "%s" % (app.group_area.name)
            else:
                environment['GROUP_AREA']=os.path.basename(app.group_area.name)

        if app.max_events:
            if (app.max_events != -999) and (app.max_events > -2):
                environment['ATHENA_MAX_EVENTS'] = str(app.max_events)

        if job.inputdata and job.inputdata._name == 'StagerDataset':

            if job.inputdata.type not in ['LOCAL']:

                try:
                    environment['X509CERTDIR']=os.environ['X509_CERT_DIR']
                except KeyError:
                    environment['X509CERTDIR']=''

                try:
                    proxy = os.environ['X509_USER_PROXY']
                except KeyError:
                    proxy = '/tmp/x509up_u%s' % os.getuid()

                REMOTE_PROXY = '%s:%s' % (socket.getfqdn(),proxy)
                environment['REMOTE_PROXY'] = REMOTE_PROXY

                try:
                    environment['GANGA_GLITE_UI']=configLCG['GLITE_SETUP']
                except:
                    pass

        if job.inputdata and job.inputdata._name == 'DQ2Dataset':
            if job.inputdata.dataset:
                datasetname = job.inputdata.dataset
                environment['DATASETNAME']=':'.join(datasetname)
                environment['DATASETLOCATION'] = ':'.join(job.inputdata.get_locations())
                environment['DQ2_URL_SERVER']=configDQ2['DQ2_URL_SERVER']
                environment['DQ2_URL_SERVER_SSL']=configDQ2['DQ2_URL_SERVER_SSL']
                #environment['DATASETTYPE']=job.inputdata.type
                # At present, DQ2 download is the only thing that works
                environment['DATASETTYPE']="DQ2_DOWNLOAD"
                if job.inputdata.accessprotocol:
                    environment['DQ2_LOCAL_PROTOCOL'] = job.inputdata.accessprotocol                

                try:
                    environment['X509CERTDIR']=os.environ['X509_CERT_DIR']
                except KeyError:
                    environment['X509CERTDIR']=''

                try:
                    proxy = os.environ['X509_USER_PROXY']
                except KeyError:
                    proxy = '/tmp/x509up_u%s' % os.getuid()

                REMOTE_PROXY = '%s:%s' % (socket.getfqdn(),proxy)
                environment['REMOTE_PROXY'] = REMOTE_PROXY
                try:
                    environment['GANGA_GLITE_UI']=configLCG['GLITE_SETUP']
                except:
                    pass

            else:
                raise ConfigError("j.inputdata.dataset='' - DQ2 dataset name needs to be specified.")
            
            if job.inputdata.tagdataset:
                environment['TAGDATASETNAME'] = ':'.join(job.inputdata.tagdataset)
                
        if job.outputdata and job.outputdata._name == 'DQ2OutputDataset':
            environment['DQ2_URL_SERVER']=configDQ2['DQ2_URL_SERVER']
            environment['DQ2_URL_SERVER_SSL']=configDQ2['DQ2_URL_SERVER_SSL']
            try:
                environment['X509CERTDIR']=os.environ['X509_CERT_DIR']
            except KeyError:
                environment['X509CERTDIR']=''
            try:
                proxy = os.environ['X509_USER_PROXY']
            except KeyError:
                proxy = '/tmp/x509up_u%s' % os.getuid()

            REMOTE_PROXY = '%s:%s' % (socket.getfqdn(),proxy)
            environment['REMOTE_PROXY'] = REMOTE_PROXY
            try:
                environment['GANGA_GLITE_UI']=configLCG['GLITE_SETUP']
            except:
                pass

        if hasattr(job.backend, 'extraopts'):
            if job.backend.extraopts.find('site=hh')>0:
                environment['DQ2_LOCAL_SITE_ID'] = 'DESY-HH_SCRATCHDISK'
            elif job.backend.extraopts.find('site=zn')>0:
                environment['DQ2_LOCAL_SITE_ID'] = 'DESY-ZN_SCRATCHDISK'
            else:
                environment['DQ2_LOCAL_SITE_ID'] = configDQ2['DQ2_LOCAL_SITE_ID']
        else:
            environment['DQ2_LOCAL_SITE_ID'] = configDQ2['DQ2_LOCAL_SITE_ID']

        exe = os.path.join(os.path.dirname(__file__), 'run-athena-local.sh')

#       output sandbox
        outputbox = [ ]
        outputGUIDs='output_guids'
        outputLOCATION='output_location'
        outputDATA='output_data'
        outputbox.append( outputGUIDs )
        outputbox.append( outputLOCATION )
        outputbox.append( outputDATA )
        outputbox.append('stats.pickle')
        if (job.outputsandbox):
            for file in job.outputsandbox:
                outputbox += [ file ]

        ## retrieve the FileStager log
        if job.inputdata and job.inputdata._name in [ 'DQ2Dataset'] and job.inputdata.type in ['FILE_STAGER']:
            outputbox += ['FileStager.out', 'FileStager.err']

        # Switch for DEBUG print-out in logfiles
        if app.useNoDebugLogs:
            environment['GANGA_LOG_DEBUG'] = '0'
        else:
            environment['GANGA_LOG_DEBUG'] = '1'

        return StandardJobConfig(File(exe), inputbox, [], outputbox, environment)
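
master_prepare locates its helper scripts ('athena-utility.sh', 'run-athena-local.sh') next to the module that defines it, via os.path.dirname(__file__). A minimal standalone sketch of the same pattern, with a hypothetical script name:

import os

def script_beside_module(name):
    # Files shipped alongside a module can be located through the module's
    # own __file__, independent of the process working directory.
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), name)

exe = script_beside_module('run-wrapper.sh')  # hypothetical file name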

Example 22

Project: tp-libvirt Source File: virsh_vol_create.py
def run(test, params, env):
    """
    Test virsh vol-create command to cover the following matrix:
    pool_type = [dir, fs, netfs]
    volume_format = [raw, bochs, cloop, cow, dmg, iso, qcow, qcow2, qed,
                     vmdk, vpc]

    pool_type = [disk]
    volume_format = [none, linux, fat16, fat32, linux-swap, linux-lvm,
                     linux-raid, extended]

    pool_type = [logical]
    volume_format = [none]

    pool_type = [iscsi, scsi]
    Format type is not supported

    TODO:
    pool_type = [rbd, glusterfs]

    Reference: http://www.libvirt.org/storage.html
    """

    src_pool_type = params.get("src_pool_type")
    src_pool_target = params.get("src_pool_target")
    src_pool_format = params.get("src_pool_format", "")
    pool_vol_num = int(params.get("src_pool_vol_num", '1'))
    src_emulated_image = params.get("src_emulated_image")
    extra_option = params.get("extra_option", "")
    prefix_vol_name = params.get("vol_name", "vol_create_test")
    vol_format = params.get("vol_format", "raw")
    vol_capacity = params.get("vol_capacity", 1048576)
    vol_allocation = params.get("vol_allocation", 1048576)
    image_size = params.get("emulate_image_size", "1G")
    lazy_refcounts = "yes" == params.get("lazy_refcounts")
    status_error = "yes" == params.get("status_error", "no")
    by_xml = "yes" == params.get("create_vol_by_xml", "yes")
    incomplete_target = "yes" == params.get("incomplete_target", "no")

    if not libvirt_version.version_compare(1, 0, 0):
        if "--prealloc-metadata" in extra_option:
            raise error.TestNAError("metadata preallocation not supported in"
                                    " current libvirt version.")
        if incomplete_target:
            raise error.TestNAError("It does not support generate target "
                                    "path in thi libvirt version.")

    pool_type = ['dir', 'disk', 'fs', 'logical', 'netfs', 'iscsi', 'scsi']
    if src_pool_type not in pool_type:
        raise error.TestNAError("pool type %s not in supported type list: %s" %
                                (src_pool_type, pool_type))

    # libvirt acl polkit related params
    if not libvirt_version.version_compare(1, 1, 1):
        if params.get('setup_libvirt_polkit') == 'yes':
            raise error.TestNAError("API acl test not supported in current"
                                    " libvirt version.")
    uri = params.get("virsh_uri")
    unprivileged_user = params.get('unprivileged_user')
    if unprivileged_user:
        if unprivileged_user.count('EXAMPLE'):
            unprivileged_user = 'testacl'

    # Stop multipathd to avoid pool-start failures (for fs-like pools, the
    # newly added disk may be in use by device-mapper, so starting the pool
    # would report a "disk already mounted" error).
    multipathd = service.Factory.create_service("multipathd")
    multipathd_status = multipathd.status()
    if multipathd_status:
        multipathd.stop()

    # Set the volume xml attribute dictionary: extract all params starting
    # with 'vol_' (which configure the volume xml), except 'lazy_refcounts'.
    vol_arg = {}
    for key in params.keys():
        if key.startswith('vol_'):
            if key[4:] in ['capacity', 'allocation', 'owner', 'group']:
                vol_arg[key[4:]] = int(params[key])
            else:
                vol_arg[key[4:]] = params[key]
    vol_arg['lazy_refcounts'] = lazy_refcounts

    def post_process_vol(ori_vol_path):
        """
        Create or deactivate a volume without libvirt

        :param ori_vol_path: Full path of an original volume
        :return: Volume name for checking
        """
        process_vol_name = params.get("process_vol_name", "process_vol")
        process_vol_options = params.get("process_vol_options", "")
        process_vol_capacity = params.get("process_vol_capacity", vol_capacity)
        process_vol_cmd = ""
        unsupport_err = "Unsupport do '%s %s' in this test" % (process_vol_by,
                                                               process_vol_type)
        if process_vol_by == "lvcreate":
            process_vol_cmd = "lvcreate -L %s " % process_vol_capacity
            if process_vol_type == "thin":
                if not process_vol_options:
                    process_vol_options = "-T "
                process_vol_cmd += "%s " % process_vol_options
                processthin_pool_name = params.get("processthin_pool_name", "thinpool")
                processthin_vol_name = params.get("processthin_vol_name", "thinvol")
                process_vol_capacity = params.get("process_vol_capacity", "1G")
                process_vol_cmd += "%s/%s " % (os.path.dirname(ori_vol_path),
                                               processthin_pool_name)
                process_vol_cmd += "-V %s " % process_vol_capacity
                process_vol_cmd += "-n %s " % processthin_vol_name
                process_vol_name = processthin_vol_name
            elif process_vol_type == "snapshot":
                if not process_vol_options:
                    process_vol_options = "-s "
                process_vol_cmd += "%s " % process_vol_options
                process_vol_cmd += "-n %s " % process_vol_name
                process_vol_cmd += "%s " % (ori_vol_path)
            else:
                logging.error(unsupport_err)
                return
        elif process_vol_by == "qemu-img" and process_vol_type == "create":
            process_vol_cmd = "qemu-img create "
            process_vol_path = os.path.dirname(ori_vol_path) + "/"
            process_vol_path += process_vol_name
            process_vol_cmd += "%s " % process_vol_options
            process_vol_cmd += "%s " % process_vol_path
            process_vol_cmd += "%s " % process_vol_capacity
        elif process_vol_by == "lvchange" and process_vol_type == "deactivate":
            process_vol_cmd = "lvchange %s " % ori_vol_path
            if not process_vol_options:
                process_vol_options = "-an"
            process_vol_cmd += process_vol_options
        else:
            logging.error(unsupport_err)
            return
        rst = process.run(process_vol_cmd, ignore_status=True, shell=True)
        if rst.exit_status:
            if "Snapshots of snapshots are not supported" in rst.stderr:
                logging.debug("%s is already a snapshot volume", ori_vol_path)
                process_vol_name = os.path.basename(ori_vol_path)
            else:
                logging.error(rst.stderr)
                return
        return process_vol_name

    def check_vol(pool_name, vol_name, expect_exist=True):
        """
        Check volume vol_name in pool pool_name
        """
        src_volumes = src_pv.list_volumes().keys()
        logging.debug("Current volumes in %s: %s", pool_name, src_volumes)
        if expect_exist:
            if vol_name not in src_volumes:
                raise error.TestFail("Can't find volume %s in pool %s"
                                     % (vol_name, pool_name))
            # check format in volume xml
            post_xml = volxml.new_from_vol_dumpxml(vol_name, pool_name)
            logging.debug("Volume %s XML: %s" % (vol_name,
                                                 post_xml.xmltreefile))
            if 'format' in post_xml.keys() and vol_format is not None:
                if post_xml.format != vol_format:
                    raise error.TestFail("Volume format %s is not expected"
                                         % vol_format + " as defined.")
        else:
            if vol_name in src_volumes:
                raise error.TestFail("Find volume %s in pool %s, but expect not"
                                     % (vol_name, pool_name))

    fmt_err0 = "Unknown file format '%s'" % vol_format
    fmt_err1 = "Formatting or formatting option not "
    fmt_err1 += "supported for file format '%s'" % vol_format
    fmt_err2 = "Driver '%s' does not support " % vol_format
    fmt_err2 += "image creation"
    fmt_err_list = [fmt_err0, fmt_err1, fmt_err2]
    skip_msg = "Volume format '%s' is not supported by qemu-img" % vol_format
    vol_path_list = []
    try:
        # Create the src pool
        src_pool_name = "virt-%s-pool" % src_pool_type
        pvt = utlv.PoolVolumeTest(test, params)
        pvt.pre_pool(src_pool_name, src_pool_type, src_pool_target,
                     src_emulated_image, image_size=image_size,
                     source_format=src_pool_format)

        src_pv = libvirt_storage.PoolVolume(src_pool_name)
        # Print current pools for debugging
        logging.debug("Current pools:%s",
                      libvirt_storage.StoragePool().list_pools())

        # Create volumes by virsh in a loop
        while pool_vol_num > 0:
            # Set volume xml file
            vol_name = prefix_vol_name + "_%s" % pool_vol_num
            pool_vol_num -= 1
            if by_xml:
                # According to BZ#1138523, we need to inspect the right name
                # (disk partition) for the new volume
                if src_pool_type == "disk":
                    vol_name = utlv.new_disk_vol_name(src_pool_name)
                    if vol_name is None:
                        raise error.TestError("Fail to generate volume name")
                vol_arg['name'] = vol_name
                volxml = libvirt_xml.VolXML()
                newvol = volxml.new_vol(**vol_arg)
                vol_xml = newvol['xml']
                if params.get('setup_libvirt_polkit') == 'yes':
                    process.run("chmod 666 %s" % vol_xml, ignore_status=True,
                                shell=True)

                # Run virsh_vol_create to create vol
                logging.debug("Create volume from XML: %s" % newvol.xmltreefile)
                cmd_result = virsh.vol_create(
                    src_pool_name, vol_xml, extra_option,
                    unprivileged_user=unprivileged_user, uri=uri,
                    ignore_status=True, debug=True)
            else:
                # Run virsh_vol_create_as to create_vol
                cmd_result = virsh.vol_create_as(
                    vol_name, src_pool_name, vol_capacity, vol_allocation,
                    vol_format, unprivileged_user=unprivileged_user, uri=uri,
                    ignore_status=True, debug=True)
            # Check result
            try:
                utlv.check_exit_status(cmd_result, status_error)
                check_vol(src_pool_name, vol_name, not status_error)
                if not status_error:
                    vol_path = virsh.vol_path(vol_name,
                                              src_pool_name).stdout.strip()
                    logging.debug("Full path of %s: %s", vol_name, vol_path)
                    vol_path_list.append(vol_path)
            except error.TestFail, e:
                stderr = cmd_result.stderr
                if any(err in stderr for err in fmt_err_list):
                    raise error.TestNAError(skip_msg)
                else:
                    raise e
        # Post process vol by other programs
        process_vol_by = params.get("process_vol_by")
        process_vol_type = params.get("process_vol_type", "")
        expect_vol_exist = "yes" == params.get("expect_vol_exist", "yes")
        if process_vol_by and vol_path_list:
            process_vol = post_process_vol(vol_path_list[0])
            if process_vol is not None:
                try:
                    virsh.pool_refresh(src_pool_name, ignore_status=False)
                    check_vol(src_pool_name, process_vol, expect_vol_exist)
                except (process.CmdError, error.TestFail), e:
                    if process_vol_type == "thin":
                        logging.error(e)
                        raise error.TestNAError("You may encounter bug BZ#1060287")
                    else:
                        raise e
            else:
                raise error.TestFail("Post process volume failed")
    finally:
        # Cleanup
        # For old lvm2 versions (2.02.106 or earlier), deactivating a volume
        # group (destroying a libvirt logical pool) fails if it holds a
        # deactivated lv snapshot, so activate it manually before destroying
        # the pool.
        if src_pool_type == 'logical' and vol_path_list:
            vg_name = vol_path_list[0].split('/')[2]
            process.run("lvchange -ay %s" % vg_name, shell=True)
        try:
            pvt.cleanup_pool(src_pool_name, src_pool_type, src_pool_target,
                             src_emulated_image)
        except error.TestFail, detail:
            logging.error(str(detail))
        if multipathd_status:
            multipathd.start()
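
post_process_vol above builds the path of a new volume from the directory of an existing one. A minimal sketch of that sibling-path pattern (the paths are illustrative):

import os

def sibling_path(existing, name):
    # Place a new file in the same directory as an existing one.
    return os.path.join(os.path.dirname(existing), name)

print(sibling_path('/var/lib/libvirt/images/vol_create_test_1', 'process_vol'))
# /var/lib/libvirt/images/process_vol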

Example 23

Project: paramnormal Source File: plot_directive.py
def run(arguments, content, options, state_machine, state, lineno):
    # The user may provide a filename *or* Python code content, but not both
    if arguments and content:
        raise RuntimeError("plot:: directive can't have both args and content")

    document = state_machine.document
    config = document.settings.env.config
    nofigs = 'nofigs' in options

    options.setdefault('include-source', config.plot_include_source)
    keep_context = 'context' in options
    context_opt = None if not keep_context else options['context']

    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if len(arguments):
        if not config.plot_basedir:
            source_file_name = os.path.join(setup.app.builder.srcdir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))

        # If there is content, it will be passed as a caption.
        caption = '\n'.join(content)

        # If the optional function name is provided, use it
        if len(arguments) == 2:
            function_name = arguments[1]
        else:
            function_name = None

        with io.open(source_file_name, 'r', encoding='utf-8') as fd:
            code = fd.read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)
        function_name = None
        caption = ''

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke on foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if 'format' in options:
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    # get rid of .. in paths, also changes pathsep
    # see note in Python docs for warning about symbolic links on Windows.
    # need to compare source and dest paths at end
    build_dir = os.path.normpath(build_dir)

    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir) # no problem here for me, but just use built-ins

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = render_figures(code,
                                 source_file_name,
                                 build_dir,
                                 output_base,
                                 keep_context,
                                 function_name,
                                 config,
                                 context_reset=context_opt == 'reset',
                                 close_figs=context_opt == 'close-figs')
        errors = []
    except PlotError as err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
                                                source_file_name, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # Properly indent the caption
    caption = '\n'.join('      ' + line.strip()
                        for line in caption.split('\n'))

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        if nofigs:
            images = []

        opts = [':%s: %s' % (key, val) for key, val in six.iteritems(options)
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        only_html = ".. only:: html"
        only_latex = ".. only:: latex"
        only_texinfo = ".. only:: texinfo"

        # Not-None src_link signals the need for a source link in the generated
        # html
        if j == 0 and config.plot_html_show_source_link:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            config.plot_template or TEMPLATE,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            only_texinfo=only_texinfo,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats and not nofigs,
            caption=caption)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory, if necessary
    if not os.path.exists(dest_dir):
        cbook.mkdirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                destimg = os.path.join(dest_dir, os.path.basename(fn))
                if fn != destimg:
                    shutil.copyfile(fn, destimg)

    # copy script (if necessary)
    target_name = os.path.join(dest_dir, output_base + source_ext)
    with io.open(target_name, 'w', encoding="utf-8") as f:
        if source_file_name == rst_file:
            code_escaped = unescape_doctest(code)
        else:
            code_escaped = code
        f.write(code_escaped)

    return errors
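
The directive derives its build and destination directories by mirroring the source file's directory fragment under the build tree, combining relpath with os.path.dirname. A minimal sketch of that combination (paths assume a POSIX layout and are illustrative):

import os

def mirrored_build_dir(source_file, conf_dir, build_root):
    # Reproduce the source file's directory structure under build_root.
    rel_dir = os.path.dirname(os.path.relpath(source_file, conf_dir))
    return os.path.normpath(os.path.join(build_root, 'plot_directive', rel_dir))

print(mirrored_build_dir('/docs/guide/intro.rst', '/docs', '/docs/_build'))
# /docs/_build/plot_directive/guide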

Example 24

Project: cartopy Source File: plot_directive.py
def run(arguments, content, options, state_machine, state, lineno):
    # The user may provide a filename *or* Python code content, but not both
    if arguments and content:
        raise RuntimeError("plot:: directive can't have both args and content")

    document = state_machine.document
    config = document.settings.env.config
    nofigs = 'nofigs' in options

    options.setdefault('include-source', config.plot_include_source)
    keep_context = 'context' in options
    context_opt = None if not keep_context else options['context']

    rst_file = document.attributes['source']
    rst_dir = os.path.dirname(rst_file)

    if len(arguments):
        if not config.plot_basedir:
            source_file_name = os.path.join(setup.app.builder.srcdir,
                                            directives.uri(arguments[0]))
        else:
            source_file_name = os.path.join(setup.confdir, config.plot_basedir,
                                            directives.uri(arguments[0]))

        # If there is content, it will be passed as a caption.
        caption = '\n'.join(content)

        # If the optional function name is provided, use it
        if len(arguments) == 2:
            function_name = arguments[1]
        else:
            function_name = None

        with io.open(source_file_name, 'r', encoding='utf-8') as fd:
            code = fd.read()
        output_base = os.path.basename(source_file_name)
    else:
        source_file_name = rst_file
        code = textwrap.dedent("\n".join(map(str, content)))
        counter = document.attributes.get('_plot_counter', 0) + 1
        document.attributes['_plot_counter'] = counter
        base, ext = os.path.splitext(os.path.basename(source_file_name))
        output_base = '%s-%d.py' % (base, counter)
        function_name = None
        caption = ''

    base, source_ext = os.path.splitext(output_base)
    if source_ext in ('.py', '.rst', '.txt'):
        output_base = base
    else:
        source_ext = ''

    # ensure that LaTeX includegraphics doesn't choke on foo.bar.pdf filenames
    output_base = output_base.replace('.', '-')

    # is it in doctest format?
    is_doctest = contains_doctest(code)
    if 'format' in options:
        if options['format'] == 'python':
            is_doctest = False
        else:
            is_doctest = True

    # determine output directory name fragment
    source_rel_name = relpath(source_file_name, setup.confdir)
    source_rel_dir = os.path.dirname(source_rel_name)
    while source_rel_dir.startswith(os.path.sep):
        source_rel_dir = source_rel_dir[1:]

    # build_dir: where to place output files (temporarily)
    build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
                             'plot_directive',
                             source_rel_dir)
    # get rid of .. in paths, also changes pathsep
    # see note in Python docs for warning about symbolic links on Windows.
    # need to compare source and dest paths at end
    build_dir = os.path.normpath(build_dir)

    if not os.path.exists(build_dir):
        os.makedirs(build_dir)

    # output_dir: final location in the builder's directory
    dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
                                            source_rel_dir))
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir) # no problem here for me, but just use built-ins

    # how to link to files from the RST file
    dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
                                 source_rel_dir).replace(os.path.sep, '/')
    try:
        build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    except ValueError:
        # on Windows, relpath raises ValueError when path and start are on
        # different mounts/drives
        build_dir_link = build_dir
    source_link = dest_dir_link + '/' + output_base + source_ext

    # make figures
    try:
        results = render_figures(code,
                                 source_file_name,
                                 build_dir,
                                 output_base,
                                 keep_context,
                                 function_name,
                                 config,
                                 context_reset=context_opt == 'reset',
                                 close_figs=context_opt == 'close-figs')
        errors = []
    except PlotError as err:
        reporter = state.memo.reporter
        sm = reporter.system_message(
            2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
                                                source_file_name, err),
            line=lineno)
        results = [(code, [])]
        errors = [sm]

    # Properly indent the caption
    caption = '\n'.join('      ' + line.strip()
                        for line in caption.split('\n'))

    # generate output restructuredtext
    total_lines = []
    for j, (code_piece, images) in enumerate(results):
        if options['include-source']:
            if is_doctest:
                lines = ['']
                lines += [row.rstrip() for row in code_piece.split('\n')]
            else:
                lines = ['.. code-block:: python', '']
                lines += ['    %s' % row.rstrip()
                          for row in code_piece.split('\n')]
            source_code = "\n".join(lines)
        else:
            source_code = ""

        if nofigs:
            images = []

        opts = [':%s: %s' % (key, val) for key, val in six.iteritems(options)
                if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]

        only_html = ".. only:: html"
        only_latex = ".. only:: latex"
        only_texinfo = ".. only:: texinfo"

        # Not-None src_link signals the need for a source link in the generated
        # html
        if j == 0 and config.plot_html_show_source_link:
            src_link = source_link
        else:
            src_link = None

        result = format_template(
            config.plot_template or TEMPLATE,
            dest_dir=dest_dir_link,
            build_dir=build_dir_link,
            source_link=src_link,
            multi_image=len(images) > 1,
            only_html=only_html,
            only_latex=only_latex,
            only_texinfo=only_texinfo,
            options=opts,
            images=images,
            source_code=source_code,
            html_show_formats=config.plot_html_show_formats and not nofigs,
            caption=caption)

        total_lines.extend(result.split("\n"))
        total_lines.extend("\n")

    if total_lines:
        state_machine.insert_input(total_lines, source=source_file_name)

    # copy image files to builder's output directory, if necessary
    if not os.path.exists(dest_dir):
        cbook.mkdirs(dest_dir)

    for code_piece, images in results:
        for img in images:
            for fn in img.filenames():
                destimg = os.path.join(dest_dir, os.path.basename(fn))
                if fn != destimg:
                    shutil.copyfile(fn, destimg)

    # copy script (if necessary)
    target_name = os.path.join(dest_dir, output_base + source_ext)
    with io.open(target_name, 'w', encoding="utf-8") as f:
        if source_file_name == rst_file:
            code_escaped = unescape_doctest(code)
        else:
            code_escaped = code
        f.write(code_escaped)

    return errors
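
This variant differs from the previous example mainly in guarding the relpath call: on Windows, os.path.relpath raises ValueError when the two paths live on different drives. A minimal sketch of that guard:

import os

def safe_rel_link(build_dir, rst_dir):
    # Fall back to the absolute path when no relative path exists
    # (e.g. build output and sources on different Windows drives).
    try:
        return os.path.relpath(build_dir, rst_dir).replace(os.path.sep, '/')
    except ValueError:
        return build_dir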

Example 25

Project: RMG-Py Source File: statmech.py
Function: load
    def load(self):
        """
        Load the statistical mechanics parameters for each conformer from
        the associated files on disk. Creates :class:`Conformer` objects for
        each conformer and appends them to the list of conformers on the
        species object.
        """
        logging.info('Loading statistical mechanics parameters for {0}...'.format(self.species.label))
        
        path = self.path
        
        TS = isinstance(self.species, TransitionState)
    
        global_context = {
            '__builtins__': None,
        }
        local_context = {
            '__builtins__': None,
            'True': True,
            'False': False,
            'HinderedRotor': hinderedRotor,
            # File formats
            'GaussianLog': GaussianLog,
            'QchemLog': QchemLog,
            'MoleProLog': MoleProLog,
            'ScanLog': ScanLog,
        }
    
        directory = os.path.abspath(os.path.dirname(path))
    
        with open(path, 'r') as f:
            try:
                exec f in global_context, local_context
            except (NameError, TypeError, SyntaxError), e:
                logging.error('The species file {0} was invalid:'.format(path))
                raise
        
        try:
            atoms = local_context['atoms']
        except KeyError:
            raise InputError('Required attribute "atoms" not found in species file {0!r}.'.format(path))
        
        try:
            bonds = local_context['bonds']
        except KeyError:
            bonds = {}
            
        try:
            linear = local_context['linear']
        except KeyError:
            raise InputError('Required attribute "linear" not found in species file {0!r}.'.format(path))
        
        try:
            externalSymmetry = local_context['externalSymmetry']
        except KeyError:
            raise InputError('Required attribute "externalSymmetry" not found in species file {0!r}.'.format(path))
        
        try:
            spinMultiplicity = local_context['spinMultiplicity']
        except KeyError:
            raise InputError('Required attribute "spinMultiplicity" not found in species file {0!r}.'.format(path))
       
        try:
            opticalIsomers = local_context['opticalIsomers']
        except KeyError:
            raise InputError('Required attribute "opticalIsomers" not found in species file {0!r}.'.format(path))
        
        try:
            energy = local_context['energy']
        except KeyError:
            raise InputError('Required attribute "energy" not found in species file {0!r}.'.format(path))
        if isinstance(energy, dict):
            try:
                energy = energy[self.modelChemistry]
            except KeyError:
                raise InputError('Model chemistry {0!r} not found in the dictionary of energy values in species file {1!r}.'.format(self.modelChemistry, path))
        if isinstance(energy, GaussianLog):
            energyLog = energy; E0 = None
            energyLog.path = os.path.join(directory, energyLog.path)
        elif isinstance(energy, QchemLog):
            energyLog = energy; E0 = None
            energyLog.path = os.path.join(directory, energyLog.path)
        elif isinstance(energy, MoleProLog):
            energyLog = energy; E0 = None
            energyLog.path = os.path.join(directory, energyLog.path)
        elif isinstance(energy, float):
            energyLog = None; E0 = energy
        
        try:
            geomLog = local_context['geometry']
        except KeyError:
            raise InputError('Required attribute "geometry" not found in species file {0!r}.'.format(path))
        geomLog.path = os.path.join(directory, geomLog.path)
    
        try:
            statmechLog = local_context['frequencies']
        except KeyError:
            raise InputError('Required attribute "frequencies" not found in species file {0!r}.'.format(path))
        statmechLog.path = os.path.join(directory, statmechLog.path)
        
        if 'frequencyScaleFactor' in local_context:
            logging.warning('Ignoring frequency scale factor in species file {0!r}.'.format(path))
        
        try:
            rotors = local_context['rotors']
        except KeyError:
            rotors = []
        
        # But don't consider hindered rotors if flag is not set
        if not self.includeHinderedRotors:
            rotors = []
        
        logging.debug('    Reading molecular degrees of freedom...')
        conformer = statmechLog.loadConformer(symmetry=externalSymmetry, spinMultiplicity=spinMultiplicity, opticalIsomers=opticalIsomers)
        
        logging.debug('    Reading optimized geometry...')
        coordinates, number, mass = geomLog.loadGeometry()
        conformer.coordinates = (coordinates,"angstroms") 
        conformer.number = number
        conformer.mass = (mass,"amu")
        
        logging.debug('    Reading energy...')
        # The E0 that is read from the log file is without the ZPE and corresponds to E_elec
        if E0 is None:
            E0 = energyLog.loadEnergy(self.frequencyScaleFactor)
        else:
            E0 = E0 * constants.E_h * constants.Na         # Hartree/particle to J/mol
        E0 = applyEnergyCorrections(E0, self.modelChemistry, atoms, bonds if self.applyBondEnergyCorrections else {})
        ZPE = statmechLog.loadZeroPointEnergy() * self.frequencyScaleFactor
        
        # The E0_withZPE at this stage contains the ZPE
        E0_withZPE = E0 + ZPE
        
        logging.debug('         Scaling factor used = {0:g}'.format(self.frequencyScaleFactor))
        logging.debug('         ZPE (0 K) = {0:g} kcal/mol'.format(ZPE / 4184.))
        logging.debug('         E0 (0 K) = {0:g} kcal/mol'.format(E0_withZPE / 4184.))
       
        conformer.E0 = (E0_withZPE*0.001,"kJ/mol")
        
        # If loading a transition state, also read the imaginary frequency
        if TS:
            self.species.frequency = (statmechLog.loadNegativeFrequency() * self.frequencyScaleFactor, "cm^-1")

        # Read and fit the 1D hindered rotors if applicable
        # If rotors are found, the vibrational frequencies are also
        # recomputed with the torsional modes removed
        F = statmechLog.loadForceConstantMatrix()
        if F is not None and len(mass) > 1 and len(rotors) > 0:
            
            logging.debug('    Fitting {0} hindered rotors...'.format(len(rotors)))
            rotorCount = 0
            for scanLog, pivots, top, symmetry, fit in rotors:
                
                # Load the hindered rotor scan energies
                if isinstance(scanLog, GaussianLog):
                    scanLog.path = os.path.join(directory, scanLog.path)
                    Vlist, angle = scanLog.loadScanEnergies()
                    scanLogOutput = ScanLog(os.path.join(directory, '{0}_rotor_{1}.txt'.format(self.species.label, rotorCount+1)))
                    scanLogOutput.save(angle, Vlist)
                elif isinstance(scanLog, QchemLog):
                    scanLog.path = os.path.join(directory, scanLog.path)
                    Vlist, angle = scanLog.loadScanEnergies()
                    scanLogOutput = ScanLog(os.path.join(directory, '{0}_rotor_{1}.txt'.format(self.species.label, rotorCount+1)))
                    scanLogOutput.save(angle, Vlist)
                elif isinstance(scanLog, ScanLog):
                    scanLog.path = os.path.join(directory, scanLog.path)
                    angle, Vlist = scanLog.load()
                else:
                    raise Exception('Invalid log file type {0} for scan log.'.format(scanLog.__class__))
                    
                inertia = conformer.getInternalReducedMomentOfInertia(pivots, top) * constants.Na * 1e23
                
                cosineRotor = HinderedRotor(inertia=(inertia,"amu*angstrom^2"), symmetry=symmetry)
                cosineRotor.fitCosinePotentialToData(angle, Vlist)
                fourierRotor = HinderedRotor(inertia=(inertia,"amu*angstrom^2"), symmetry=symmetry)
                fourierRotor.fitFourierPotentialToData(angle, Vlist)
                
                Vlist_cosine = numpy.zeros_like(angle)
                Vlist_fourier = numpy.zeros_like(angle)
                for i in range(angle.shape[0]):
                    Vlist_cosine[i] = cosineRotor.getPotential(angle[i])
                    Vlist_fourier[i] = fourierRotor.getPotential(angle[i])
                
                if fit == 'cosine':
                    rotor = cosineRotor
                elif fit == 'fourier':
                    rotor = fourierRotor
                elif fit == 'best':
                    rms_cosine = numpy.sqrt(numpy.sum((Vlist_cosine - Vlist) * (Vlist_cosine - Vlist)) / (len(Vlist) - 1)) / 4184.
                    rms_fourier = numpy.sqrt(numpy.sum((Vlist_fourier - Vlist) * (Vlist_fourier - Vlist)) / (len(Vlist) - 1)) / 4184.

                    # Keep the rotor with the most accurate potential
                    rotor = cosineRotor if rms_cosine < rms_fourier else fourierRotor
                    # However, keep the cosine rotor if it is accurate enough,
                    # the fourier rotor is not significantly more accurate, and
                    # the cosine rotor has the correct symmetry
                    if rms_cosine < 0.05 and rms_cosine / rms_fourier < 2.0 and symmetry == cosineRotor.symmetry:
                        rotor = cosineRotor

                conformer.modes.append(rotor)

                self.plotHinderedRotor(angle, Vlist, cosineRotor, fourierRotor, rotor, rotorCount, directory)

                rotorCount += 1
                       
            logging.debug('    Determining frequencies from reduced force constant matrix...')
            frequencies = numpy.array(projectRotors(conformer, F, rotors, linear, TS))
            
            # The frequencies have changed after projection, hence we need to recompute the ZPE
            # We might need to apply the scaling factor to the frequencies
            ZPE = self.getZPEfromfrequencies(frequencies)
            E0_withZPE = E0 + ZPE
            # Reset the E0 of the conformer
            conformer.E0 = (E0_withZPE*0.001,"kJ/mol")

        elif len(conformer.modes) > 2:
            frequencies = conformer.modes[2].frequencies.value_si
            rotors = numpy.array([])
        else:
            frequencies = numpy.array([])
            rotors = numpy.array([])
    
        for mode in conformer.modes:
            if isinstance(mode, HarmonicOscillator):
                mode.frequencies = (frequencies * self.frequencyScaleFactor,"cm^-1")
        
        self.species.conformer = conformer
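
load() resolves every log file named inside a species file relative to that file's own directory, via os.path.abspath(os.path.dirname(path)). A minimal sketch of the pattern (file names are illustrative):

import os

def resolve_relative_to(input_file, referenced):
    # Interpret a path found inside an input file as relative to the
    # directory containing that input file; an absolute path passes
    # through unchanged, since os.path.join then discards the left side.
    directory = os.path.abspath(os.path.dirname(input_file))
    return os.path.join(directory, referenced)

path = resolve_relative_to('species/ethane.py', 'scans/rotor_1.log')
# <cwd>/species/scans/rotor_1.log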

Example 26

Project: pwn_plug_sources Source File: targetScanner.py
    def identifyVuln(self, URL, Params, VulnParam, PostData, Language, isPost=False, blindmode=None, isUnix=None):
        xml2config = self.config["XML2CONFIG"]
        
        if (blindmode == None):

            script = None
            scriptpath = None
            pre = None
            
            langClass = xml2config.getAllLangSets()[Language]
            
            if (not isPost):
                self._log("[%s] Identifying Vulnerability '%s' with Parameter '%s'..."%(Language, URL, VulnParam), self.LOG_ALWAYS)
            else:
                self._log("[%s] Identifying Vulnerability '%s' with POST-Parameter '%s'..."%(Language, URL, VulnParam), self.LOG_ALWAYS)

            tmpurl = URL
            PostHax = PostData
            rndStr = self.getRandomStr()

            if (not isPost):
                tmpurl = tmpurl.replace("%s=%s"%(VulnParam,Params[VulnParam]), "%s=%s"%(VulnParam, rndStr))
            else:
                PostHax = PostHax.replace("%s=%s"%(VulnParam,Params[VulnParam]), "%s=%s"%(VulnParam, rndStr))

            RE_SUCCESS_MSG = re.compile(langClass.getSniper()%(rndStr), re.DOTALL)

            code = self.doPostRequest(tmpurl, PostHax)
            if (code == None):
                self._log("Identification of vulnerability failed. (code == None)", self.LOG_ERROR)
                return None
                
            m = RE_SUCCESS_MSG.search(code)
            if (m == None):
                self._log("Identification of vulnerability failed. (m == None)", self.LOG_ERROR)
                return None


            r = report(URL, Params, VulnParam)
            r.setPost(isPost)
            r.setPostData(PostData)
            
            s = None
            for sp_err_msg in langClass.getIncludeDetectors():
                RE_SCRIPT_PATH = re.compile(sp_err_msg)
                s = RE_SCRIPT_PATH.search(code)
                if (s != None): break
            if (s == None):
                self._log("Failed to retrieve script path.", self.LOG_WARN)

                print "[MINOR BUG FOUND]"
                print "------------------------------------------------------"
                print "It's possible that fimap was unable to retrieve the scriptpath"
                print "because the regex for this kind of error message is missing."
                a = raw_input("Do you want to help me and send the URL of the site? [y = Print Info/N = Discard]")
                if (a=="y" or a=="Y"):
                    print "-----------SEND THIS TO '[email protected]'-----------"
                    print "SUBJECT: fimap Regex"
                    print "ERROR  : Failed to retrieve script path."
                    print "URL    : " + URL
                    print "-----------------------------------------------------------"
                    raw_input("Copy it and press enter to proceed with scanning...")
                else:
                    print "No problem! I'll continue with your scan..."

                return(None)
            else:
                script = s.group('script')
                if (script != None and script[1] == ":"): # Windows detection quick hack
                    scriptpath = script[:script.rfind("\\")]
                    r.setWindows()
                elif (script != None and script.startswith("\\\\")):
                    scriptpath = script[:script.rfind("\\")]
                    r.setWindows()
                else:
                    scriptpath = os.path.dirname(script)
                    if (scriptpath == None or scriptpath == ""):
                        self._log("Scriptpath is empty! Assuming that we are on toplevel.", self.LOG_WARN)
                        scriptpath = "/"
                        script = "/" + script

                # Check if scriptpath was received correctly.
                if(scriptpath!=""):
                    self._log("Scriptpath received: '%s'" %(scriptpath), self.LOG_INFO)
                    r.setServerPath(scriptpath)
                    r.setServerScript(script)


            if (r.isWindows()):
                self._log("Operating System is 'Windows'.", self.LOG_INFO)
            else:
                self._log("Operating System is 'Unix-Like'.", self.LOG_INFO)


            errmsg = m.group("incname")

            if (errmsg == rndStr):
                r.setPrefix("")
                r.setSurfix("")
            else:
                tokens = errmsg.split(rndStr)
                pre = tokens[0]
                addSlash = False
                if (pre == ""):
                    pre = "/"
                #else:
                #    if pre[-1] != "/":
                #       addSlash = True


                rootdir = None
                
                if (pre[0] != "/"):
                    if (r.isUnix()):
                        pre = posixpath.join(r.getServerPath(), pre)
                        pre = posixpath.normpath(pre)
                        rootdir = "/"
                        pre = self.relpath_unix(rootdir, pre)
                    else:
                        pre = ntpath.join(r.getServerPath(), pre)
                        pre = ntpath.normpath(pre)
                        if (pre[1] == ":"):
                            rootdir = pre[0:3]
                        pre = self.relpath_win(rootdir, pre)
                else:
                    pre = self.relpath_unix("/", pre)
                if addSlash: pre = rootdir + pre
                
                #Quick fix for increasing success :P
                if (pre != "."):
                    pre = "/" + pre
                
                sur = tokens[1]
                if (pre == "."): pre = ""
                r.setPrefix(pre)
                r.setSurfix(sur)

                if (sur != ""):
                    self._log("Trying NULL-Byte Poisoning to get rid of the suffix...", self.LOG_INFO)
                    tmpurl = URL
                    tmpurl = tmpurl.replace("%s=%s"%(VulnParam,Params[VulnParam]), "%s=%s%%00"%(VulnParam, rndStr))
                    code = self.doGetRequest(tmpurl)
                    if (code == None):
                        self._log("NULL-Byte testing failed.", self.LOG_WARN)
                        r.setNullBytePossible(False)
                    elif (code.find("%s\\0%s"%(rndStr, sur)) != -1 or code.find("%s%s"%(rndStr, sur)) != -1):
                        self._log("NULL-Byte Poisoning not possible.", self.LOG_INFO)
                        r.setNullBytePossible(False)
                    else:
                        self._log("NULL-Byte Poisoning successfull!", self.LOG_INFO)
                        r.setSurfix("%00")
                        r.setNullBytePossible(True)


            if (scriptpath == ""):
                # Failed to get scriptpath with easy method :(
                if (pre != ""):
                    self._log("Failed to retrieve path but we are forced to go relative!", self.LOG_WARN)
                    self._log("Go and try it to scan with --enable-blind.", self.LOG_WARN)
                    return(None)
                else:
                    self._log("Failed to retrieve path! It's an absolute injection so I'll fake it to '/'...", self.LOG_WARN)
                    scriptpath = "/"
                    r.setServerPath(scriptpath)
                    r.setServerScript(script)

            return(r)
        
        
        else:
            # Blindmode
            prefix = blindmode[0]
            isNull = blindmode[1]
            self._log("Identifying Vulnerability '%s' with Parameter '%s' blindly..."%(URL, VulnParam), self.LOG_ALWAYS)
            r = report(URL, Params, VulnParam)
            r.setBlindDiscovered(True)
            r.setSurfix("")
            if isNull: r.setSurfix("%00")
            r.setNullBytePossible(isNull)
            if (prefix.strip() == ""):
                r.setServerPath("/noop")
            else:
                r.setServerPath(prefix.replace("..", "a"))
            r.setServerScript("noop")
            r.setPrefix(prefix)
            if (not isUnix):
                r.setWindows()
            return(r)
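
The scanner above special-cases Windows paths (drive letters, UNC prefixes) before falling back to os.path.dirname, because dirname only understands the path flavour of the host it runs on. A minimal sketch of the same split using the flavour-specific posixpath/ntpath modules; server_dirname is a hypothetical helper, not part of the scanner:

import ntpath
import posixpath

def server_dirname(script, is_windows):
    # Use the remote host's path flavour; plain os.path.dirname would
    # apply the local one and mishandle foreign separators.
    if is_windows:
        return ntpath.dirname(script) or "\\"
    return posixpath.dirname(script) or "/"

print(server_dirname("/var/www/index.php", False))    # /var/www
print(server_dirname(r"C:\inetpub\index.asp", True))  # C:\inetpub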

Example 27

Project: imagefactory Source File: Docker.py
    def builder_should_create_target_image(self, builder, target, image_id, template, parameters):
        self.log.debug("builder_should_create_target_image called for Docker plugin - doing all our work here then stopping the process")
        tdlobj = oz.TDL.TDL(xmlstring=template.xml, rootpw_required=self.app_config["tdl_require_root_pw"])
        # At this point our input base_image is available as builder.base_image.data
        # We simply mount it up in libguestfs and tar out the results as builder.target_image.data
        wrap_metadata = parameter_cast_to_bool(parameters.get('create_docker_metadata', True))
        compress_type = parameters.get('compress', None)
        if compress_type:
            if compress_type in self.compress_commands.keys():
                compress_command = self.compress_commands[compress_type]
            else:
                raise Exception("Passed unknown compression type (%s) for Docker plugin" % (compress_type))
        else:
            compress_command = None
        guestfs_handle = launch_inspect_and_mount(builder.base_image.data, readonly = True)
        storagedir = os.path.dirname(builder.target_image.data)

        # guestfs lets us mount locally via the API, which is cool, but requires that
        # we call a blocking function to activate the mount, which requires a thread
        # We also need a temp dir to mount it to - do our best to clean up when things
        # go wrong
        tempdir = None
        fuse_thread = None
        try:
            tempdir = tempfile.mkdtemp(dir=storagedir)
            self.log.debug("Mounting input image locally at (%s)" % (tempdir))
            guestfs_handle.mount_local(tempdir)
            def _run_guestmount(g):
                g.mount_local_run()
            self.log.debug("Launching mount_local_run thread")
            fuse_thread = threading.Thread(group=None, target=_run_guestmount, args=(guestfs_handle,))
            fuse_thread.start()
            self.log.debug("Creating tar of entire image")
            # NOTE - we used to capture xattrs here but have reverted the change for now
            #        as SELinux xattrs break things in unexpected ways and the tar feature
            #        to allow selective inclusion is broken
            # TODO: Follow up with tar maintainers and docker image creators to find out what
            #       if any xattrs we really need to capture here
            tarcmd = [ 'tar',  '-cf', builder.target_image.data, '-C', tempdir ]
            # User may pass in a comma separated list of additional options to the tar command
            tar_options = parameters.get('tar_options', None)
            if tar_options:
                tar_options_list=tar_options.split(',')
                for option in tar_options_list:
                    tarcmd.append(option.strip())
            # User may pass in a comma separated list of excludes to override this
            # Default to ./etc/fstab as many people have complained this does not belong in Docker images
            tar_excludes = parameters.get('tar_excludes', './etc/fstab').split(',')
            for exclude in tar_excludes:
                tarcmd.append('--exclude=%s' % (exclude.strip()))
            tarcmd.append('./')
            self.log.debug("Command: %s" % (str(tarcmd)))
            subprocess.check_call(tarcmd)
            if wrap_metadata:
                self.log.debug("Estimating size of tar contents to include in Docker metadata")
                size = 0
                for root, dirs, files in os.walk(tempdir):
                    for name in files:
                        fp = os.path.join(root,name)
                        if os.path.isfile(fp) and not os.path.islink(fp):
                            size += os.path.getsize(fp)
                self.log.debug("Total real file content size (%d)" % (size))
        except Exception, e:
            self.log.exception(e)
            raise
        finally:
            if tempdir:
                try:
                    subprocess.check_call( ['umount', '-f', tempdir] )
                    os.rmdir(tempdir)
                except Exception, e:
                    self.log.exception(e)
                    self.log.error("WARNING: Could not unmount guest at (%s) - may still be mounted" % (tempdir) )
            if fuse_thread:
                fuse_thread.join(30.0)
                if fuse_thread.isAlive():
                    self.log.error("Guestfs local mount thread is still active - FUSE filesystem still mounted at (%s)" % (tempdir) )

        if wrap_metadata:
            # Get any parameters and if they are not set, create our defaults
            # Docker image names should not have uppercase characters 
            # https://fedorahosted.org/cloud/ticket/131
            repository = parameters.get('repository',tdlobj.name).lower()
            tag = parameters.get('tag','latest')
            docker_image_id = parameters.get('docker_image_id', self._generate_docker_id())
            cmd = parameters.get('docker_cmd', 'null')
            env = parameters.get('docker_env', 'null')
            label = parameters.get('docker_label', 'null')
            rdict = { repository: { tag: docker_image_id } }
                       
            dockerversion = parameters.get('dockerversion', '0.11.1')
            if not dockerversion in self.docker_templates_dict:
                raise Exception("No docker JSON template available for specified docker version (%s)" % (dockerversion))
            docker_json_template=self.docker_templates_dict[dockerversion]

            arch = tdlobj.arch
            if arch == "x86_64":
                arch = "amd64"
            elif arch == "armv7hl":
                arch = "armhfp"
            tdict = { }
            tdict['commentstring'] = parameters.get('comment', 'Created by Image Factory')
            tdict['os'] = parameters.get('os', 'linux')
            tdict['createdtime'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
            tdict['arch'] = arch
            tdict['idstring'] = docker_image_id
            tdict['cmd'] = cmd
            tdict['env'] = env
            tdict['label'] = label
            tdict['size'] = size

            image_json = docker_json_template.format(**tdict) 

            # v2 images
            # TODO: Something significantly less hacky looking.....
            if dockerversion == "1.10.1":
                shasum = self._file_sha256(builder.target_image.data)
                image_v2_config = json.loads(image_json)
                # The new top level JSON file is a light modification of the layer JSON
                del image_v2_config['Size']
                del image_v2_config['id']
                image_v2_config['history'] = [ { 'comment': image_v2_config['comment'],
                                               'created': image_v2_config['created'] } ]
                image_v2_config['rootfs'] = { 'diff_ids': [ "sha256:%s" % (shasum) ],
                                            'type': 'layers' }

                # Docker wants this config file to be named after its own sha256 sum
                image_v2_config_id = hashlib.sha256(json.dumps(image_v2_config)).hexdigest()

                image_v2_manifest = [ { "Config": "%s.json" % (image_v2_config_id),
                                        "Layers": [ "%s/layer.tar" % (docker_image_id) ],
                                        "RepoTags": [ "%s:%s" % (repository, tag) ] } ]

            # Create directory
            storagedir = os.path.dirname(builder.target_image.data)
            tempdir = None
            try:
                tempdir = tempfile.mkdtemp(dir=storagedir)
                self.log.debug("Creating docker image directory structure in (%s)" % (tempdir))

                repositories_path = os.path.join(tempdir,'repositories')
                repositories = open(repositories_path,"w")
                json.dump(rdict, repositories)
                repositories.close()

                if dockerversion == "1.10.1":
                    config_path = os.path.join(tempdir, '%s.json' % (image_v2_config_id))
                    config = open(config_path, "w")
                    json.dump(image_v2_config, config)
                    config.close()

                    manifest_path = os.path.join(tempdir, 'manifest.json')
                    manifest = open(manifest_path, "w")
                    json.dump(image_v2_manifest, manifest)
                    manifest.close()

                imagedir = os.path.join(tempdir, docker_image_id)
                os.mkdir(imagedir)

                jsonfile_path = os.path.join(imagedir,'json')
                jsonfile = open(jsonfile_path,'w')
                jsonfile.write(image_json)
                jsonfile.close()

                versionfile_path = os.path.join(imagedir,'VERSION')
                versionfile = open(versionfile_path, 'w')
                # TODO - Track version developments and compatibility
                versionfile.write("1.0")
                versionfile.close()

                layerfile_path = os.path.join(imagedir,'layer.tar')
                shutil.move(builder.target_image.data, layerfile_path)

                outtar = tarfile.TarFile(name=builder.target_image.data, mode="w")
                # It turns out that in at least some configurations or versions, Docker will
                # complain if the repositories file is not the last file in the archive
                # we add our single image directory first and then the repositories file to
                # avoid this
                outtar.add(imagedir, arcname=docker_image_id)
                outtar.add(repositories_path, arcname='repositories')
                if dockerversion == "1.10.1":
                    outtar.add(config_path, arcname='%s.json' % (image_v2_config_id))
                    outtar.add(manifest_path, arcname='manifest.json')
                outtar.close()
            finally:
                if tempdir:
                    try:
                        shutil.rmtree(tempdir)
                    except:
                        self.log.warning("Error encountered when removing temp dir (%s) - may not have been deleted" % (tempdir))

        if compress_command:
            self.log.debug("Compressing tar file using %s" % (compress_type))
            rawimage =  builder.target_image.data
            compimage =  builder.target_image.data + ".tmp.%s" % (compress_type)
            result = subprocess.call(compress_command % ( rawimage, compimage), shell = True)
            if result:
                raise Exception("Compression of image failed")
            self.log.debug("Compression complete, replacing original")
            os.unlink(rawimage)
            os.rename(compimage, rawimage)
            self.log.debug("Done")
        return False
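
Worth noting in this plugin: os.path.dirname(builder.target_image.data) is used to place scratch directories next to the final artifact, so the later shutil.move() is a cheap rename instead of a cross-filesystem copy. A minimal standalone sketch of that pattern, with a hypothetical layer.tar stand-in for the real image path:

import os
import tempfile

# Hypothetical stand-in for builder.target_image.data:
target = os.path.join(tempfile.gettempdir(), "layer.tar")

# Staging next to the final file keeps both on one filesystem,
# so moving results into place is a rename, not a copy.
storagedir = os.path.dirname(target)
workdir = tempfile.mkdtemp(dir=storagedir)
print(workdir)
os.rmdir(workdir)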

Example 28

Project: phobos Source File: bobj_import.py
Function: create_materials
def create_materials(filepath, relpath,                                             # copy from blender addons; should be imported from there
                     material_libs, unique_materials, unique_material_images,
                     use_image_search, float_func):
    """
    Create all the used materials in this obj,
    assign colors and images to the materials from all referenced material libs
    """
    DIR = os.path.dirname(filepath)
    context_material_vars = set()

    def load_material_image(blender_material, context_material_name, imagepath, type):
        """Sets textures defined in .mtl file
        """

        texture = bpy.data.textures.new(name=type, type='IMAGE')

        # Absolute path - c:\.. etc would work here
        image = obj_image_load(imagepath, DIR, use_image_search, relpath)

        if image is not None:
            texture.image = image

        # Adds textures for materials (rendering)
        if type == 'Kd':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_color_diffuse = True

            # adds textures to faces (Textured/Alt-Z mode)
            # Only apply the diffuse texture to the face if the image has not been set with the inline usemat func.
            unique_material_images[context_material_name] = image  # set the texface image

        elif type == 'Ka':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_ambient = True

        elif type == 'Ks':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_specular = True

        elif type == 'Bump':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_normal = True

        elif type == 'D':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_alpha = True
            blender_material.use_transparency = True
            blender_material.transparency_method = 'Z_TRANSPARENCY'
            if "alpha" not in context_material_vars:
                blender_material.alpha = 0.0
            # Todo, unset diffuse material alpha if it has an alpha channel

        elif type == 'disp':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_displacement = True

        elif type == 'refl':
            mtex = blender_material.texture_slots.add()
            mtex.use_map_color_diffuse = False

            mtex.texture = texture
            mtex.texture_coords = 'REFLECTION'
            mtex.use_map_color_diffuse = True
        else:
            raise Exception("invalid type %r" % type)

    # Add an MTL with the same name as the obj if no MTLs are specified.
    temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + b'.mtl'

    if os.path.exists(os.path.join(DIR, temp_mtl)) and temp_mtl not in material_libs:
        material_libs.append(temp_mtl)
    del temp_mtl

    # Create new materials
    for name in unique_materials:
        if name is not None:
            unique_materials[name] = bpy.data.materials.new(name.decode('utf-8', "replace"))
            unique_material_images[name] = None  # assign None to all material images to start with, add to later.

    unique_materials[None] = None
    unique_material_images[None] = None

    for libname in material_libs:
        # print(libname)
        mtlpath = os.path.join(DIR, libname)
        if not os.path.exists(mtlpath):
            print ("\tMaterial not found MTL: %r" % mtlpath)
        else:
            context_material = None
            mtl = open(mtlpath, 'rb')
            for line in mtl:  # .readlines():
                line = line.strip()
                if not line or line.startswith(b'#'):
                    pass
                elif line.startswith(b'newmtl'):
                    context_material_name = line_value(line.split())
                    context_material = unique_materials.get(context_material_name)
                    context_material_vars.clear()

                elif context_material:
                    # we need to make a material to assign properties to it.
                    line_split = line.split()
                    line_lower = line.lower().lstrip()
                    if line_lower.startswith(b'ka'):
                        context_material.mirror_color = float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])
                    elif line_lower.startswith(b'kd'):
                        context_material.diffuse_color = float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])
                    elif line_lower.startswith(b'ks'):
                        context_material.specular_color = float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])
                    elif line_lower.startswith(b'ns'):
                        context_material.specular_hardness = int((float_func(line_split[1]) * 0.51))
                    elif line_lower.startswith(b'ni'):  # Refraction index
                        context_material.raytrace_transparency.ior = max(1, min(float_func(line_split[1]), 3))  # between 1 and 3
                        context_material_vars.add("ior")
                    elif line_lower.startswith(b'd'):  # dissolve (transparency)
                        context_material.alpha = float_func(line_split[1])
                        context_material.use_transparency = True
                        context_material.transparency_method = 'Z_TRANSPARENCY'
                        context_material_vars.add("alpha")
                    elif line_lower.startswith(b'tr'):  # translucency
                        context_material.translucency = float_func(line_split[1])
                    elif line_lower.startswith(b'tf'):
                        # rgb, filter color, blender has no support for this.
                        pass
                    elif line_lower.startswith(b'illum'):
                        illum = int(line_split[1])

                        do_ambient = True
                        do_highlight = False
                        do_reflection = False
                        do_transparency = False
                        do_glass = False
                        do_fresnel = False
                        do_raytrace = False

                        # inline comments are from the spec, v4.2
                        if illum == 0:
                            # Color on and Ambient off
                            do_ambient = False
                        elif illum == 1:
                            # Color on and Ambient on
                            pass
                        elif illum == 2:
                            # Highlight on
                            do_highlight = True
                        elif illum == 3:
                            # Reflection on and Ray trace on
                            do_reflection = True
                            do_raytrace = True
                        elif illum == 4:
                            # Transparency: Glass on
                            # Reflection: Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_glass = True
                            do_raytrace = True
                        elif illum == 5:
                            # Reflection: Fresnel on and Ray trace on
                            do_reflection = True
                            do_fresnel = True
                            do_raytrace = True
                        elif illum == 6:
                            # Transparency: Refraction on
                            # Reflection: Fresnel off and Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_raytrace = True
                        elif illum == 7:
                            # Transparency: Refraction on
                            # Reflection: Fresnel on and Ray trace on
                            do_transparency = True
                            do_reflection = True
                            do_fresnel = True
                            do_raytrace = True
                        elif illum == 8:
                            # Reflection on and Ray trace off
                            do_reflection = True
                        elif illum == 9:
                            # Transparency: Glass on
                            # Reflection: Ray trace off
                            do_transparency = True
                            do_reflection = True
                            do_glass = True
                        elif illum == 10:
                            # Casts shadows onto invisible surfaces

                            # blender can't do this
                            pass

                        if do_ambient:
                            context_material.ambient = 1.0
                        else:
                            context_material.ambient = 0.0

                        if do_highlight:
                            # FIXME, how else to use this?
                            context_material.specular_intensity = 1.0

                        if do_reflection:
                            context_material.raytrace_mirror.use = True
                            context_material.raytrace_mirror.reflect_factor = 1.0

                        if do_transparency:
                            context_material.use_transparency = True
                            context_material.transparency_method = 'RAYTRACE' if do_raytrace else 'Z_TRANSPARENCY'
                            if "alpha" not in context_material_vars:
                                context_material.alpha = 0.0

                        if do_glass:
                            if "ior" not in context_material_vars:
                                context_material.raytrace_transparency.ior = 1.5

                        if do_fresnel:
                            context_material.raytrace_mirror.fresnel = 1.0  # could be any value for 'ON'

                        """
                        if do_raytrace:
                            context_material.use_raytrace = True
                        else:
                            context_material.use_raytrace = False
                        """
                        # XXX, this is not following the OBJ spec, but this was
                        # written when raytracing wasn't the default, and it is annoying to disable for blender users.
                        context_material.use_raytrace = True

                    elif line_lower.startswith(b'map_ka'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Ka')
                    elif line_lower.startswith(b'map_ks'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Ks')
                    elif line_lower.startswith(b'map_kd'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Kd')
                    elif line_lower.startswith((b'map_bump', b'bump')):  # 'bump' is incorrect but some files use it.
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Bump')
                    elif line_lower.startswith((b'map_d', b'map_tr')):  # Alpha map - Dissolve
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'D')

                    elif line_lower.startswith((b'map_disp', b'disp')):  # displacement map
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'disp')

                    elif line_lower.startswith((b'map_refl', b'refl')):  # reflectionmap
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'refl')
                    else:
                        print("\t%r:%r (ignored)" % (filepath, line))
            mtl.close()
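
This importer anchors every relative lookup (the implicit .mtl and each map_* image) on DIR = os.path.dirname(filepath) rather than on the current working directory. The same resolution in isolation, with a hypothetical .obj path and str instead of the importer's bytes:

import os

filepath = "/models/robot/arm.obj"  # hypothetical input path

DIR = os.path.dirname(filepath)                          # /models/robot
stem = os.path.splitext(os.path.basename(filepath))[0]   # arm
mtlpath = os.path.join(DIR, stem + ".mtl")               # /models/robot/arm.mtl
print(mtlpath)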

Example 29

Project: PokemonGo-Map Source File: utils.py
Function: get_args
@memoize
def get_args():
    # pre-check to see if the -cf or --config flag is used on the command line
    # if not, we'll use the env var or default value.  this prevents layering of
    # config files, and handles missing config.ini as well
    defaultconfigfiles = []
    if '-cf' not in sys.argv and '--config' not in sys.argv:
        defaultconfigfiles = [os.getenv('POGOMAP_CONFIG', os.path.join(os.path.dirname(__file__), '../config/config.ini'))]
    parser = configargparse.ArgParser(default_config_files=defaultconfigfiles, auto_env_var_prefix='POGOMAP_')
    parser.add_argument('-cf', '--config', is_config_file=True, help='Configuration file')
    parser.add_argument('-a', '--auth-service', type=str.lower, action='append', default=[],
                        help='Auth Services, either one for all accounts or one per account: ptc or google. Defaults all to ptc.')
    parser.add_argument('-u', '--username', action='append', default=[],
                        help='Usernames, one per account.')
    parser.add_argument('-p', '--password', action='append', default=[],
                        help='Passwords, either single one for all accounts or one per account.')
    parser.add_argument('-w', '--workers', type=int,
                        help='Number of search worker threads to start. Defaults to the number of accounts specified.')
    parser.add_argument('-asi', '--account-search-interval', type=int, default=0,
                        help='Seconds for accounts to search before switching to a new account. 0 to disable.')
    parser.add_argument('-ari', '--account-rest-interval', type=int, default=7200,
                        help='Seconds for accounts to rest when they fail or are switched out')
    parser.add_argument('-ac', '--accountcsv',
                        help='Load accounts from CSV file containing "auth_service,username,passwd" lines')
    parser.add_argument('-l', '--location', type=parse_unicode,
                        help='Location, can be an address or coordinates')
    parser.add_argument('-j', '--jitter', help='Apply random -9m to +9m jitter to location',
                        action='store_true', default=False)
    parser.add_argument('-st', '--step-limit', help='Steps', type=int,
                        default=12)
    parser.add_argument('-sd', '--scan-delay',
                        help='Time delay between requests in scan threads',
                        type=float, default=10)
    parser.add_argument('-enc', '--encounter',
                        help='Start an encounter to gather IVs and moves',
                        action='store_true', default=False)
    parser.add_argument('-ed', '--encounter-delay',
                        help='Time delay between encounter pokemon in scan threads',
                        type=float, default=1)
    encounter_list = parser.add_mutually_exclusive_group()
    encounter_list.add_argument('-ewht', '--encounter-whitelist', action='append', default=[],
                                help='List of pokemon to encounter for more stats')
    encounter_list.add_argument('-eblk', '--encounter-blacklist', action='append', default=[],
                                help='List of pokemon to NOT encounter for more stats')
    parser.add_argument('-ld', '--login-delay',
                        help='Time delay between each login attempt',
                        type=float, default=5)
    parser.add_argument('-lr', '--login-retries',
                        help='Number of logins attempts before refreshing a thread',
                        type=int, default=3)
    parser.add_argument('-mf', '--max-failures',
                        help='Maximum number of failures to parse locations before an account will go into a two hour sleep',
                        type=int, default=5)
    parser.add_argument('-msl', '--min-seconds-left',
                        help='Time that must be left on a spawn before considering it too late and skipping it. e.g. 600 would skip anything with < 10 minutes remaining. Default 0.',
                        type=int, default=0)
    parser.add_argument('-dc', '--display-in-console',
                        help='Display Found Pokemon in Console',
                        action='store_true', default=False)
    parser.add_argument('-H', '--host', help='Set web server listening host',
                        default='127.0.0.1')
    parser.add_argument('-P', '--port', type=int,
                        help='Set web server listening port', default=5000)
    parser.add_argument('-L', '--locale',
                        help='Locale for Pokemon names (default: {},\
                        check {} for more)'.
                        format(config['LOCALE'], config['LOCALES_DIR']), default='en')
    parser.add_argument('-c', '--china',
                        help='Coordinates transformer for China',
                        action='store_true')
    parser.add_argument('-m', '--mock', type=str,
                        help='Mock mode - point to a fpgo endpoint instead of using the real PogoApi, ec: http://127.0.0.1:9090',
                        default='')
    parser.add_argument('-ns', '--no-server',
                        help='No-Server Mode. Starts the searcher but not the Webserver.',
                        action='store_true', default=False)
    parser.add_argument('-os', '--only-server',
                        help='Server-Only Mode. Starts only the Webserver without the searcher.',
                        action='store_true', default=False)
    parser.add_argument('-nsc', '--no-search-control',
                        help='Disables search control',
                        action='store_false', dest='search_control', default=True)
    parser.add_argument('-fl', '--fixed-location',
                        help='Hides the search bar for use in shared maps.',
                        action='store_true', default=False)
    parser.add_argument('-k', '--gmaps-key',
                        help='Google Maps Javascript API Key',
                        required=True)
    parser.add_argument('--skip-empty', help='Enables skipping of empty cells in normal scans - requires previously populated database (not to be used with -ss)',
                        action='store_true', default=False)
    parser.add_argument('-C', '--cors', help='Enable CORS on web server',
                        action='store_true', default=False)
    parser.add_argument('-D', '--db', help='Database filename',
                        default='pogom.db')
    parser.add_argument('-cd', '--clear-db',
                        help='Deletes the existing database before starting the Webserver.',
                        action='store_true', default=False)
    parser.add_argument('-np', '--no-pokemon',
                        help='Disables Pokemon from the map (including parsing them into local db)',
                        action='store_true', default=False)
    parser.add_argument('-ng', '--no-gyms',
                        help='Disables Gyms from the map (including parsing them into local db)',
                        action='store_true', default=False)
    parser.add_argument('-nk', '--no-pokestops',
                        help='Disables PokeStops from the map (including parsing them into local db)',
                        action='store_true', default=False)
    parser.add_argument('-ss', '--spawnpoint-scanning',
                        help='Use spawnpoint scanning (instead of hex grid). Scans in a circle based on step_limit when on DB', nargs='?', const='nofile', default=False)
    parser.add_argument('--dump-spawnpoints', help='dump the spawnpoints from the db to json (only for use with -ss)',
                        action='store_true', default=False)
    parser.add_argument('-pd', '--purge-data',
                        help='Clear pokemon from database this many hours after they disappear \
                        (0 to disable)', type=int, default=0)
    parser.add_argument('-px', '--proxy', help='Proxy url (e.g. socks5://127.0.0.1:9050)', action='append')
    parser.add_argument('-pxsc', '--proxy-skip-check', help='Disable checking of proxies before start', action='store_true', default=False)
    parser.add_argument('-pxt', '--proxy-timeout', help='Timeout settings for proxy checker in seconds ', type=int, default=5)
    parser.add_argument('-pxd', '--proxy-display', help='Display info on which proxy is being used (index or full). To be used with -ps', type=str, default='index')
    parser.add_argument('--db-type', help='Type of database to be used (default: sqlite)',
                        default='sqlite')
    parser.add_argument('--db-name', help='Name of the database to be used')
    parser.add_argument('--db-user', help='Username for the database')
    parser.add_argument('--db-pass', help='Password for the database')
    parser.add_argument('--db-host', help='IP or hostname for the database')
    parser.add_argument('--db-port', help='Port for the database', type=int, default=3306)
    parser.add_argument('--db-max_connections', help='Max connections (per thread) for the database',
                        type=int, default=5)
    parser.add_argument('--db-threads', help='Number of db threads; increase if the db queue falls behind',
                        type=int, default=1)
    parser.add_argument('-wh', '--webhook', help='Define URL(s) to POST webhook information to',
                        nargs='*', default=False, dest='webhooks')
    parser.add_argument('-gi', '--gym-info', help='Get all details about gyms (causes an additional API hit for every gym)',
                        action='store_true', default=False)
    parser.add_argument('--disable-clean', help='Disable clean db loop',
                        action='store_true', default=False)
    parser.add_argument('--webhook-updates-only', help='Only send updates (pokémon & lured pokéstops)',
                        action='store_true', default=False)
    parser.add_argument('--wh-threads', help='Number of webhook threads; increase if the webhook queue falls behind',
                        type=int, default=1)
    parser.add_argument('--ssl-certificate', help='Path to SSL certificate file')
    parser.add_argument('--ssl-privatekey', help='Path to SSL private key file')
    parser.add_argument('-ps', '--print-status', action='store_true',
                        help='Show a status screen instead of log messages. Can switch between status and logs by pressing enter.', default=False)
    parser.add_argument('-sn', '--status-name', default=None,
                        help='Enable status page database update using STATUS_NAME as main worker name')
    parser.add_argument('-spp', '--status-page-password', default=None,
                        help='Set the status page password')
    parser.add_argument('-el', '--encrypt-lib', help='Path to encrypt lib to be used instead of the shipped ones')
    parser.add_argument('-odt', '--on-demand_timeout', help='Pause searching while web UI is inactive for this timeout(in seconds)', type=int, default=0)
    verbosity = parser.add_mutually_exclusive_group()
    verbosity.add_argument('-v', '--verbose', help='Show debug messages from PokemonGo-Map and pgoapi. Optionally specify file to log to.', nargs='?', const='nofile', default=False, metavar='filename.log')
    verbosity.add_argument('-vv', '--very-verbose', help='Like verbose, but show debug messages from all modules as well.  Optionally specify file to log to.', nargs='?', const='nofile', default=False, metavar='filename.log')
    parser.set_defaults(DEBUG=False)

    args = parser.parse_args()

    if args.only_server:
        if args.location is None:
            parser.print_usage()
            print(sys.argv[0] + ": error: arguments -l/--location is required")
            sys.exit(1)
    else:
        # If using a CSV file, add the data where needed into the username,password and auth_service arguments.
        # CSV file should have lines like "ptc,username,password", "username,password" or "username".
        if args.accountcsv is not None:
            # Giving num_fields something it would usually not get
            num_fields = -1
            with open(args.accountcsv, 'r') as f:
                for num, line in enumerate(f, 1):

                    fields = []

                    # First time around populate num_fields with current field count.
                    if num_fields < 0:
                        num_fields = line.count(',') + 1

                    csv_input = []
                    csv_input.append('')
                    csv_input.append('<username>')
                    csv_input.append('<username>,<password>')
                    csv_input.append('<ptc/google>,<username>,<password>')

                    # If the number of fields is different, this is not a consistent CSV
                    if num_fields != line.count(',') + 1:
                        print(sys.argv[0] + ": Error parsing CSV file on line " + str(num) + ". Your file started with the following input, '" + csv_input[num_fields] + "' but now you gave us '" + csv_input[line.count(',') + 1] + "'.")
                        sys.exit(1)

                    field_error = ''
                    line = line.strip()

                    # Ignore blank lines and comment lines
                    if len(line) == 0 or line.startswith('#'):
                        continue

                    # If number of fields is more than 1 split the line into fields and strip them
                    if num_fields > 1:
                        fields = line.split(",")
                        fields = map(str.strip, fields)

                    # If the number of fields is one then assume this is "username". As requested..
                    if num_fields == 1:
                        # Empty lines are already ignored.
                        args.username.append(line)

                    # If the number of fields is two then assume this is "username,password". As requested..
                    if num_fields == 2:
                        # If the field is empty, something is wrong!
                        if len(fields[0]) > 0:
                            args.username.append(fields[0])
                        else:
                            field_error = 'username'

                        # If the field is empty, something is wrong!
                        if len(fields[1]) > 0:
                            args.password.append(fields[1])
                        else:
                            field_error = 'password'

                    # If the number of fields is three then assume this is "ptc,username,password". As requested..
                    if num_fields == 3:
                        # If field 0 is not ptc or google, something is wrong!
                        if fields[0].lower() == 'ptc' or fields[0].lower() == 'google':
                            args.auth_service.append(fields[0])
                        else:
                            field_error = 'method'

                        # If the field is empty, something is wrong!
                        if len(fields[1]) > 0:
                            args.username.append(fields[1])
                        else:
                            field_error = 'username'

                        # If the field is empty, something is wrong!
                        if len(fields[2]) > 0:
                            args.password.append(fields[2])
                        else:
                            field_error = 'password'

                    if num_fields > 3:
                        print('Too many fields in accounts file: max supported is 3. Found {} fields'.format(num_fields))
                        sys.exit(1)

                    # If something is wrong display error.
                    if field_error != '':
                        type_error = 'empty!'
                        if field_error == 'method':
                            type_error = 'not ptc or google instead we got \'' + fields[0] + '\'!'
                        print(sys.argv[0] + ": Error parsing CSV file on line " + str(num) + ". We found " + str(num_fields) + " fields, so your input should have looked like '" + csv_input[num_fields] + "'\nBut you gave us '" + line + "', your " + field_error + " was " + type_error)
                        sys.exit(1)

        errors = []

        num_auths = len(args.auth_service)
        num_usernames = 0
        num_passwords = 0

        if len(args.username) == 0:
            errors.append('Missing `username` either as -u/--username, csv file using -ac, or in config')
        else:
            num_usernames = len(args.username)

        if args.location is None:
            errors.append('Missing `location` either as -l/--location or in config')

        if len(args.password) == 0:
            errors.append('Missing `password` either as -p/--password, csv file, or in config')
        else:
            num_passwords = len(args.password)

        if args.step_limit is None:
            errors.append('Missing `step_limit` either as -st/--step-limit or in config')

        if num_auths == 0:
            args.auth_service = ['ptc']

        num_auths = len(args.auth_service)

        if num_usernames > 1:
            if num_passwords > 1 and num_usernames != num_passwords:
                errors.append('The number of provided passwords ({}) must match the username count ({})'.format(num_passwords, num_usernames))
            if num_auths > 1 and num_usernames != num_auths:
                errors.append('The number of provided auth ({}) must match the username count ({})'.format(num_auths, num_usernames))

        if len(errors) > 0:
            parser.print_usage()
            print(sys.argv[0] + ": errors: \n - " + "\n - ".join(errors))
            sys.exit(1)

        # Fill the pass/auth if set to a single value
        if num_passwords == 1:
            args.password = [args.password[0]] * num_usernames
        if num_auths == 1:
            args.auth_service = [args.auth_service[0]] * num_usernames

        # Make our accounts list
        args.accounts = []

        # Make the accounts list
        for i, username in enumerate(args.username):
            args.accounts.append({'username': username, 'password': args.password[i], 'auth_service': args.auth_service[i]})

        # Make max workers equal number of accounts if unspecified, and disable account switching
        if args.workers is None:
            args.workers = len(args.accounts)
            args.account_search_interval = None

        # Disable search interval if 0 specified
        if args.account_search_interval == 0:
            args.account_search_interval = None

        # Make sure we don't have an empty account list after adding command line and CSV accounts
        if len(args.accounts) == 0:
            print(sys.argv[0] + ": Error: no accounts specified. Use -a, -u, and -p or --accountcsv to add accounts")
            sys.exit(1)

        args.encounter_blacklist = [int(i) for i in args.encounter_blacklist]
        args.encounter_whitelist = [int(i) for i in args.encounter_whitelist]

        # Decide which scanning mode to use
        if args.spawnpoint_scanning:
            args.scheduler = 'SpawnScan'
        elif args.skip_empty:
            args.scheduler = 'HexSearchSpawnpoint'
        else:
            args.scheduler = 'HexSearch'

    return args
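
The single os.path.dirname call here is the usual "config file relative to the module" idiom: anchoring on __file__ keeps the default path independent of whatever directory the script happens to be launched from. The idiom in isolation (a sketch; the abspath/normpath wrapping is an addition for readability):

import os

# Resolve relative to this module, not the current working directory.
config_path = os.path.normpath(
    os.path.join(os.path.dirname(os.path.abspath(__file__)),
                 "..", "config", "config.ini"))
print(config_path)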

Example 30

Project: tp-libvirt Source File: virsh_dump.py
Function: run
def run(test, params, env):
    """
    Test command: virsh dump.

    This command can dump the core of a domain to a file for analysis.
    1. Positive testing
        1.1 Dump domain with valid options.
        1.2 Avoid file system cache when dumping.
        1.3 Compress the dump images to valid/invalid formats.
    2. Negative testing
        2.1 Dump domain to a non-exist directory.
        2.2 Dump domain with invalid option.
        2.3 Dump a shut-off domain.
    """

    vm_name = params.get("main_vm", "vm1")
    vm = env.get_vm(vm_name)
    options = params.get("dump_options")
    dump_file = params.get("dump_file", "vm.core")
    dump_dir = params.get("dump_dir", data_dir.get_tmp_dir())
    if os.path.dirname(dump_file) == "":
        dump_file = os.path.join(dump_dir, dump_file)
    dump_image_format = params.get("dump_image_format")
    start_vm = params.get("start_vm") == "yes"
    paused_after_start_vm = params.get("paused_after_start_vm") == "yes"
    status_error = params.get("status_error", "no") == "yes"
    timeout = int(params.get("check_pid_timeout", "5"))
    memory_dump_format = params.get("memory_dump_format", "")
    uri = params.get("virsh_uri")
    unprivileged_user = params.get('unprivileged_user')
    if unprivileged_user:
        if unprivileged_user.count('EXAMPLE'):
            unprivileged_user = 'testacl'

    if not libvirt_version.version_compare(1, 1, 1):
        if params.get('setup_libvirt_polkit') == 'yes':
            raise error.TestNAError("API acl test not supported in current"
                                    " libvirt version.")

    def check_domstate(actual, options):
        """
        Check the domain status according to dump options.
        """

        if options.find('live') >= 0:
            domstate = "running"
            if options.find('crash') >= 0 or options.find('reset') >= 0:
                domstate = "running"
            if paused_after_start_vm:
                domstate = "paused"
        elif options.find('crash') >= 0:
            domstate = "shut off"
            if options.find('reset') >= 0:
                domstate = "running"
        elif options.find('reset') >= 0:
            domstate = "running"
            if paused_after_start_vm:
                domstate = "paused"
        else:
            domstate = "running"
            if paused_after_start_vm:
                domstate = "paused"

        if not start_vm:
            domstate = "shut off"

        logging.debug("Domain should %s after run dump %s", domstate, options)

        return (domstate == actual)

    def check_dump_format(dump_image_format, dump_file):
        """
        Check the format of the dumped file.

        If 'dump_image_format' is not specified or is invalid in qemu.conf,
        the file should be a normal raw file; otherwise it should be compressed
        to the specified format. The supported compression formats are: lzop,
        gzip, bzip2, and xz.
        For a memory-only dump the default format is ELF, and a format can also
        be chosen with the --format option, so the result could be 'elf' or 'data'.
        """

        valid_format = ["lzop", "gzip", "bzip2", "xz", 'elf', 'data']
        if len(dump_image_format) == 0 or dump_image_format not in valid_format:
            logging.debug("No need check the dumped file format")
            return True
        else:
            file_cmd = "file %s" % dump_file
            (status, output) = commands.getstatusoutput(file_cmd)
            if status:
                logging.error("Fail to check dumped file %s", dump_file)
                return False
            logging.debug("Run file %s output: %s", dump_file, output)
            actual_format = output.split(" ")[1]
            if actual_format.lower() != dump_image_format.lower():
                logging.error("Compress dumped file to %s fail: %s" %
                              (dump_image_format, actual_format))
                return False
            else:
                return True

    # Configure dump_image_format in /etc/libvirt/qemu.conf.
    qemu_config = utils_config.LibvirtQemuConfig()
    libvirtd = utils_libvirtd.Libvirtd()
    if len(dump_image_format):
        qemu_config.dump_image_format = dump_image_format
        libvirtd.restart()

    # Deal with bypass-cache option
    child_pid = 0
    if options.find('bypass-cache') >= 0:
        pid = os.fork()
        if pid:
            # Guarantee check_bypass function has run before dump
            child_pid = pid
            try:
                wait_pid_active(pid, timeout)
            finally:
                os.kill(child_pid, signal.SIGTERM)
        else:
            check_bypass(dump_file)
            # Wait for parent process kills us
            while True:
                time.sleep(1)

    # Deal with memory-only dump format
    if len(memory_dump_format):
        # Make sure libvirt support this option
        if virsh.has_command_help_match("dump", "--format") is None:
            raise error.TestNAError("Current libvirt version doesn't support"
                                    " --format option for dump command")
        # Make sure QEMU support this format
        query_cmd = '{"execute":"query-dump-guest-memory-capability"}'
        qemu_capa = virsh.qemu_monitor_command(vm_name, query_cmd).stdout
        if (memory_dump_format not in qemu_capa) and not status_error:
            raise error.TestNAError("Unsupported dump format '%s' for"
                                    " this QEMU binary" % memory_dump_format)
        options += " --format %s" % memory_dump_format
        if memory_dump_format == 'elf':
            dump_image_format = 'elf'
        if memory_dump_format in ['kdump-zlib', 'kdump-lzo', 'kdump-snappy']:
            dump_image_format = 'data'

    # Back up xml file
    vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
    backup_xml = vmxml.copy()

    dump_guest_core = params.get("dump_guest_core", "")
    if dump_guest_core not in ["", "on", "off"]:
        raise error.TestError("invalid dumpCore value: %s" % dump_guest_core)
    try:
        # Set dumpCore in guest xml
        if dump_guest_core:
            if vm.is_alive():
                vm.destroy(gracefully=False)
            vmxml.dumpcore = dump_guest_core
            vmxml.sync()
            vm.start()
            # check qemu-kvm cmdline
            vm_pid = vm.get_pid()
            cmd = "cat /proc/%d/cmdline|xargs -0 echo" % vm_pid
            cmd += "|grep dump-guest-core=%s" % dump_guest_core
            result = utils.run(cmd, ignore_status=True)
            logging.debug("cmdline: %s" % result.stdout)
            if result.exit_status:
                raise error.TestFail("Did not find dump-guest-core=%s in qemu"
                                     " cmdline" % dump_guest_core)
            else:
                logging.info("Found dump-guest-core=%s in qemu cmdline",
                             dump_guest_core)

        # Run virsh command
        cmd_result = virsh.dump(vm_name, dump_file, options,
                                unprivileged_user=unprivileged_user,
                                uri=uri,
                                ignore_status=True, debug=True)
        status = cmd_result.exit_status

        logging.info("Start check result")
        if not check_domstate(vm.state(), options):
            raise error.TestFail("Domain status check fail.")
        if status_error:
            if not status:
                raise error.TestFail("Expect fail, but run successfully")
        else:
            if status:
                raise error.TestFail("Expect succeed, but run fail")
            if not os.path.exists(dump_file):
                raise error.TestFail("Fail to find domain dumped file.")
            if check_dump_format(dump_image_format, dump_file):
                logging.info("Successfully dump domain to %s", dump_file)
            else:
                raise error.TestFail("The format of dumped file is wrong.")
    finally:
        if child_pid:
            os.kill(child_pid, signal.SIGTERM)
        if os.path.isfile(dump_file):
            os.remove(dump_file)
        if vm.is_alive():
            vm.destroy(gracefully=False)
        backup_xml.sync()
        qemu_config.restore()
        libvirtd.restart()
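
The dirname check near the top of run() is a compact way to ask whether dump_file is a bare filename: os.path.dirname returns an empty string exactly when a path has no directory component, and only then is the default directory prepended. A minimal sketch of that guard, with a hypothetical /tmp default:

import os

def resolve_dump_path(dump_file, dump_dir="/tmp"):
    # A bare filename has an empty dirname; only then prepend the default dir.
    if os.path.dirname(dump_file) == "":
        return os.path.join(dump_dir, dump_file)
    return dump_file

print(resolve_dump_path("vm.core"))             # /tmp/vm.core
print(resolve_dump_path("/var/crash/vm.core"))  # unchanged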

Example 31

Project: kerncraft Source File: lex.py
def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
        reflags=0, nowarn=False, outputdir=None, debuglog=None, errorlog=None):

    if lextab is None:
        lextab = 'lextab'

    global lexer

    ldict = None
    stateinfo  = {'INITIAL': 'inclusive'}
    lexobj = Lexer()
    lexobj.lexoptimize = optimize
    global token, input

    if errorlog is None:
        errorlog = PlyLogger(sys.stderr)

    if debug:
        if debuglog is None:
            debuglog = PlyLogger(sys.stderr)

    # Get the module dictionary used for the lexer
    if object:
        module = object

    # Get the module dictionary used for the lexer
    if module:
        _items = [(k, getattr(module, k)) for k in dir(module)]
        ldict = dict(_items)
        # If no __file__ attribute is available, try to obtain it from the __module__ instead
        if '__file__' not in ldict:
            ldict['__file__'] = sys.modules[ldict['__module__']].__file__
    else:
        ldict = get_caller_module_dict(2)

    # Determine if the module is part of a package or not.
    # If so, fix the tabmodule setting so that tables load correctly
    pkg = ldict.get('__package__')
    if pkg and isinstance(lextab, str):
        if '.' not in lextab:
            lextab = pkg + '.' + lextab

    # Collect parser information from the dictionary
    linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
    linfo.get_all()
    if not optimize:
        if linfo.validate_all():
            raise SyntaxError("Can't build lexer")

    if optimize and lextab:
        try:
            lexobj.readtab(lextab, ldict)
            token = lexobj.token
            input = lexobj.input
            lexer = lexobj
            return lexobj

        except ImportError:
            pass

    # Dump some basic debugging information
    if debug:
        debuglog.info('lex: tokens   = %r', linfo.tokens)
        debuglog.info('lex: literals = %r', linfo.literals)
        debuglog.info('lex: states   = %r', linfo.stateinfo)

    # Build a dictionary of valid token names
    lexobj.lextokens = set()
    for n in linfo.tokens:
        lexobj.lextokens.add(n)

    # Get literals specification
    if isinstance(linfo.literals, (list, tuple)):
        lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
    else:
        lexobj.lexliterals = linfo.literals

    lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)

    # Get the stateinfo dictionary
    stateinfo = linfo.stateinfo

    regexs = {}
    # Build the master regular expressions
    for state in stateinfo:
        regex_list = []

        # Add rules defined by functions first
        for fname, f in linfo.funcsym[state]:
            line = f.__code__.co_firstlineno
            file = f.__code__.co_filename
            regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state)

        # Now add all of the simple rules
        for name, r in linfo.strsym[state]:
            regex_list.append('(?P<%s>%s)' % (name, r))
            if debug:
                debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)

        regexs[state] = regex_list

    # Form the master regular expressions from the per-state rule lists

    if debug:
        debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')

    for state in regexs:
        lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
        lexobj.lexstatere[state] = lexre
        lexobj.lexstateretext[state] = re_text
        lexobj.lexstaterenames[state] = re_names
        if debug:
            for i, text in enumerate(re_text):
                debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text)

    # For inclusive states, we need to add the regular expressions from the INITIAL state
    for state, stype in stateinfo.items():
        if state != 'INITIAL' and stype == 'inclusive':
            lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
            lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
            lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])

    lexobj.lexstateinfo = stateinfo
    lexobj.lexre = lexobj.lexstatere['INITIAL']
    lexobj.lexretext = lexobj.lexstateretext['INITIAL']
    lexobj.lexreflags = reflags

    # Set up ignore variables
    lexobj.lexstateignore = linfo.ignore
    lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '')

    # Set up error functions
    lexobj.lexstateerrorf = linfo.errorf
    lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)
    if not lexobj.lexerrorf:
        errorlog.warning('No t_error rule is defined')

    # Set up eof functions
    lexobj.lexstateeoff = linfo.eoff
    lexobj.lexeoff = linfo.eoff.get('INITIAL', None)

    # Check state information for ignore and error rules
    for s, stype in stateinfo.items():
        if stype == 'exclusive':
            if s not in linfo.errorf:
                errorlog.warning("No error rule is defined for exclusive state '%s'", s)
            if s not in linfo.ignore and lexobj.lexignore:
                errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
        elif stype == 'inclusive':
            if s not in linfo.errorf:
                linfo.errorf[s] = linfo.errorf.get('INITIAL', None)
            if s not in linfo.ignore:
                linfo.ignore[s] = linfo.ignore.get('INITIAL', '')

    # Create global versions of the token() and input() functions
    token = lexobj.token
    input = lexobj.input
    lexer = lexobj

    # If in optimize mode, we write the lextab
    if lextab and optimize:
        if outputdir is None:
            # If no output directory is set, the location of the output files
            # is determined according to the following rules:
            #     - If lextab specifies a package, files go into that package directory
            #     - Otherwise, files go in the same directory as the specifying module
            if isinstance(lextab, types.ModuleType):
                srcfile = lextab.__file__
            else:
                if '.' not in lextab:
                    srcfile = ldict['__file__']
                else:
                    parts = lextab.split('.')
                    pkgname = '.'.join(parts[:-1])
                    exec('import %s' % pkgname)
                    srcfile = getattr(sys.modules[pkgname], '__file__', '')
            outputdir = os.path.dirname(srcfile)
        try:
            lexobj.writetab(lextab, outputdir)
        except IOError as e:
            errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e))

    return lexobj

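The outputdir fallback at the end of lex() is the os.path.dirname usage this example exists for: when no output directory is given, the generated table module is written next to whatever module (or package) defined the lexer rules. A condensed, self-contained sketch of just that fallback, assuming a dotted tabname names a package (identifiers here are illustrative):

import os
import sys

def default_output_dir(tabname, caller_globals):
    """A dotted tabname points into a package; otherwise generated
    files land beside the module that defined the rules."""
    if '.' in tabname:
        pkgname = tabname.rsplit('.', 1)[0]
        __import__(pkgname)
        srcfile = getattr(sys.modules[pkgname], '__file__', '')
    else:
        srcfile = caller_globals.get('__file__', '')
    return os.path.dirname(srcfile)

# e.g. default_output_dir('lextab', globals()) -> this module's directory
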
Example 32

Project: tp-libvirt Source File: virsh_volume.py
def run(test, params, env):
    """
    1. Create a pool
    2. Create n volumes (vol-create-as)
    3. Check the volume details from the following commands
       vol-info
       vol-key
       vol-list
       vol-name
       vol-path
       vol-pool
       qemu-img info
    4. Delete the volume and check in vol-list
    5. Repeat the steps for the given number of volumes
    6. Delete the pool and target
    TODO: Handle negative testcases
    """

    def delete_volume(expected_vol):
        """
        Deletes Volume
        """
        pool_name = expected_vol['pool_name']
        vol_name = expected_vol['name']
        pv = libvirt_storage.PoolVolume(pool_name)
        if not pv.delete_volume(vol_name):
            raise error.TestFail("Delete volume failed." % vol_name)
        else:
            logging.debug("Volume: %s successfully deleted on pool: %s",
                          vol_name, pool_name)

    def get_vol_list(pool_name, vol_name):
        """
        Parse the volume list
        """
        output = virsh.vol_list(pool_name, "--details")
        rg = re.compile(
            r'^(\S+)\s+(\S+)\s+(\S+)\s+(\d+\.\d+\s\S+)\s+(\d+\.\d+.*)')
        vol = {}
        vols = []
        volume_detail = None
        for line in output.stdout.splitlines():
            match = re.search(rg, line.lstrip())
            if match is not None:
                vol['name'] = match.group(1)
                vol['path'] = match.group(2)
                vol['type'] = match.group(3)
                vol['capacity'] = match.group(4)
                vol['allocation'] = match.group(5)
                vols.append(vol)
                vol = {}
        for volume in vols:
            if volume['name'] == vol_name:
                volume_detail = volume
        return volume_detail

    def norm_capacity(capacity):
        """
        Normalize the capacity values to bytes
        """
        # Normalize all values to bytes
        norm_capacity = {}
        des = {'B': 'B', 'bytes': 'B', 'b': 'B', 'kib': 'K',
               'KiB': 'K', 'K': 'K', 'k': 'K', 'KB': 'K',
               'mib': 'M', 'MiB': 'M', 'M': 'M', 'm': 'M',
               'MB': 'M', 'gib': 'G', 'GiB': 'G', 'G': 'G',
               'g': 'G', 'GB': 'G', 'Gb': 'G', 'tib': 'T',
               'TiB': 'T', 'TB': 'T', 'T': 'T', 't': 'T'
               }
        val = {'B': 1,
               'K': 1024,
               'M': 1048576,
               'G': 1073741824,
               'T': 1099511627776
               }

        reg_list = re.compile(r'(\S+)\s(\S+)')
        match_list = re.search(reg_list, capacity['list'])
        if match_list is not None:
            mem_value = float(match_list.group(1))
            norm = val[des[match_list.group(2)]]
            norm_capacity['list'] = int(mem_value * norm)
        else:
            raise error.TestFail("Error in parsing capacity value in"
                                 " virsh vol-list")

        match_info = re.search(reg_list, capacity['info'])
        if match_info is not None:
            mem_value = float(match_info.group(1))
            norm = val[des[match_info.group(2)]]
            norm_capacity['info'] = int(mem_value * norm)
        else:
            raise error.TestFail("Error in parsing capacity value "
                                 "in virsh vol-info")

        norm_capacity['qemu_img'] = capacity['qemu_img']
        norm_capacity['xml'] = int(capacity['xml'])

        return norm_capacity

    def check_vol(expected, avail=True):
        """
        Checks the expected volume details with actual volume details from
        vol-dumpxml
        vol-list
        vol-info
        vol-key
        vol-path
        qemu-img info
        """
        error_count = 0

        pv = libvirt_storage.PoolVolume(expected['pool_name'])
        vol_exists = pv.volume_exists(expected['name'])
        if vol_exists:
            if not avail:
                error_count += 1
                logging.error("Expect volume %s not exists but find it",
                              expected['name'])
                return error_count
        else:
            if avail:
                error_count += 1
                logging.error("Expect volume %s exists but not find it",
                              expected['name'])
                return error_count
            else:
                logging.info("Volume %s checked successfully for deletion",
                             expected['name'])
                return error_count

        actual_list = get_vol_list(expected['pool_name'], expected['name'])
        actual_info = pv.volume_info(expected['name'])
        # Get values from vol-dumpxml
        volume_xml = vol_xml.VolXML.new_from_vol_dumpxml(expected['name'],
                                                         expected['pool_name'])

        # Check against virsh vol-key
        vol_key = virsh.vol_key(expected['name'], expected['pool_name'])
        if vol_key.stdout.strip() != volume_xml.key:
            logging.error("Volume key is mismatch \n%s"
                          "Key from xml: %s\nKey from command: %s",
                          expected['name'], volume_xml.key, vol_key)
            error_count += 1
        else:
            logging.debug("virsh vol-key for volume: %s successfully"
                          " checked against vol-dumpxml", expected['name'])

        # Check against virsh vol-name
        get_vol_name = virsh.vol_name(expected['path'])
        if get_vol_name.stdout.strip() != expected['name']:
            logging.error("Volume name mismatch\n"
                          "Expected name: %s\nOutput of vol-name: %s",
                          expected['name'], get_vol_name)

        # Check against virsh vol-path
        vol_path = virsh.vol_path(expected['name'], expected['pool_name'])
        if expected['path'] != vol_path.stdout.strip():
            logging.error("Volume path mismatch for volume: %s\n"
                          "Expected path: %s\nOutput of vol-path: %s\n",
                          expected['name'],
                          expected['path'], vol_path)
            error_count += 1
        else:
            logging.debug("virsh vol-path for volume: %s successfully checked"
                          " against created volume path", expected['name'])

        # Check path against virsh vol-list
        if expected['path'] != actual_list['path']:
            logging.error("Volume path mismatch for volume:%s\n"
                          "Expected Path: %s\nPath from virsh vol-list: %s",
                          expected['name'], expected['path'],
                          actual_list['path'])
            error_count += 1
        else:
            logging.debug("Path of volume: %s from virsh vol-list "
                          "successfully checked against created "
                          "volume path", expected['name'])

        # Check path against virsh vol-dumpxml
        if expected['path'] != volume_xml.path:
            logging.error("Volume path mismatch for volume: %s\n"
                          "Expected Path: %s\nPath from virsh vol-dumpxml: %s",
                          expected['name'], expected['path'], volume_xml.path)
            error_count += 1

        else:
            logging.debug("Path of volume: %s from virsh vol-dumpxml "
                          "successfully checked against created volume path",
                          expected['name'])

        # Check type against virsh vol-list
        if expected['type'] != actual_list['type']:
            logging.error("Volume type mismatch for volume: %s\n"
                          "Expected Type: %s\n Type from vol-list: %s",
                          expected['name'], expected['type'],
                          actual_list['type'])
            error_count += 1
        else:
            logging.debug("Type of volume: %s from virsh vol-list "
                          "successfully checked against the created "
                          "volume type", expected['name'])

        # Check type against virsh vol-info
        if expected['type'] != actual_info['Type']:
            logging.error("Volume type mismatch for volume: %s\n"
                          "Expected Type: %s\n Type from vol-info: %s",
                          expected['name'], expected['type'],
                          actual_info['Type'])
            error_count += 1
        else:
            logging.debug("Type of volume: %s from virsh vol-info successfully"
                          " checked against the created volume type",
                          expected['name'])

        # Check name against virsh vol-info
        if expected['name'] != actual_info['Name']:
            logging.error("Volume name mismatch for volume: %s\n"
                          "Expected name: %s\n Name from vol-info: %s",
                          expected['name'],
                          expected['name'], actual_info['Name'])
            error_count += 1
        else:
            logging.debug("Name of volume: %s from virsh vol-info successfully"
                          " checked against the created volume name",
                          expected['name'])

        # Check format against qemu-img info
        img_info = utils_misc.get_image_info(expected['path'])
        if expected['format']:
            if expected['format'] != img_info['format']:
                logging.error("Volume format mismatch for volume: %s\n"
                              "Expected format: %s\n"
                              "Format from qemu-img info: %s",
                              expected['name'], expected['format'],
                              img_info['format'])
                error_count += 1
            else:
                logging.debug("Format of volume: %s from qemu-img info "
                              "checked successfully against the created "
                              "volume format", expected['name'])

        # Check format against vol-dumpxml
        if expected['format']:
            if expected['format'] != volume_xml.format:
                logging.error("Volume format mismatch for volume: %s\n"
                              "Expected format: %s\n"
                              "Format from vol-dumpxml: %s",
                              expected['name'], expected['format'],
                              volume_xml.format)
                error_count += 1
            else:
                logging.debug("Format of volume: %s from virsh vol-dumpxml "
                              "checked successfully against the created"
                              " volume format", expected['name'])

        logging.info(expected['encrypt_format'])
        # Check encrypt against vol-dumpxml
        if expected['encrypt_format']:
            # As the 'default' format will change to a specific value
            # (qcow), just log it here
            logging.debug("Encryption format of volume '%s' is: %s",
                          expected['name'], volume_xml.encryption.format)
            # And also output encryption secret uuid
            secret_uuid = volume_xml.encryption.secret['uuid']
            logging.debug("Encryption secret of volume '%s' is: %s",
                          expected['name'], secret_uuid)
            if expected['encrypt_secret']:
                if expected['encrypt_secret'] != secret_uuid:
                    logging.error("Encryption secret mismatch for volume: %s\n"
                                  "Expected secret uuid: %s\n"
                                  "Secret uuid from vol-dumpxml: %s",
                                  expected['name'], expected['encrypt_secret'],
                                  secret_uuid)
                    error_count += 1
                else:
                    # When no encryption secret is set explicitly, one is
                    # generated automatically at volume creation time
                    logging.debug("Volume encryption secret is %s", secret_uuid)

        # Check pool name against vol-pool
        vol_pool = virsh.vol_pool(expected['path'])
        if expected['pool_name'] != vol_pool.stdout.strip():
            logging.error("Pool name mismatch for volume: %s against"
                          "virsh vol-pool", expected['name'])
            error_count += 1
        else:
            logging.debug("Pool name of volume: %s checked successfully"
                          " against the virsh vol-pool", expected['name'])

        norm_cap = {}
        capacity = {}
        capacity['list'] = actual_list['capacity']
        capacity['info'] = actual_info['Capacity']
        capacity['xml'] = volume_xml.capacity
        capacity['qemu_img'] = img_info['vsize']
        norm_cap = norm_capacity(capacity)
        delta_size = params.get('delta_size', "1024")
        if abs(expected['capacity'] - norm_cap['list']) > delta_size:
            logging.error("Capacity mismatch for volume: %s against virsh"
                          " vol-list\nExpected: %s\nActual: %s",
                          expected['name'], expected['capacity'],
                          norm_cap['list'])
            error_count += 1
        else:
            logging.debug("Capacity value checked successfully against"
                          " virsh vol-list for volume %s", expected['name'])

        if abs(expected['capacity'] - norm_cap['info']) > delta_size:
            logging.error("Capacity mismatch for volume: %s against virsh"
                          " vol-info\nExpected: %s\nActual: %s",
                          expected['name'], expected['capacity'],
                          norm_cap['info'])
            error_count += 1
        else:
            logging.debug("Capacity value checked successfully against"
                          " virsh vol-info for volume %s", expected['name'])

        if abs(expected['capacity'] - norm_cap['xml']) > delta_size:
            logging.error("Capacity mismatch for volume: %s against virsh"
                          " vol-dumpxml\nExpected: %s\nActual: %s",
                          expected['name'], expected['capacity'],
                          norm_cap['xml'])
            error_count += 1
        else:
            logging.debug("Capacity value checked successfully against"
                          " virsh vol-dumpxml for volume: %s",
                          expected['name'])

        if abs(expected['capacity'] - norm_cap['qemu_img']) > delta_size:
            logging.error("Capacity mismatch for volume: %s against "
                          "qemu-img info\nExpected: %s\nActual: %s",
                          expected['name'], expected['capacity'],
                          norm_cap['qemu_img'])
            error_count += 1
        else:
            logging.debug("Capacity value checked successfully against"
                          " qemu-img info for volume: %s",
                          expected['name'])
        return error_count

    def get_all_secrets():
        """
        Return the UUIDs of all existing libvirt secrets in a list
        """
        secret_list = []
        secrets = virsh.secret_list().stdout.strip()
        for secret in secrets.splitlines()[2:]:
            secret_list.append(secret.strip().split()[0])
        return secret_list

    # Initialize the variables
    pool_name = params.get("pool_name")
    pool_type = params.get("pool_type")
    pool_target = params.get("pool_target")
    # A bare target name has no directory component; anchor it under tmpdir
    if os.path.dirname(pool_target) == "":
        pool_target = os.path.join(test.tmpdir, pool_target)
    vol_name = params.get("volume_name")
    vol_number = int(params.get("number_of_volumes", "2"))
    capacity = params.get("volume_size", "1048576")
    allocation = params.get("volume_allocation", "1048576")
    vol_format = params.get("volume_format")
    source_name = params.get("gluster_source_name", "gluster-vol1")
    source_path = params.get("gluster_source_path", "/")
    encrypt_format = params.get("vol_encrypt_format")
    encrypt_secret = params.get("encrypt_secret")
    emulated_image = params.get("emulated_image")
    emulated_image_size = params.get("emulated_image_size")
    if not libvirt_version.version_compare(1, 0, 0):
        if pool_type == "gluster":
            raise error.TestNAError("Gluster pool is not supported in current"
                                    " libvirt version.")

    try:
        str_capa = utils_misc.normalize_data_size(capacity, "B")
        int_capa = int(str(str_capa).split('.')[0])
    except ValueError:
        raise error.TestError("Translate size %s to 'B' failed" % capacity)
    try:
        str_capa = utils_misc.normalize_data_size(allocation, "B")
        int_allo = int(str(str_capa).split('.')[0])
    except ValueError:
        raise error.TestError("Translate size %s to 'B' failed" % allocation)

    # Stop multipathd to avoid pool-start failures (for fs-like pools, the
    # newly added disk may be claimed by device-mapper, so starting the pool
    # would report a disk-already-mounted error).
    multipathd = service.Factory.create_service("multipathd")
    multipathd_status = multipathd.status()
    if multipathd_status:
        multipathd.stop()

    # Record the libvirt secrets that exist before the test
    ori_secrets = get_all_secrets()
    expected_vol = {}
    vol_type = 'file'
    if pool_type in ['disk', 'logical']:
        vol_type = 'block'
    if pool_type == 'gluster':
        vol_type = 'network'
    logging.debug("Debug:\npool_name:%s\npool_type:%s\npool_target:%s\n"
                  "vol_name:%s\nvol_number:%s\ncapacity:%s\nallocation:%s\n"
                  "vol_format:%s", pool_name, pool_type, pool_target,
                  vol_name, vol_number, capacity, allocation, vol_format)

    libv_pvt = utlv.PoolVolumeTest(test, params)
    # Run Testcase
    total_err_count = 0
    try:
        # Create a new pool
        libv_pvt.pre_pool(pool_name=pool_name,
                          pool_type=pool_type,
                          pool_target=pool_target,
                          emulated_image=emulated_image,
                          image_size=emulated_image_size,
                          source_name=source_name,
                          source_path=source_path)
        for i in range(vol_number):
            volume_name = "%s_%d" % (vol_name, i)
            expected_vol['pool_name'] = pool_name
            expected_vol['pool_type'] = pool_type
            expected_vol['pool_target'] = pool_target
            expected_vol['capacity'] = int_capa
            expected_vol['allocation'] = int_allo
            expected_vol['format'] = vol_format
            expected_vol['name'] = volume_name
            expected_vol['type'] = vol_type
            expected_vol['encrypt_format'] = encrypt_format
            expected_vol['encrypt_secret'] = encrypt_secret
            # Create the volume
            if pool_type != "gluster":
                expected_vol['path'] = pool_target + '/' + volume_name
                new_volxml = vol_xml.VolXML()
                new_volxml.name = volume_name
                new_volxml.capacity = int_capa
                new_volxml.allocation = int_allo
                if vol_format:
                    new_volxml.format = vol_format
                encrypt_dict = {}
                if encrypt_format:
                    encrypt_dict.update({"format": encrypt_format})
                if encrypt_secret:
                    encrypt_dict.update({"secret": {'uuid': encrypt_secret}})
                if encrypt_dict:
                    new_volxml.encryption = new_volxml.new_encryption(**encrypt_dict)
                logging.debug("Volume XML for creation:\n%s", str(new_volxml))
                virsh.vol_create(pool_name, new_volxml.xml, debug=True)
            else:
                ip_addr = utlv.get_host_ipv4_addr()
                expected_vol['path'] = "gluster://%s/%s/%s" % (ip_addr,
                                                               source_name,
                                                               volume_name)
                utils.run("qemu-img create -f %s %s %s" % (vol_format,
                                                           expected_vol['path'],
                                                           capacity))
            virsh.pool_refresh(pool_name)
            # Check volumes
            total_err_count += check_vol(expected_vol)
            # Delete volume and check for results
            delete_volume(expected_vol)
            total_err_count += check_vol(expected_vol, False)
        if total_err_count > 0:
            raise error.TestFail("Get %s errors when checking volume" % total_err_count)
    finally:
        # Clean up
        for sec in get_all_secrets():
            if sec not in ori_secrets:
                virsh.secret_undefine(sec)
        try:
            libv_pvt.cleanup_pool(pool_name, pool_type, pool_target,
                                  emulated_image, source_name=source_name)
        except error.TestFail as detail:
            logging.error(str(detail))
        if multipathd_status:
            multipathd.start()

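A detail worth lifting out of the setup above: os.path.dirname returns an empty string for a bare filename, which is how the test decides whether pool_target needs to be anchored under the temporary directory. A standalone sketch of the same check (default_dir stands in for test.tmpdir and is illustrative):

import os

def resolve_target(target, default_dir="/tmp"):
    """Anchor bare names under default_dir; leave real paths alone."""
    if os.path.dirname(target) == "":
        return os.path.join(default_dir, target)
    return target

assert resolve_target("mypool") == "/tmp/mypool"
assert resolve_target("/var/lib/pool") == "/var/lib/pool"
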
Example 33

Project: rst2pdf Source File: styles.py
    def __init__(self, flist, font_path=None, style_path=None, def_dpi=300):
        log.info('Using stylesheets: %s' % ','.join(flist))
        # find base path
        if hasattr(sys, 'frozen'):
            self.PATH = abspath(dirname(sys.executable))
        else:
            self.PATH = abspath(dirname(__file__))

        # flist is a list of stylesheet filenames.
        # They will be loaded and merged in order,
        # but the two default stylesheets will always
        # be loaded first.
        flist = [join(self.PATH, 'styles', 'styles.style'),
                join(self.PATH, 'styles', 'default.style')] + flist

        self.def_dpi=def_dpi
        if font_path is None:
            font_path=[]
        font_path+=['.', os.path.join(self.PATH, 'fonts')]
        self.FontSearchPath = list(map(os.path.expanduser, font_path))

        if style_path is None:
            style_path=[]
        style_path+=['.', os.path.join(self.PATH, 'styles'),
                      '~/.rst2pdf/styles']
        self.StyleSearchPath = list(map(os.path.expanduser, style_path))
        self.FontSearchPath=list(set(self.FontSearchPath))
        self.StyleSearchPath=list(set(self.StyleSearchPath))

        log.info('FontPath:%s'%self.FontSearchPath)
        log.info('StylePath:%s'%self.StyleSearchPath)

        findfonts.flist = self.FontSearchPath
        # Page width, height
        self.pw = 0
        self.ph = 0

        # Page size [w,h]
        self.ps = None

        # Margins (top,bottom,left,right,gutter)
        self.tm = 0
        self.bm = 0
        self.lm = 0
        self.rm = 0
        self.gm = 0

        #text width
        self.tw = 0

        # Default emsize, later it will be the fontSize of the base style
        self.emsize=10

        self.languages = []

        ssdata = self.readSheets(flist)

        # Get pageSetup data from all stylesheets in order:
        self.ps = pagesizes.A4
        self.page={}
        for data, ssname in ssdata:
            page = data.get('pageSetup', {})
            if page:
                self.page.update(page)
                pgs=page.get('size', None)
                if pgs: # A standard size
                    pgs=pgs.upper()
                    if pgs in pagesizes.__dict__:
                        self.ps = list(pagesizes.__dict__[pgs])
                        self.psname = pgs
                        if 'width' in self.page: del(self.page['width'])
                        if 'height' in self.page: del(self.page['height'])
                    elif pgs.endswith('-LANDSCAPE'):
                        self.psname = pgs.split('-')[0]
                        self.ps = list(pagesizes.landscape(pagesizes.__dict__[self.psname]))
                        if 'width' in self.page: del(self.page['width'])
                        if 'height' in self.page: del(self.page['height'])
                    else:
                        log.critical('Unknown page size %s in stylesheet %s'%\
                            (page['size'], ssname))
                        continue
                else:  # A custom size
                    if 'size' in self.page:
                        del(self.page['size'])
                    # The sizes are expressed in some unit.
                    # For example, 2cm is 2 centimeters, and we need
                    # to do 2*cm (cm comes from reportlab.lib.units)
                    if 'width' in page:
                        self.ps[0] = self.adjustUnits(page['width'])
                    if 'height' in page:
                        self.ps[1] = self.adjustUnits(page['height'])
                self.pw, self.ph = self.ps
                if 'margin-left' in page:
                    self.lm = self.adjustUnits(page['margin-left'])
                if 'margin-right' in page:
                    self.rm = self.adjustUnits(page['margin-right'])
                if 'margin-top' in page:
                    self.tm = self.adjustUnits(page['margin-top'])
                if 'margin-bottom' in page:
                    self.bm = self.adjustUnits(page['margin-bottom'])
                if 'margin-gutter' in page:
                    self.gm = self.adjustUnits(page['margin-gutter'])
                if 'spacing-header' in page:
                    self.ts = self.adjustUnits(page['spacing-header'])
                if 'spacing-footer' in page:
                    self.bs = self.adjustUnits(page['spacing-footer'])
                if 'firstTemplate' in page:
                    self.firstTemplate = page['firstTemplate']

                # tw is the text width.
                # We need it to calculate header-footer height
                # and compress literal blocks.
                self.tw = self.pw - self.lm - self.rm - self.gm

        # Get page templates from all stylesheets
        self.pageTemplates = {}
        for data, ssname in ssdata:
            templates = data.get('pageTemplates', {})
            # templates is a dictionary of pageTemplates
            for key in templates:
                template = templates[key]
                # template is a dict.
                # template['frames'] is a list of frames
                if key in self.pageTemplates:
                    self.pageTemplates[key].update(template)
                else:
                    self.pageTemplates[key] = template

        # Get font aliases from all stylesheets in order
        self.fontsAlias = {}
        for data, ssname in ssdata:
            self.fontsAlias.update(data.get('fontsAlias', {}))

        embedded_fontnames = []
        self.embedded = []
        # Embed all fonts indicated in all stylesheets
        for data, ssname in ssdata:
            embedded = data.get('embeddedFonts', [])

            for font in embedded:
                try:
                    # Just a font name, try to embed it
                    if isinstance(font, str):
                        # See if we can find the font
                        fname, pos = findfonts.guessFont(font)
                        # Initialize so fontList is bound even when the
                        # font was already embedded
                        fontList = None
                        if font in embedded_fontnames:
                            pass
                        else:
                            fontList = findfonts.autoEmbed(font)
                            if fontList:
                                embedded_fontnames.append(font)
                        if not fontList:
                            if (fname, pos) in embedded_fontnames:
                                fontList = None
                            else:
                                fontList = findfonts.autoEmbed(fname)
                        if fontList is not None:
                            self.embedded += fontList
                            # Maybe the font we got is not called
                            # the same as the one we gave
                            # so check that out
                            suff = ["", "-Oblique", "-Bold", "-BoldOblique"]
                            if not fontList[0].startswith(font):
                                # We need to create font aliases, and use them
                                for fname, aliasname in zip(
                                        fontList,
                                        [font + suffix for suffix in suff]):
                                    self.fontsAlias[aliasname] = fname
                        continue

                    # Each "font" is a list of four files, which will be
                    # used for regular / bold / italic / bold+italic
                    # versions of the font.
                    # If your font doesn't have one of them, just repeat
                    # the regular font.

                    # Example, using the Tuffy font from
                    # http://tulrich.com/fonts/
                    # "embeddedFonts" : [
                    #                    ["Tuffy.ttf",
                    #                     "Tuffy_Bold.ttf",
                    #                     "Tuffy_Italic.ttf",
                    #                     "Tuffy_Bold_Italic.ttf"]
                    #                   ],

                    # The fonts will be registered with the file name,
                    # minus the extension.

                    if font[0].lower().endswith('.ttf'): # A True Type font
                        for variant in font:
                            location=self.findFont(variant)
                            pdfmetrics.registerFont(
                                TTFont(str(variant.split('.')[0]),
                                location))
                            log.info('Registering font: %s from %s'%\
                                (str(variant.split('.')[0]),location))
                            self.embedded.append(str(variant.split('.')[0]))

                        # And map them all together
                        regular, bold, italic, bolditalic = [
                            variant.split('.')[0] for variant in font]
                        addMapping(regular, 0, 0, regular)
                        addMapping(regular, 0, 1, italic)
                        addMapping(regular, 1, 0, bold)
                        addMapping(regular, 1, 1, bolditalic)
                    else: # A Type 1 font
                        # For type 1 fonts we require
                        # [FontName,regular,italic,bold,bolditalic]
                        # where each variant is a (pfbfile,afmfile) pair.
                        # For example, for the URW palladio from TeX:
                        # ["Palatino",("uplr8a.pfb","uplr8a.afm"),
                        #             ("uplri8a.pfb","uplri8a.afm"),
                        #             ("uplb8a.pfb","uplb8a.afm"),
                        #             ("uplbi8a.pfb","uplbi8a.afm")]
                        faceName = font[0]
                        regular = pdfmetrics.EmbeddedType1Face(*font[1])
                        italic = pdfmetrics.EmbeddedType1Face(*font[2])
                        bold = pdfmetrics.EmbeddedType1Face(*font[3])
                        bolditalic = pdfmetrics.EmbeddedType1Face(*font[4])

                except Exception:
                    _, e, _ = sys.exc_info()
                    try:
                        if isinstance(font, list):
                            fname = font[0]
                        else:
                            fname = font
                        log.error("Error processing font %s: %s",
                            os.path.splitext(fname)[0], str(e))
                        log.error("Registering %s as Helvetica alias", fname)
                        self.fontsAlias[fname] = 'Helvetica'
                    except Exception:
                        _, e, _ = sys.exc_info()
                        log.critical("Error processing font %s: %s",
                            fname, str(e))
                        continue

        # Go through all styles in all stylesheets and find all fontNames.
        # Then decide what to do with them
        for data, ssname in ssdata:
            for [skey, style] in self.stylepairs(data):
                for key in style:
                    if key == 'fontName' or key.endswith('FontName'):
                        # It's an alias, replace it
                        if style[key] in self.fontsAlias:
                            style[key] = self.fontsAlias[style[key]]
                        # Embedded already, nothing to do
                        if style[key] in self.embedded:
                            continue
                        # Standard font, nothing to do
                        if style[key] in (
                                    "Courier",
                                    "Courier-Bold",
                                    "Courier-BoldOblique",
                                    "Courier-Oblique",
                                    "Helvetica",
                                    "Helvetica-Bold",
                                    "Helvetica-BoldOblique",
                                    "Helvetica-Oblique",
                                    "Symbol",
                                    "Times-Bold",
                                    "Times-BoldItalic",
                                    "Times-Italic",
                                    "Times-Roman",
                                    "ZapfDingbats"):
                            continue
                        # Now we need to do something
                        # See if we can find the font
                        fname, pos = findfonts.guessFont(style[key])

                        # Initialize so fontList is bound even when the
                        # font was already embedded
                        fontList = None
                        if style[key] in embedded_fontnames:
                            pass
                        else:
                            fontList = findfonts.autoEmbed(style[key])
                            if fontList:
                                embedded_fontnames.append(style[key])
                        if not fontList:
                            if (fname, pos) in embedded_fontnames:
                                fontList = None
                            else:
                                fontList = findfonts.autoEmbed(fname)
                            if fontList:
                                embedded_fontnames.append((fname, pos))
                        if fontList:
                            self.embedded += fontList
                            # Maybe the font we got is not called
                            # the same as the one we gave so check that out
                            suff = ["", "-Bold", "-Oblique", "-BoldOblique"]
                            if not fontList[0].startswith(style[key]):
                                # We need to create font aliases, and use them
                                basefname=style[key].split('-')[0]
                                for fname, aliasname in zip(
                                        fontList,
                                        [basefname + suffix for
                                        suffix in suff]):
                                    self.fontsAlias[aliasname] = fname
                                style[key] = self.fontsAlias[basefname +\
                                             suff[pos]]
                        else:
                            log.error("Unknown font: \"%s\","
                                      "replacing with Helvetica", style[key])
                            style[key] = "Helvetica"

        #log.info('FontList: %s'%self.embedded)
        #log.info('FontAlias: %s'%self.fontsAlias)
        # Get styles from all stylesheets in order
        self.stylesheet = {}
        self.styles = []
        self.linkColor = 'navy'
        # FIXME: linkColor should probably not be a global
        #        style, and tocColor should probably not
        #        be a special case, but for now I'm going
        #        with the flow...
        self.tocColor = None
        for data, ssname in ssdata:
            self.linkColor = data.get('linkColor') or self.linkColor
            self.tocColor = data.get('tocColor') or self.tocColor
            for [skey, style] in self.stylepairs(data):
                sdict = {}
                # FIXME: this is done completely backwards
                for key in style:
                    # Handle color references by name
                    if key == 'color' or key.endswith('Color') and style[key]:
                        style[key] = formatColor(style[key])

                    # Yet another workaround for the unicode bug in
                    # reportlab's toColor
                    elif key == 'commands':
                        style[key]=validateCommands(style[key])
                        #for command in style[key]:
                            #c=command[0].upper()
                            #if c=='ROWBACKGROUNDS':
                                #command[3]=[str(c) for c in command[3]]
                            #elif c in ['BOX','INNERGRID'] or c.startswith('LINE'):
                                #command[4]=str(command[4])

                    # Handle alignment constants
                    elif key == 'alignment':
                        style[key] = dict(TA_LEFT=0,
                                          LEFT=0,
                                          TA_CENTER=1,
                                          CENTER=1,
                                          TA_CENTRE=1,
                                          CENTRE=1,
                                          TA_RIGHT=2,
                                          RIGHT=2,
                                          TA_JUSTIFY=4,
                                          JUSTIFY=4,
                                          DECIMAL=8, )[style[key].upper()]

                    elif key == 'language':
                        if not style[key] in self.languages:
                            self.languages.append(style[key])

                    # Make keys str instead of unicode (required by reportlab)
                    sdict[str(key)] = style[key]
                    sdict['name'] = skey
                # If the style already exists, update it
                if skey in self.stylesheet:
                    self.stylesheet[skey].update(sdict)
                else: # New style
                    self.stylesheet[skey] = sdict
                    self.styles.append(sdict)

        # If the stylesheet has a style name docutils won't reach
        # make a copy with a sanitized name.
        # This may make name collisions possible but that should be
        # rare (who would have custom_name and custom-name in the
        # same stylesheet? ;-)
        # Issue 339

        styles2=[]
        for s in self.styles:
            if not re.match("^[a-z](-?[a-z0-9]+)*$", s['name']):
                s2 = copy(s)
                s2['name'] = docutils.nodes.make_id(s['name'])
                log.warning('%s is an invalid docutils class name, adding alias %s'%(s['name'], s2['name']))
                styles2.append(s2)
        self.styles.extend(styles2)

        # And create the reportlab stylesheet
        self.StyleSheet = StyleSheet1()
        # Patch to make the code compatible with both reportlab 2.4 and
        # newer SVN (2.4+) versions
        if not hasattr(self.StyleSheet, 'has_key'):
            self.StyleSheet.__class__.has_key = lambda s, k : k in s
        for s in self.styles:
            if 'parent' in s:
                if s['parent'] is None:
                    if s['name'] != 'base':
                        s['parent'] = self.StyleSheet['base']
                    else:
                        del(s['parent'])
                else:
                    s['parent'] = self.StyleSheet[s['parent']]
            else:
                if s['name'] != 'base':
                    s['parent'] = self.StyleSheet['base']

            # If the style has no bulletFontName but it has a fontName, set it
            if ('bulletFontName' not in s) and ('fontName' in s):
                s['bulletFontName'] = s['fontName']

            hasFS = True
            # Adjust fontsize units
            if 'fontSize' not in s:
                s['fontSize'] = s['parent'].fontSize
                s['trueFontSize']=None
                hasFS = False
            elif 'parent' in s:
                # This means you can set the fontSize to
                # "2cm" or to "150%" which will be calculated
                # relative to the parent style
                s['fontSize'] = self.adjustUnits(s['fontSize'],
                                    s['parent'].fontSize)
                s['trueFontSize']=s['fontSize']
            else:
                # If s has no parent, it's base, which has
                # an explicit point size by default and %
                # makes no sense, but guess it as % of 10pt
                s['fontSize'] = self.adjustUnits(s['fontSize'], 10)

            # If the leading is not set, but the size is, set it
            if 'leading' not in s and hasFS:
                s['leading'] = 1.2*s['fontSize']

            # If the bullet font size is not set, set it as fontSize
            if ('bulletFontSize' not in s) and ('fontSize' in s):
                s['bulletFontSize'] = s['fontSize']

            # If the borderPadding is a list and wordaxe <=0.3.2,
            # convert it to an integer. Workaround for Issue
            if 'borderPadding' in s and ((HAS_WORDAXE and \
                    wordaxe_version <='wordaxe 0.3.2') or
                    reportlab.Version < "2.3" )\
                    and isinstance(s['borderPadding'], list):
                log.warning('Using a borderPadding list in '\
                    'style %s with wordaxe <= 0.3.2 or Reportlab < 2.3. That is not '\
                    'supported, so it will probably look wrong'%s['name'])
                s['borderPadding']=s['borderPadding'][0]

            self.StyleSheet.add(ParagraphStyle(**s))


        self.emsize=self['base'].fontSize
        # Make stdFont the basefont, for Issue 65
        reportlab.rl_config.canvas_basefontname = self['base'].fontName
        # Make stdFont the default font for table cell styles (Issue 65)
        reportlab.platypus.tables.CellStyle.fontname=self['base'].fontName

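The constructor above opens with a packaging idiom worth isolating: a frozen executable (py2exe/PyInstaller style) has no usable __file__, so the base path comes from the directory of sys.executable instead. Roughly, and only as a sketch rather than rst2pdf's exact code:

import os
import sys

def resource_base_path():
    """Directory that bundled resources (styles, fonts) sit next to."""
    if getattr(sys, 'frozen', False):
        # Frozen app: resources live beside the executable itself
        return os.path.abspath(os.path.dirname(sys.executable))
    # Normal import: resources live beside this source file
    return os.path.abspath(os.path.dirname(__file__))
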
Example 34

Project: python-netsnmpagent Source File: test_03_netsnmpagent_snmpobjs.py
Function: set_up
def setUp(self):
	global testenv, agent
	global settableInteger32, settableUnsigned32, settableTimeTicks
	global settableOctetString

	testenv = netsnmpTestEnv()

	# Create a new netsnmpAgent instance which
	# - connects to the net-snmp test environment's snmpd instance
	# - uses its statedir
	# - loads the TEST-MIB from our tests directory
	testMIBPath = os.path.abspath(os.path.dirname(__file__)) + \
				  "/TEST-MIB.txt"
	agent = netsnmpagent.netsnmpAgent(
		AgentName      = "netsnmpAgentTestAgent",
		MasterSocket   = testenv.mastersocket,
		PersistenceDir = testenv.statedir,
		MIBFiles       = [ testMIBPath ],
	)

	# Test OIDs for Integer32 scalar type
	settableInteger32 = agent.Integer32(
		oidstr = "TEST-MIB::testInteger32NoInitval",
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32ZeroInitval",
		initval = 0,
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32MinusOneInitval",
		initval = -1,
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32MinInitval",
		initval = -2147483648,
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32MinMinusOneInitval",
		initval = -2147483649,
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32OneInitval",
		initval = 1,
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32MaxInitval",
		initval = 2147483647,
	)

	agent.Integer32(
		oidstr  = "TEST-MIB::testInteger32MaxPlusOneInitval",
		initval = 2147483648,
	)

	agent.Integer32(
		oidstr   = "TEST-MIB::testInteger32ReadOnly",
		writable = False,
	)

	# Test OIDs for Unsigned32 scalar type
	settableUnsigned32 = agent.Unsigned32(
		oidstr = "TEST-MIB::testUnsigned32NoInitval",
	)

	agent.Unsigned32(
		oidstr  = "TEST-MIB::testUnsigned32ZeroInitval",
		initval = 0,
	)

	agent.Unsigned32(
		oidstr  = "TEST-MIB::testUnsigned32MinusOneInitval",
		initval = -1,
	)

	agent.Unsigned32(
		oidstr  = "TEST-MIB::testUnsigned32OneInitval",
		initval = 1,
	)

	agent.Unsigned32(
		oidstr  = "TEST-MIB::testUnsigned32MaxInitval",
		initval = 4294967295,
	)

	agent.Unsigned32(
		oidstr  = "TEST-MIB::testUnsigned32MaxPlusOneInitval",
		initval = 4294967296,
	)

	agent.Unsigned32(
		oidstr   = "TEST-MIB::testUnsigned32ReadOnly",
		writable = False,
	)

	# Test OIDs for Counter32 scalar type
	agent.Counter32(
		oidstr = "TEST-MIB::testCounter32NoInitval",
	)

	agent.Counter32(
		oidstr  = "TEST-MIB::testCounter32ZeroInitval",
		initval = 0,
	)

	agent.Counter32(
		oidstr  = "TEST-MIB::testCounter32MinusOneInitval",
		initval = -1,
	)

	agent.Counter32(
		oidstr  = "TEST-MIB::testCounter32OneInitval",
		initval = 1,
	)

	agent.Counter32(
		oidstr  = "TEST-MIB::testCounter32MaxInitval",
		initval = 4294967295,
	)

	agent.Counter32(
		oidstr  = "TEST-MIB::testCounter32MaxPlusOneInitval",
		initval = 4294967296,
	)

	# Test OIDs for Counter64 scalar type
	agent.Counter64(
		oidstr = "TEST-MIB::testCounter64NoInitval",
	)

	agent.Counter64(
		oidstr  = "TEST-MIB::testCounter64ZeroInitval",
		initval = 0,
	)

	agent.Counter64(
		oidstr  = "TEST-MIB::testCounter64MinusOneInitval",
		initval = -1,
	)

	agent.Counter64(
		oidstr  = "TEST-MIB::testCounter64OneInitval",
		initval = 1,
	)

	agent.Counter64(
		oidstr  = "TEST-MIB::testCounter64MaxInitval",
		initval = 18446744073709551615,
	)

	agent.Counter64(
		oidstr  = "TEST-MIB::testCounter64MaxPlusOneInitval",
		initval = 18446744073709551616,
	)

	# Test OIDs for TimeTicks scalar type
	settableTimeTicks = agent.TimeTicks(
		oidstr = "TEST-MIB::testTimeTicksNoInitval",
	)

	agent.TimeTicks(
		oidstr  = "TEST-MIB::testTimeTicksZeroInitval",
		initval = 0,
	)

	agent.TimeTicks(
		oidstr  = "TEST-MIB::testTimeTicksMinusOneInitval",
		initval = -1,
	)

	agent.TimeTicks(
		oidstr  = "TEST-MIB::testTimeTicksOneInitval",
		initval = 1,
	)

	agent.TimeTicks(
		oidstr  = "TEST-MIB::testTimeTicksMaxInitval",
		initval = 4294967295,
	)

	agent.TimeTicks(
		oidstr  = "TEST-MIB::testTimeTicksMaxPlusOneInitval",
		initval = 4294967296,
	)

	agent.TimeTicks(
		oidstr   = "TEST-MIB::testTimeTicksReadOnly",
		writable = False,
	)

	# Test OIDs for OctetString scalar type
	settableOctetString = agent.OctetString(
		oidstr = "TEST-MIB::testOctetStringNoInitval",
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetStringEmptyInitval",
		initval = "",
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetStringOneASCIICharInitval",
		initval = "A",
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetStringOneUTF8CharInitval",
		initval = "Ä",
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetString255ASCIICharsInitval",
		initval = "A" * 255,
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetString255UTF8CharsInitval",
		initval = "Ä" * 255,
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetString256ASCIICharsInitval",
		initval = "A" * 256,
	)

	agent.OctetString(
		oidstr  = "TEST-MIB::testOctetString256UTF8CharsInitval",
		initval = "Ä" * 256,
	)

	# Connect to master snmpd instance
	agent.start()

	# Create a separate thread to implement the most minimalistic
	# possible agent: one that does nothing but request handling
	agent.loop = True
	def RequestHandler():
		while agent.loop:
			agent.check_and_process(False)

	agent.thread = threading.Thread(target=RequestHandler)
	agent.thread.daemon = True
	agent.thread.start()

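setUp() above builds the path to TEST-MIB.txt by concatenating strings; os.path.join expresses the same "data file next to this module" idea without hard-coding the separator. A small sketch (the helper name is mine; the filename is the one used above):

import os

def data_file(name):
    """Absolute path of a data file shipped alongside this module."""
    here = os.path.abspath(os.path.dirname(__file__))
    return os.path.join(here, name)

test_mib_path = data_file("TEST-MIB.txt")
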
Example 35

Project: PokemonGo-Map Source File: runserver.py
def main():
    # Patch threading to make exceptions catchable
    install_thread_excepthook()

    # Make sure exceptions get logged
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter('%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] %(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter('%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] %(message)s'))
        logging.getLogger('').addHandler(filelog)

    # Check if we have the proper encryption library file and get its path
    encryption_lib_path = get_encryption_lib_path(args)
    if encryption_lib_path == "":
        sys.exit(1)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / Only needed when running with webserver
    if not args.no_server:
        if not os.path.exists(os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical('Missing front-end assets (static/dist) -- please run "npm install && npm run build" before starting the server')
            sys.exit()

    # These are very noisy, let's shush them up a bit
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # use lat/lng directly if the location matches such a pattern
    prog = re.compile(r"^(-?\d+\.\d+),?\s?(-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    # Use the latitude and longitude to get the local altitude from Google
    try:
        url = 'https://maps.googleapis.com/maps/api/elevation/json?locations={},{}'.format(
            str(position[0]), str(position[1]))
        altitude = requests.get(url).json()[u'results'][0][u'elevation']
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    except (requests.exceptions.RequestException, IndexError, KeyError):
        log.error('Unable to retrieve altitude from Google APIs; setting to 0')

    if not any(position):
        log.error('Could not get a position by name, aborting')
        sys.exit()

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled')
    if args.encounter:
        log.info('Encountering pokemon enabled')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    app = Pogom(__name__)
    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)

    app.set_current_location(position)

    # Control the search status (running or not) across threads
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Set up the location-tracking queue and push the first location on
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()

    # Thread(s) to process database updates
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i), args=(args, db_updates_queue))
        t.daemon = True
        t.start()

    # DB cleaner; we only ever need one
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH Updates
    wh_updates_queue = Queue()

    # Thread to process webhook updates
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i), args=(args, wh_updates_queue))
        t.daemon = True
        t.start()

    if not args.only_server:

        # Check all proxies before continuing, so we know they are good
        if args.proxy and not args.proxy_skip_check:

            # Overwrite old args.proxy with new working list
            args.proxy = check_proxies(args)

        # Gather the Pokemon!

        # attempt to dump the spawn points (do this before starting the threads, or endure the woe)
        if args.spawnpoint_scanning and args.spawnpoint_scanning != 'nofile' and args.dump_spawnpoints:
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s', args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat, encryption_lib_path, db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread, name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # This loop allows ctrl-c interrupts to work, since Flask won't be holding the program open
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if args.ssl_certificate and args.ssl_privatekey \
                and os.path.exists(args.ssl_certificate) and os.path.exists(args.ssl_privatekey):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True, host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False, host=args.host, port=args.port, ssl_context=ssl_context)
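
The server snippet above leans on os.path.dirname(__file__) to resolve its static assets relative to the module itself, so the check works no matter which directory the process is started from. A minimal, self-contained sketch of that pattern (the 'static/dist' layout is just this example's own convention):

import os
import sys

# Resolve the assets directory relative to this file, not the cwd.
assets = os.path.join(os.path.dirname(__file__), 'static/dist')
if not os.path.exists(assets):
    sys.exit('Missing front-end assets in %s' % assets)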

Example 36

Project: pangyp Source File: cmake.py
Function: write_target
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
                options, generator_flags, all_qualified_targets, output):

  # The make generator does this always.
  # TODO: It would be nice to be able to tell CMake all dependencies.
  circular_libs = generator_flags.get('circular', True)

  if not generator_flags.get('standalone', False):
    output.write('\n#')
    output.write(qualified_target)
    output.write('\n')

  gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
  rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
  rel_gyp_dir = os.path.dirname(rel_gyp_file)

  # Relative path from build dir to top dir.
  build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
  # Relative path from build dir to gyp dir.
  build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)

  path_from_cmakelists_to_gyp = build_to_gyp

  spec = target_dicts.get(qualified_target, {})
  config = spec.get('configurations', {}).get(config_to_use, {})

  target_name = spec.get('target_name', '<missing target name>')
  target_type = spec.get('type', '<missing target type>')
  target_toolset = spec.get('toolset')

  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
  if cmake_target_type is None:
    print('Target %s has unknown target type %s, skipping.' %
          (target_name, target_type))
    return

  SetVariable(output, 'TARGET', target_name)
  SetVariable(output, 'TOOLSET', target_toolset)

  cmake_target_name = namer.CreateCMakeTargetName(qualified_target)

  extra_sources = []
  extra_deps = []

  # Actions must come first, since they can generate more OBJs for use below.
  if 'actions' in spec:
    WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
                 path_from_cmakelists_to_gyp, output)

  # Rules must be early like actions.
  if 'rules' in spec:
    WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
               path_from_cmakelists_to_gyp, output)

  # Copies
  if 'copies' in spec:
    WriteCopies(cmake_target_name, spec['copies'], extra_deps,
                path_from_cmakelists_to_gyp, output)

  # Target and sources
  srcs = spec.get('sources', [])

  # Gyp separates the sheep from the goats based on file extensions.
  # A full separation is done here because of flag handling (see below).
  s_sources = []
  c_sources = []
  cxx_sources = []
  linkable_sources = []
  other_sources = []
  for src in srcs:
    _, ext = os.path.splitext(src)
    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)

    if src_type == 's':
      s_sources.append(src_norm_path)
    elif src_type == 'cc':
      c_sources.append(src_norm_path)
    elif src_type == 'cxx':
      cxx_sources.append(src_norm_path)
    elif Linkable(ext):
      linkable_sources.append(src_norm_path)
    else:
      other_sources.append(src_norm_path)

  for extra_source in extra_sources:
    src, real_source = extra_source
    _, ext = os.path.splitext(real_source)
    src_type = COMPILABLE_EXTENSIONS.get(ext, None)

    if src_type == 's':
      s_sources.append(src)
    elif src_type == 'cc':
      c_sources.append(src)
    elif src_type == 'cxx':
      cxx_sources.append(src)
    elif Linkable(ext):
      linkable_sources.append(src)
    else:
      other_sources.append(src)

  s_sources_name = None
  if s_sources:
    s_sources_name = cmake_target_name + '__asm_srcs'
    SetVariableList(output, s_sources_name, s_sources)

  c_sources_name = None
  if c_sources:
    c_sources_name = cmake_target_name + '__c_srcs'
    SetVariableList(output, c_sources_name, c_sources)

  cxx_sources_name = None
  if cxx_sources:
    cxx_sources_name = cmake_target_name + '__cxx_srcs'
    SetVariableList(output, cxx_sources_name, cxx_sources)

  linkable_sources_name = None
  if linkable_sources:
    linkable_sources_name = cmake_target_name + '__linkable_srcs'
    SetVariableList(output, linkable_sources_name, linkable_sources)

  other_sources_name = None
  if other_sources:
    other_sources_name = cmake_target_name + '__other_srcs'
    SetVariableList(output, other_sources_name, other_sources)

  # CMake gets upset when executable targets provide no sources.
  # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
  dummy_sources_name = None
  has_sources = (s_sources_name or
                 c_sources_name or
                 cxx_sources_name or
                 linkable_sources_name or
                 other_sources_name)
  if target_type == 'executable' and not has_sources:
    dummy_sources_name = cmake_target_name + '__dummy_srcs'
    SetVariable(output, dummy_sources_name,
                "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
    output.write('if(NOT EXISTS "')
    WriteVariable(output, dummy_sources_name)
    output.write('")\n')
    output.write('  file(WRITE "')
    WriteVariable(output, dummy_sources_name)
    output.write('" "")\n')
    output.write("endif()\n")


  # CMake is opposed to setting linker directories and considers the practice
  # of setting linker directories dangerous. Instead, it favors the use of
  # find_library and passing absolute paths to target_link_libraries.
  # However, CMake does provide the command link_directories, which adds
  # link directories to targets defined after it is called.
  # As a result, link_directories must come before the target definition.
  # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
  library_dirs = config.get('library_dirs')
  if library_dirs is not None:
    output.write('link_directories(')
    for library_dir in library_dirs:
      output.write(' ')
      output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
      output.write('\n')
    output.write(')\n')

  output.write(cmake_target_type.command)
  output.write('(')
  output.write(cmake_target_name)

  if cmake_target_type.modifier is not None:
    output.write(' ')
    output.write(cmake_target_type.modifier)

  if s_sources_name:
    WriteVariable(output, s_sources_name, ' ')
  if c_sources_name:
    WriteVariable(output, c_sources_name, ' ')
  if cxx_sources_name:
    WriteVariable(output, cxx_sources_name, ' ')
  if linkable_sources_name:
    WriteVariable(output, linkable_sources_name, ' ')
  if other_sources_name:
    WriteVariable(output, other_sources_name, ' ')
  if dummy_sources_name:
    WriteVariable(output, dummy_sources_name, ' ')

  output.write(')\n')

  # Let CMake know if the 'all' target should depend on this target.
  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
                             else 'FALSE')
  SetTargetProperty(output, cmake_target_name,
                      'EXCLUDE_FROM_ALL', exclude_from_all)
  for extra_target_name in extra_deps:
    SetTargetProperty(output, extra_target_name,
                        'EXCLUDE_FROM_ALL', exclude_from_all)

  # Output name and location.
  if target_type != 'none':
    # Link as 'C' if there are no other files
    if not c_sources and not cxx_sources:
      SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])

    # Mark uncompiled sources as uncompiled.
    if other_sources_name:
      output.write('set_source_files_properties(')
      WriteVariable(output, other_sources_name, '')
      output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')

    # Mark object sources as linkable.
    if linkable_sources_name:
      output.write('set_source_files_properties(')
      WriteVariable(output, linkable_sources_name, '')
      output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')

    # Output directory
    target_output_directory = spec.get('product_dir')
    if target_output_directory is None:
      if target_type in ('executable', 'loadable_module'):
        target_output_directory = generator_default_variables['PRODUCT_DIR']
      elif target_type == 'shared_library':
        target_output_directory = '${builddir}/lib.${TOOLSET}'
      elif spec.get('standalone_static_library', False):
        target_output_directory = generator_default_variables['PRODUCT_DIR']
      else:
        base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
                                            options.toplevel_dir)
        target_output_directory = '${obj}.${TOOLSET}'
        target_output_directory = (
            os.path.join(target_output_directory, base_path))

    cmake_target_output_directory = NormjoinPathForceCMakeSource(
                                        path_from_cmakelists_to_gyp,
                                        target_output_directory)
    SetTargetProperty(output,
        cmake_target_name,
        cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
        cmake_target_output_directory)

    # Output name
    default_product_prefix = ''
    default_product_name = target_name
    default_product_ext = ''
    if target_type == 'static_library':
      static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
      default_product_name = RemovePrefix(default_product_name,
                                          static_library_prefix)
      default_product_prefix = static_library_prefix
      default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']

    elif target_type in ('loadable_module', 'shared_library'):
      shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
      default_product_name = RemovePrefix(default_product_name,
                                          shared_library_prefix)
      default_product_prefix = shared_library_prefix
      default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']

    elif target_type != 'executable':
      print('ERROR: What output file should be generated?',
            'type', target_type, 'target', target_name)

    product_prefix = spec.get('product_prefix', default_product_prefix)
    product_name = spec.get('product_name', default_product_name)
    product_ext = spec.get('product_extension')
    if product_ext:
      product_ext = '.' + product_ext
    else:
      product_ext = default_product_ext

    SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
    SetTargetProperty(output, cmake_target_name,
                        cmake_target_type.property_modifier + '_OUTPUT_NAME',
                        product_name)
    SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)

    # Make the output of this target referenceable as a source.
    cmake_target_output_basename = product_prefix + product_name + product_ext
    cmake_target_output = os.path.join(cmake_target_output_directory,
                                       cmake_target_output_basename)
    SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')

    # Includes
    includes = config.get('include_dirs')
    if includes:
      # This (target include directories) is what requires CMake 2.8.8
      includes_name = cmake_target_name + '__include_dirs'
      SetVariableList(output, includes_name,
          [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
           for include in includes])
      output.write('set_property(TARGET ')
      output.write(cmake_target_name)
      output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
      WriteVariable(output, includes_name, '')
      output.write(')\n')

    # Defines
    defines = config.get('defines')
    if defines is not None:
      SetTargetProperty(output,
                          cmake_target_name,
                          'COMPILE_DEFINITIONS',
                          defines,
                          ';')

    # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
    # CMake currently does not have target C and CXX flags.
    # So, instead of doing...

    # cflags_c = config.get('cflags_c')
    # if cflags_c is not None:
    #   SetTargetProperty(output, cmake_target_name,
    #                       'C_COMPILE_FLAGS', cflags_c, ' ')

    # cflags_cc = config.get('cflags_cc')
    # if cflags_cc is not None:
    #   SetTargetProperty(output, cmake_target_name,
    #                       'CXX_COMPILE_FLAGS', cflags_cc, ' ')

    # Instead we must...
    cflags = config.get('cflags', [])
    cflags_c = config.get('cflags_c', [])
    cflags_cxx = config.get('cflags_cc', [])
    if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')

    elif c_sources and not (s_sources or cxx_sources):
      flags = []
      flags.extend(cflags)
      flags.extend(cflags_c)
      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')

    elif cxx_sources and not (s_sources or c_sources):
      flags = []
      flags.extend(cflags)
      flags.extend(cflags_cxx)
      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')

    else:
      # TODO: This is broken, one cannot generally set properties on files,
      # as other targets may require different properties on the same files.
      if s_sources and cflags:
        SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')

      if c_sources and (cflags or cflags_c):
        flags = []
        flags.extend(cflags)
        flags.extend(cflags_c)
        SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')

      if cxx_sources and (cflags or cflags_cxx):
        flags = []
        flags.extend(cflags)
        flags.extend(cflags_cxx)
        SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')

    # Linker flags
    ldflags = config.get('ldflags')
    if ldflags is not None:
      SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')

  # Note on Dependencies and Libraries:
  # CMake wants to handle link order, resolving the link line up front.
  # Gyp does not retain or enforce specifying enough information to do so.
  # So do as other gyp generators and use --start-group and --end-group.
  # Give CMake as little information as possible so that it doesn't mess it up.

  # Dependencies
  rawDeps = spec.get('dependencies', [])

  static_deps = []
  shared_deps = []
  other_deps = []
  for rawDep in rawDeps:
    dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
    dep_spec = target_dicts.get(rawDep, {})
    dep_target_type = dep_spec.get('type', None)

    if dep_target_type == 'static_library':
      static_deps.append(dep_cmake_name)
    elif dep_target_type ==  'shared_library':
      shared_deps.append(dep_cmake_name)
    else:
      other_deps.append(dep_cmake_name)

  # ensure all external dependencies are complete before internal dependencies
  # extra_deps currently only depend on their own deps, so they would otherwise run early
  if static_deps or shared_deps or other_deps:
    for extra_dep in extra_deps:
      output.write('add_dependencies(')
      output.write(extra_dep)
      output.write('\n')
      for deps in (static_deps, shared_deps, other_deps):
        for dep in gyp.common.uniquer(deps):
          output.write('  ')
          output.write(dep)
          output.write('\n')
      output.write(')\n')

  linkable = target_type in ('executable', 'loadable_module', 'shared_library')
  other_deps.extend(extra_deps)
  if other_deps or (not linkable and (static_deps or shared_deps)):
    output.write('add_dependencies(')
    output.write(cmake_target_name)
    output.write('\n')
    for dep in gyp.common.uniquer(other_deps):
      output.write('  ')
      output.write(dep)
      output.write('\n')
    if not linkable:
      for deps in (static_deps, shared_deps):
        for lib_dep in gyp.common.uniquer(deps):
          output.write('  ')
          output.write(lib_dep)
          output.write('\n')
    output.write(')\n')

  # Libraries
  if linkable:
    external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
    if external_libs or static_deps or shared_deps:
      output.write('target_link_libraries(')
      output.write(cmake_target_name)
      output.write('\n')
      if static_deps:
        write_group = circular_libs and len(static_deps) > 1
        if write_group:
          output.write('-Wl,--start-group\n')
        for dep in gyp.common.uniquer(static_deps):
          output.write('  ')
          output.write(dep)
          output.write('\n')
        if write_group:
          output.write('-Wl,--end-group\n')
      if shared_deps:
        for dep in gyp.common.uniquer(shared_deps):
          output.write('  ')
          output.write(dep)
          output.write('\n')
      if external_libs:
        for lib in gyp.common.uniquer(external_libs):
          output.write('  ')
          output.write(lib)
          output.write('\n')

      output.write(')\n')

  UnsetVariable(output, 'TOOLSET')
  UnsetVariable(output, 'TARGET')
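
In WriteTarget, os.path.dirname strips the gyp file's basename from its toplevel-relative path, and that directory fragment then seeds every output path the target writes. A minimal sketch of the derivation with hypothetical POSIX paths (InvertRelativePath is gyp's own helper; a literal stands in for it here):

import os

rel_gyp_file = os.path.relpath('/src/project/foo/bar/bar.gyp', '/src/project')
rel_gyp_dir = os.path.dirname(rel_gyp_file)       # 'foo/bar'
build_to_top = '../..'                            # what InvertRelativePath would yield
build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
print(build_to_gyp)                               # '../../foo/bar'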

Example 37

Project: PYPOWER Source File: runpf.py
def runpf(casedata=None, ppopt=None, fname='', solvedcase=''):
    """Runs a power flow.

    Runs a power flow [full AC Newton's method by default] and optionally
    returns the solved values in the data matrices, a flag which is C{True} if
    the algorithm was successful in finding a solution, and the elapsed
    time in seconds. All input arguments are optional. If C{casedata} is
    provided, it specifies the name of the input data file or a dict
    containing the power flow data. The default value is 'case9'.

    If C{ppopt} is provided, it overrides the default PYPOWER options
    vector and can be used to specify the solution algorithm and output
    options, among other things. If the third argument is given, the pretty
    printed output will be appended to the file whose name is given in
    C{fname}. If C{solvedcase} is specified, the solved case will be written
    to a case file in PYPOWER format with the specified name. If C{solvedcase}
    ends with '.mat', the case is saved as a MAT-file; otherwise it is saved
    as a Python file.

    If the C{ENFORCE_Q_LIMS} option is set to C{True} [default is C{False}], then
    if any generator reactive power limit is violated after running the AC
    power flow, the corresponding bus is converted to a PQ bus, with Qg at
    the limit, and the case is re-run. The voltage magnitude at the bus
    will deviate from the specified value in order to satisfy the reactive
    power limit. If the reference bus is converted to PQ, the first
    remaining PV bus will be used as the slack bus for the next iteration.
    This may result in the real power output at this generator being
    slightly off from the specified values.

    Enforcing of generator Q limits inspired by contributions from Mu Lin,
    Lincoln University, New Zealand (1/14/05).

    @author: Ray Zimmerman (PSERC Cornell)
    """
    ## default arguments
    if casedata is None:
        casedata = join(dirname(__file__), 'case9')
    ppopt = ppoption(ppopt)

    ## options
    verbose = ppopt["VERBOSE"]
    qlim = ppopt["ENFORCE_Q_LIMS"]  ## enforce Q limits on gens?
    dc = ppopt["PF_DC"]             ## use DC formulation?

    ## read data
    ppc = loadcase(casedata)

    ## add zero columns to branch for flows if needed
    if ppc["branch"].shape[1] < QT:
        ppc["branch"] = c_[ppc["branch"],
                           zeros((ppc["branch"].shape[0],
                                  QT - ppc["branch"].shape[1] + 1))]

    ## convert to internal indexing
    ppc = ext2int(ppc)
    baseMVA, bus, gen, branch = \
        ppc["baseMVA"], ppc["bus"], ppc["gen"], ppc["branch"]

    ## get bus index lists of each type of bus
    ref, pv, pq = bustypes(bus, gen)

    ## generator info
    on = find(gen[:, GEN_STATUS] > 0)      ## which generators are on?
    gbus = gen[on, GEN_BUS].astype(int)    ## what buses are they at?

    ##-----  run the power flow  -----
    t0 = time()
    if verbose > 0:
        v = ppver('all')
        stdout.write('PYPOWER Version %s, %s' % (v["Version"], v["Date"]))

    if dc:                               # DC formulation
        if verbose:
            stdout.write(' -- DC Power Flow\n')

        ## initial state
        Va0 = bus[:, VA] * (pi / 180)

        ## build B matrices and phase shift injections
        B, Bf, Pbusinj, Pfinj = makeBdc(baseMVA, bus, branch)

        ## compute complex bus power injections [generation - load]
        ## adjusted for phase shifters and real shunts
        Pbus = makeSbus(baseMVA, bus, gen).real - Pbusinj - bus[:, GS] / baseMVA

        ## "run" the power flow
        Va = dcpf(B, Pbus, Va0, ref, pv, pq)

        ## update data matrices with solution
        branch[:, [QF, QT]] = zeros((branch.shape[0], 2))
        branch[:, PF] = (Bf * Va + Pfinj) * baseMVA
        branch[:, PT] = -branch[:, PF]
        bus[:, VM] = ones(bus.shape[0])
        bus[:, VA] = Va * (180 / pi)
        ## update Pg for slack generator (1st gen at ref bus)
        ## (note: other gens at ref bus are accounted for in Pbus)
        ##      Pg = Pinj + Pload + Gs
        ##      newPg = oldPg + newPinj - oldPinj
        refgen = zeros(len(ref), dtype=int)
        for k in range(len(ref)):
            temp = find(gbus == ref[k])
            refgen[k] = on[temp[0]]
        gen[refgen, PG] = gen[refgen, PG] + (B[ref, :] * Va - Pbus[ref]) * baseMVA

        success = 1
    else:                                ## AC formulation
        alg = ppopt['PF_ALG']
        if verbose > 0:
            if alg == 1:
                solver = 'Newton'
            elif alg == 2:
                solver = 'fast-decoupled, XB'
            elif alg == 3:
                solver = 'fast-decoupled, BX'
            elif alg == 4:
                solver = 'Gauss-Seidel'
            else:
                solver = 'unknown'
            print(' -- AC Power Flow (%s)\n' % solver)

        ## initial state
        # V0    = ones(bus.shape[0])            ## flat start
        V0  = bus[:, VM] * exp(1j * pi/180 * bus[:, VA])
        V0[gbus] = gen[on, VG] / abs(V0[gbus]) * V0[gbus]

        if qlim:
            ref0 = ref                         ## save index and angle of
            Varef0 = bus[ref0, VA]             ##   original reference bus(es)
            limited = []                       ## list of indices of gens @ Q lims
            fixedQg = zeros(gen.shape[0])      ## Qg of gens at Q limits

        repeat = True
        while repeat:
            ## build admittance matrices
            Ybus, Yf, Yt = makeYbus(baseMVA, bus, branch)

            ## compute complex bus power injections [generation - load]
            Sbus = makeSbus(baseMVA, bus, gen)

            ## run the power flow
            alg = ppopt["PF_ALG"]
            if alg == 1:
                V, success, _ = newtonpf(Ybus, Sbus, V0, ref, pv, pq, ppopt)
            elif alg == 2 or alg == 3:
                Bp, Bpp = makeB(baseMVA, bus, branch, alg)
                V, success, _ = fdpf(Ybus, Sbus, V0, Bp, Bpp, ref, pv, pq, ppopt)
            elif alg == 4:
                V, success, _ = gausspf(Ybus, Sbus, V0, ref, pv, pq, ppopt)
            else:
                stderr.write("Only Newton's method, fast-decoupled, and "
                             "Gauss-Seidel power flow algorithms are "
                             "currently implemented.\n")

            ## update data matrices with solution
            bus, gen, branch = pfsoln(baseMVA, bus, gen, branch, Ybus, Yf, Yt, V, ref, pv, pq)

            if qlim:             ## enforce generator Q limits
                ## find gens with violated Q constraints
                gen_status = gen[:, GEN_STATUS] > 0
                qg_max_lim = gen[:, QG] > gen[:, QMAX]
                qg_min_lim = gen[:, QG] < gen[:, QMIN]
                
                mx = find( gen_status & qg_max_lim )
                mn = find( gen_status & qg_min_lim )
                
                if len(mx) > 0 or len(mn) > 0:  ## we have some Q limit violations
                    # No PV generators
                    if len(pv) == 0:
                        if verbose:
                            if len(mx) > 0:
                                print('Gen %d [only one left] exceeds upper Q limit : INFEASIBLE PROBLEM\n' % (mx[0] + 1))
                            else:
                                print('Gen %d [only one left] exceeds lower Q limit : INFEASIBLE PROBLEM\n' % (mn[0] + 1))

                        success = 0
                        break

                    ## one at a time?
                    if qlim == 2:    ## fix largest violation, ignore the rest
                        k = argmax(r_[gen[mx, QG] - gen[mx, QMAX],
                                      gen[mn, QMIN] - gen[mn, QG]])
                        if k > len(mx):
                            mn = mn[k - len(mx)]
                            mx = []
                        else:
                            mx = mx[k]
                            mn = []

                    if verbose and len(mx) > 0:
                        for i in range(len(mx)):
                            print('Gen ' + str(mx[i] + 1) + ' at upper Q limit, converting to PQ bus\n')

                    if verbose and len(mn) > 0:
                        for i in range(len(mn)):
                            print('Gen ' + str(mn[i] + 1) + ' at lower Q limit, converting to PQ bus\n')

                    ## save corresponding limit values
                    fixedQg[mx] = gen[mx, QMAX]
                    fixedQg[mn] = gen[mn, QMIN]
                    mx = r_[mx, mn].astype(int)

                    ## convert to PQ bus
                    gen[mx, QG] = fixedQg[mx]      ## set Qg to binding limit
                    for i in range(len(mx)):            ## [one at a time, since they may be at same bus]
                        gen[mx[i], GEN_STATUS] = 0        ## temporarily turn off gen,
                        bi = gen[mx[i], GEN_BUS]   ## adjust load accordingly,
                        bus[bi, [PD, QD]] = (bus[bi, [PD, QD]] - gen[mx[i], [PG, QG]])
                    
                    if len(ref) > 1 and any(bus[gen[mx, GEN_BUS], BUS_TYPE] == REF):
                        raise ValueError('Sorry, PYPOWER cannot enforce Q '
                                         'limits for slack buses in systems '
                                         'with multiple slacks.')
                    
                    bus[gen[mx, GEN_BUS].astype(int), BUS_TYPE] = PQ   ## & set bus type to PQ

                    ## update bus index lists of each type of bus
                    ref_temp = ref
                    ref, pv, pq = bustypes(bus, gen)
                    if verbose and ref != ref_temp:
                        print('Bus %d is new slack bus\n' % ref)

                    limited = r_[limited, mx].astype(int)
                else:
                    repeat = 0 ## no more generator Q limits violated
            else:
                repeat = 0     ## don't enforce generator Q limits, once is enough

        if qlim and len(limited) > 0:
            ## restore injections from limited gens [those at Q limits]
            gen[limited, QG] = fixedQg[limited]    ## restore Qg value,
            for i in range(len(limited)):               ## [one at a time, since they may be at same bus]
                bi = gen[limited[i], GEN_BUS]           ## re-adjust load,
                bus[bi, [PD, QD]] = bus[bi, [PD, QD]] + gen[limited[i], [PG, QG]]
                gen[limited[i], GEN_STATUS] = 1           ## and turn gen back on
            
            if ref != ref0:
                ## adjust voltage angles to make original ref bus correct
                bus[:, VA] = bus[:, VA] - bus[ref0, VA] + Varef0

    ppc["et"] = time() - t0
    ppc["success"] = success

    ##-----  output results  -----
    ## convert back to original bus numbering & print results
    ppc["bus"], ppc["gen"], ppc["branch"] = bus, gen, branch
    results = int2ext(ppc)

    ## zero out result fields of out-of-service gens & branches
    if len(results["order"]["gen"]["status"]["off"]) > 0:
        results["gen"][ix_(results["order"]["gen"]["status"]["off"], [PG, QG])] = 0

    if len(results["order"]["branch"]["status"]["off"]) > 0:
        results["branch"][ix_(results["order"]["branch"]["status"]["off"], [PF, QF, PT, QT])] = 0

    if fname:
        fd = None
        try:
            fd = open(fname, "a")
        except Exception as detail:
            stderr.write("Error opening %s: %s.\n" % (fname, detail))
        finally:
            if fd is not None:
                printpf(results, fd, ppopt)
                fd.close()
    else:
        printpf(results, stdout, ppopt)

    ## save solved case
    if solvedcase:
        savecase(solvedcase, results)

    return results, success
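
The default-argument handling at the top of runpf is another common dirname idiom: when no case is supplied, fall back to a data file shipped next to the module. A minimal sketch of that pattern, reusing this example's 'case9' default (the function name here is illustrative):

from os.path import dirname, join

def load_default_case(casedata=None):
    # Fall back to a data file that lives alongside this module.
    if casedata is None:
        casedata = join(dirname(__file__), 'case9')
    return casedata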

Example 38

Project: PYPOWER Source File: t_dcline.py
def t_dcline(quiet=False):
    """Tests for DC line extension in L{{toggle_dcline}.

    @author: Ray Zimmerman (PSERC Cornell)
    """
    num_tests = 50

    t_begin(num_tests, quiet)

    tdir = dirname(__file__)
    casefile = join(tdir, 't_case9_dcline')
    verbose = False

    t0 = ''
    ppopt = ppoption(OPF_VIOLATION=1e-6, PDIPM_GRADTOL=1e-8,
            PDIPM_COMPTOL=1e-8, PDIPM_COSTTOL=1e-9)
    ppopt = ppoption(ppopt, OPF_ALG=560, OPF_ALG_DC=200)
    ppopt = ppoption(ppopt, OUT_ALL=0, VERBOSE=verbose)

    ## set up indices
    ib_data     = r_[arange(BUS_AREA + 1), arange(BASE_KV, VMIN + 1)]
    ib_voltage  = arange(VM, VA + 1)
    ib_lam      = arange(LAM_P, LAM_Q + 1)
    ib_mu       = arange(MU_VMAX, MU_VMIN + 1)
    ig_data     = r_[[GEN_BUS, QMAX, QMIN], arange(MBASE, APF + 1)]
    ig_disp     = array([PG, QG, VG])
    ig_mu       = arange(MU_PMAX, MU_QMIN + 1)
    ibr_data    = arange(ANGMAX + 1)
    ibr_flow    = arange(PF, QT + 1)
    ibr_mu      = array([MU_SF, MU_ST])
    ibr_angmu   = array([MU_ANGMIN, MU_ANGMAX])

    ## load case
    ppc0 = loadcase(casefile)
    del ppc0['dclinecost']
    ppc = ppc0
    ppc = toggle_dcline(ppc, 'on')
    ppc = toggle_dcline(ppc, 'off')
    ndc = ppc['dcline'].shape[0]

    ## run AC OPF w/o DC lines
    t = ''.join([t0, 'AC OPF (no DC lines) : '])
    r0 = runopf(ppc0, ppopt)
    success = r0['success']
    t_ok(success, [t, 'success'])
    r = runopf(ppc, ppopt)
    success = r['success']
    t_ok(success, [t, 'success'])
    t_is(r['f'], r0['f'], 8, [t, 'f'])
    t_is(   r['bus'][:,ib_data   ],    r0['bus'][:,ib_data   ], 10, [t, 'bus data'])
    t_is(   r['bus'][:,ib_voltage],    r0['bus'][:,ib_voltage],  3, [t, 'bus voltage'])
    t_is(   r['bus'][:,ib_lam    ],    r0['bus'][:,ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   r['bus'][:,ib_mu     ],    r0['bus'][:,ib_mu     ],  2, [t, 'bus mu'])
    t_is(   r['gen'][:,ig_data   ],    r0['gen'][:,ig_data   ], 10, [t, 'gen data'])
    t_is(   r['gen'][:,ig_disp   ],    r0['gen'][:,ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   r['gen'][:,ig_mu     ],    r0['gen'][:,ig_mu     ],  3, [t, 'gen mu'])
    t_is(r['branch'][:,ibr_data  ], r0['branch'][:,ibr_data  ], 10, [t, 'branch data'])
    t_is(r['branch'][:,ibr_flow  ], r0['branch'][:,ibr_flow  ],  3, [t, 'branch flow'])
    t_is(r['branch'][:,ibr_mu    ], r0['branch'][:,ibr_mu    ],  2, [t, 'branch mu'])

    t = ''.join([t0, 'AC PF (no DC lines) : '])
    ppc1 = {'baseMVA': r['baseMVA'],
            'bus': r['bus'][:, :VMIN + 1].copy(),
            'gen': r['gen'][:, :APF + 1].copy(),
            'branch': r['branch'][:, :ANGMAX + 1].copy(),
            'gencost': r['gencost'].copy(),
            'dcline': r['dcline'][:, :c.LOSS1 + 1].copy()}
    ppc1['bus'][:, VM] = 1
    ppc1['bus'][:, VA] = 0
    rp = runpf(ppc1, ppopt)
    success = rp['success']
    t_ok(success, [t, 'success'])
    t_is(   rp['bus'][:,ib_voltage],    r['bus'][:,ib_voltage],  3, [t, 'bus voltage'])
    t_is(   rp['gen'][:,ig_disp   ],    r['gen'][:,ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(rp['branch'][:,ibr_flow  ], r['branch'][:,ibr_flow  ],  3, [t, 'branch flow'])

    ## run with DC lines
    t = ''.join([t0, 'AC OPF (with DC lines) : '])
    ppc = toggle_dcline(ppc, 'on')
    r = runopf(ppc, ppopt)
    success = r['success']
    t_ok(success, [t, 'success'])
    expected = array([
        [10,     8.9,  -10,       10, 1.0674, 1.0935],
        [2.2776, 2.2776, 0,        0, 1.0818, 1.0665],
        [0,      0,      0,        0, 1.0000, 1.0000],
        [10,     9.5,    0.0563, -10, 1.0778, 1.0665]
    ])
    t_is(r['dcline'][:, c.PF:c.VT + 1], expected, 4, [t, 'P Q V'])
    expected = array([
        [0, 0.8490, 0.6165, 0,      0,      0.2938],
        [0, 0,      0,      0.4290, 0.0739, 0],
        [0, 0,      0,      0,      0,      0],
        [0, 7.2209, 0,      0,      0.0739, 0]
    ])
    t_is(r['dcline'][:, c.MU_PMIN:c.MU_QMAXT + 1], expected, 3, [t, 'mu'])

    t = ''.join([t0, 'AC PF (with DC lines) : '])
    ppc1 = {'baseMVA': r['baseMVA'],
            'bus': r['bus'][:, :VMIN + 1].copy(),
            'gen': r['gen'][:, :APF + 1].copy(),
            'branch': r['branch'][:, :ANGMAX + 1].copy(),
            'gencost': r['gencost'].copy(),
            'dcline': r['dcline'][:, :c.LOSS1 + 1].copy()}
    ppc1 = toggle_dcline(ppc1, 'on')
    ppc1['bus'][:, VM] = 1
    ppc1['bus'][:, VA] = 0
    rp = runpf(ppc1, ppopt)
    success = rp['success']
    t_ok(success, [t, 'success'])
    t_is(   rp['bus'][:,ib_voltage],    r['bus'][:,ib_voltage], 3, [t, 'bus voltage'])
    #t_is(   rp['gen'][:,ig_disp   ],    r['gen'][:,ig_disp   ], 3, [t, 'gen dispatch'])
    t_is(   rp['gen'][:2,ig_disp ],    r['gen'][:2,ig_disp ], 3, [t, 'gen dispatch'])
    t_is(   rp['gen'][2,PG        ],    r['gen'][2,PG        ], 3, [t, 'gen dispatch'])
    t_is(   rp['gen'][2,QG]+rp['dcline'][0,c.QF], r['gen'][2,QG]+r['dcline'][0,c.QF], 3, [t, 'gen dispatch'])
    t_is(rp['branch'][:,ibr_flow  ], r['branch'][:,ibr_flow  ], 3, [t, 'branch flow'])

    ## add appropriate P and Q injections and check angles and generation when running PF
    t = ''.join([t0, 'AC PF (with equivalent injections) : '])
    ppc1 = {'baseMVA': r['baseMVA'],
            'bus': r['bus'][:, :VMIN + 1].copy(),
            'gen': r['gen'][:, :APF + 1].copy(),
            'branch': r['branch'][:, :ANGMAX + 1].copy(),
            'gencost': r['gencost'].copy(),
            'dcline': r['dcline'][:, :c.LOSS1 + 1].copy()}
    ppc1['bus'][:, VM] = 1
    ppc1['bus'][:, VA] = 0
    for k in range(ndc):
        if ppc1['dcline'][k, c.BR_STATUS]:
            ff = find(ppc1['bus'][:, BUS_I] == ppc1['dcline'][k, c.F_BUS])
            tt = find(ppc1['bus'][:, BUS_I] == ppc1['dcline'][k, c.T_BUS])
            ppc1['bus'][ff, PD] = ppc1['bus'][ff, PD] + r['dcline'][k, c.PF]
            ppc1['bus'][ff, QD] = ppc1['bus'][ff, QD] - r['dcline'][k, c.QF]
            ppc1['bus'][tt, PD] = ppc1['bus'][tt, PD] - r['dcline'][k, c.PT]
            ppc1['bus'][tt, QD] = ppc1['bus'][tt, QD] - r['dcline'][k, c.QT]
            ppc1['bus'][ff, VM] = r['dcline'][k, c.VF]
            ppc1['bus'][tt, VM] = r['dcline'][k, c.VT]
            ppc1['bus'][ff, BUS_TYPE] = PV
            ppc1['bus'][tt, BUS_TYPE] = PV

    rp = runpf(ppc1, ppopt)
    success = rp['success']
    t_ok(success, [t, 'success'])
    t_is(   rp['bus'][:,ib_voltage],    r['bus'][:,ib_voltage],  3, [t, 'bus voltage'])
    t_is(   rp['gen'][:,ig_disp   ],    r['gen'][:,ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(rp['branch'][:,ibr_flow  ], r['branch'][:,ibr_flow  ],  3, [t, 'branch flow'])

    ## test DC OPF
    t = ''.join([t0, 'DC OPF (with DC lines) : '])
    ppc = ppc0.copy()
    ppc['gen'][0, PMIN] = 10
    ppc['branch'][4, RATE_A] = 100
    ppc = toggle_dcline(ppc, 'on')
    r = rundcopf(ppc, ppopt)
    success = r['success']
    t_ok(success, [t, 'success'])
    expected = array([
        [10, 8.9, 0, 0, 1.01, 1],
        [2,  2,   0, 0, 1,    1],
        [0,  0,   0, 0, 1,    1],
        [10, 9.5, 0, 0, 1, 0.98]
    ])
    t_is(r['dcline'][:, c.PF:c.VT + 1], expected, 4, [t, 'P Q V'])
    expected = array([
        [0,      1.8602, 0, 0, 0, 0],
        [1.8507, 0,      0, 0, 0, 0],
        [0,      0,      0, 0, 0, 0],
        [0,      0.2681, 0, 0, 0, 0]
    ])
    t_is(r['dcline'][:, c.MU_PMIN:c.MU_QMAXT + 1], expected, 3, [t, 'mu'])

    t = ''.join([t0, 'DC PF (with DC lines) : '])
    ppc1 = {'baseMVA': r['baseMVA'],
            'bus': r['bus'][:, :VMIN + 1].copy(),
            'gen': r['gen'][:, :APF + 1].copy(),
            'branch': r['branch'][:, :ANGMAX + 1].copy(),
            'gencost': r['gencost'].copy(),
            'dcline': r['dcline'][:, :c.LOSS1 + 1].copy()}
    ppc1 = toggle_dcline(ppc1, 'on')
    ppc1['bus'][:, VA] = 0
    rp = rundcpf(ppc1, ppopt)
    success = rp['success']
    t_ok(success, [t, 'success'])
    t_is(   rp['bus'][:,ib_voltage],    r['bus'][:,ib_voltage], 3, [t, 'bus voltage'])
    t_is(   rp['gen'][:,ig_disp   ],    r['gen'][:,ig_disp   ], 3, [t, 'gen dispatch'])
    t_is(rp['branch'][:,ibr_flow  ], r['branch'][:,ibr_flow  ], 3, [t, 'branch flow'])

    ## add appropriate P injections and check angles and generation when running PF
    t = ''.join([t0, 'DC PF (with equivalent injections) : '])
    ppc1 = {'baseMVA': r['baseMVA'],
            'bus': r['bus'][:, :VMIN + 1].copy(),
            'gen': r['gen'][:, :APF + 1].copy(),
            'branch': r['branch'][:, :ANGMAX + 1].copy(),
            'gencost': r['gencost'].copy(),
            'dcline': r['dcline'][:, :c.LOSS1 + 1].copy()}
    ppc1['bus'][:, VA] = 0
    for k in range(ndc):
        if ppc1['dcline'][k, c.BR_STATUS]:
            ff = find(ppc1['bus'][:, BUS_I] == ppc1['dcline'][k, c.F_BUS])
            tt = find(ppc1['bus'][:, BUS_I] == ppc1['dcline'][k, c.T_BUS])
            ppc1['bus'][ff, PD] = ppc1['bus'][ff, PD] + r['dcline'][k, c.PF]
            ppc1['bus'][tt, PD] = ppc1['bus'][tt, PD] - r['dcline'][k, c.PT]
            ppc1['bus'][ff, BUS_TYPE] = PV
            ppc1['bus'][tt, BUS_TYPE] = PV

    rp = rundcpf(ppc1, ppopt)
    success = rp['success']
    t_ok(success, [t, 'success'])
    t_is(   rp['bus'][:,ib_voltage],    r['bus'][:,ib_voltage],  3, [t, 'bus voltage'])
    t_is(   rp['gen'][:,ig_disp   ],    r['gen'][:,ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(rp['branch'][:,ibr_flow  ], r['branch'][:,ibr_flow  ],  3, [t, 'branch flow'])

    ## run with DC lines
    t = ''.join([t0, 'AC OPF (with DC lines + poly cost) : '])
    ppc = loadcase(casefile)
    ppc = toggle_dcline(ppc, 'on')
    r = runopf(ppc, ppopt)
    success = r['success']
    t_ok(success, [t, 'success'])
    expected1 = array([
        [10,     8.9,   -10,       10, 1.0663, 1.0936],
        [7.8429, 7.8429,  0,        0, 1.0809, 1.0667],
        [0,      0,       0,        0, 1.0000, 1.0000],
        [6.0549, 5.7522, -0.5897, -10, 1.0778, 1.0667]
    ])
    t_is(r['dcline'][:, c.PF:c.VT + 1], expected1, 4, [t, 'P Q V'])
    expected2 = array([
        [0, 0.7605, 0.6226, 0,      0,      0.2980],
        [0, 0,      0,      0.4275, 0.0792, 0],
        [0, 0,      0,      0,      0,      0],
        [0, 0,      0,      0,      0.0792, 0]
    ])
    t_is(r['dcline'][:, c.MU_PMIN:c.MU_QMAXT + 1], expected2, 3, [t, 'mu'])

    ppc['dclinecost'][3, :8] = array([2, 0, 0, 4, 0, 0, 7.3, 0])
    r = runopf(ppc, ppopt)
    success = r['success']
    t_ok(success, [t, 'success'])
    t_is(r['dcline'][:, c.PF:c.VT + 1], expected1, 4, [t, 'P Q V'])
    t_is(r['dcline'][:, c.MU_PMIN:c.MU_QMAXT + 1], expected2, 3, [t, 'mu'])

    t = ''.join([t0, 'AC OPF (with DC lines + pwl cost) : '])
    ppc['dclinecost'][3, :8] = array([1, 0, 0, 2, 0, 0, 10, 73])
    r = runopf(ppc, ppopt)
    success = r['success']
    t_ok(success, [t, 'success'])
    t_is(r['dcline'][:, c.PF:c.VT + 1], expected1, 4, [t, 'P Q V'])
    t_is(r['dcline'][:, c.MU_PMIN:c.MU_QMAXT + 1], expected2, 3, [t, 'mu'])

    t_end()
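
Like runpf above, this test locates its fixtures with dirname(__file__). One caveat worth keeping in mind: __file__ can itself be relative (e.g. when the script is launched as 'python t_dcline.py'), in which case dirname returns ''; the defensive variant anchors it with abspath first:

import os

# abspath first, so the result survives a later os.chdir()
here = os.path.dirname(os.path.abspath(__file__))
casefile = os.path.join(here, 't_case9_dcline')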

Example 39

Project: numexpr Source File: setup.py
def setup_package():
    metadata = dict(
                      description='Fast numerical expression evaluator for NumPy',
                      author='David M. Cooke, Francesc Alted and others',
                      author_email='[email protected], [email protected]',
                      url='https://github.com/pydata/numexpr',
                      license='MIT',
                      packages=['numexpr'],
                      install_requires=requirements,
                      setup_requires=requirements
    )
    if (len(sys.argv) >= 2 and
        ('--help' in sys.argv[1:] or
         (sys.argv[1] in (
             '--help-commands', 'egg_info', '--version', 'clean', '--name')))):

        # For these actions, NumPy is not required.
        #
        # They must succeed without NumPy, for example when pip is used to
        # install Numexpr while NumPy is not yet present on the system.
        # (via https://github.com/abhirk/scikit-learn/blob/master/setup.py)
        try:
            from setuptools import setup
        except ImportError:
            from distutils.core import setup

        metadata['name']    = 'numexpr'
        metadata['version'] = version
    else:
        from numpy.distutils.core import setup
        from numpy.distutils.command.build_ext import build_ext as numpy_build_ext

        try:  # Python 3
            # Code taken from numpy/distutils/command/build_py.py
            # XXX: update LICENSES
            from distutils.command.build_py import build_py_2to3 as old_build_py
            from numpy.distutils.misc_util import is_string

            class build_py(old_build_py):

                def run(self):
                    build_src = self.get_finalized_command('build_src')
                    if build_src.py_modules_dict and self.packages is None:
                        self.packages = list(build_src.py_modules_dict.keys())
                    old_build_py.run(self)

                def find_package_modules(self, package, package_dir):
                    modules = old_build_py.find_package_modules(
                        self, package, package_dir)

                    # Find build_src generated *.py files.
                    build_src = self.get_finalized_command('build_src')
                    modules += build_src.py_modules_dict.get(package, [])

                    return modules

                def find_modules(self):
                    old_py_modules = self.py_modules[:]
                    new_py_modules = list(filter(is_string, self.py_modules))
                    self.py_modules[:] = new_py_modules
                    modules = old_build_py.find_modules(self)
                    self.py_modules[:] = old_py_modules

                    return modules

        except ImportError:  # Python 2
            from numpy.distutils.command.build_py import build_py

        DEBUG = False

        def localpath(*args):
            return op.abspath(op.join(*((op.dirname(__file__),) + args)))

        def debug(instring):
            if DEBUG:
                print(" DEBUG: " + instring)


        def configuration():
            from numpy.distutils.misc_util import Configuration, dict_append
            from numpy.distutils.system_info import system_info

            config = Configuration('numexpr')

            # Try to find configuration for MKL, either from environment or site.cfg
            if op.exists('site.cfg'):
                mkl_config_data = config.get_info('mkl')
                # Some versions of MKL need to be linked with libgfortran.
                # For this, use entries of DEFAULT section in site.cfg.
                default_config = system_info()
                dict_append(mkl_config_data,
                            libraries=default_config.get_libraries(),
                            library_dirs=default_config.get_lib_dirs())
            else:
                mkl_config_data = {}

            # setup information for C extension
            if os.name == 'nt':
                pthread_win = ['numexpr/win32/pthread.c']
            else:
                pthread_win = []
            extension_config_data = {
                'sources': ['numexpr/interpreter.cpp',
                            'numexpr/module.cpp',
                            'numexpr/numexpr_object.cpp'] + pthread_win,
                'depends': ['numexpr/interp_body.cpp',
                            'numexpr/complex_functions.hpp',
                            'numexpr/interpreter.hpp',
                            'numexpr/module.hpp',
                            'numexpr/msvc_function_stubs.hpp',
                            'numexpr/numexpr_config.hpp',
                            'numexpr/numexpr_object.hpp'],
                'libraries': ['m'],
                'extra_compile_args': ['-funroll-all-loops', ],
            }
            dict_append(extension_config_data, **mkl_config_data)
            if 'library_dirs' in mkl_config_data:
                library_dirs = ':'.join(mkl_config_data['library_dirs'])
            config.add_extension('interpreter', **extension_config_data)
            config.set_options(quiet=True)

            config.make_config_py()
            config.add_subpackage('tests', 'numexpr/tests')

            # Version handling
            config.get_version('numexpr/version.py')
            return config


        class cleaner(clean):

            def run(self):
                # Recursive deletion of build/ directory
                path = localpath("build")
                try:
                    shutil.rmtree(path)
                except Exception:
                    debug("Failed to remove directory %s" % path)
                else:
                    debug("Cleaned up %s" % path)

                # Now, the extension and other files
                try:
                    import imp
                except ImportError:
                    if os.name == 'posix':
                        paths = [localpath("numexpr/interpreter.so")]
                    else:
                        paths = [localpath("numexpr/interpreter.pyd")]
                else:
                    paths = []
                    for suffix, _, _ in imp.get_suffixes():
                        if suffix == '.py':
                            continue
                        paths.append(localpath("numexpr", "interpreter" + suffix))
                paths.append(localpath("numexpr/__config__.py"))
                paths.append(localpath("numexpr/__config__.pyc"))
                for path in paths:
                    try:
                        os.remove(path)
                    except Exception:
                        debug("Failed to clean up file %s" % path)
                    else:
                        debug("Cleaning up %s" % path)

                clean.run(self)

        class build_ext(numpy_build_ext):
            def build_extension(self, ext):
                # at this point we know what the C compiler is.
                if self.compiler.compiler_type == 'msvc' or self.compiler.compiler_type == 'intelemw':
                    ext.extra_compile_args = []
                    # also remove extra linker arguments msvc doesn't understand
                    ext.extra_link_args = []
                    # also remove gcc math library
                    ext.libraries.remove('m')
                numpy_build_ext.build_extension(self, ext)

        if setuptools:
            metadata['zip_safe'] = False

        metadata['cmdclass'] = {
            'build_ext': build_ext,
            'clean': cleaner,
            'build_py': build_py,
        }
        metadata['configuration'] = configuration

    setup(**metadata)
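
The localpath() helper in this setup script is the same dirname(__file__) idiom packaged for reuse: every build artifact is addressed from the directory containing setup.py rather than from os.getcwd(). An equivalent, slightly more direct spelling of that helper:

import os.path as op

def localpath(*args):
    # Anchor paths at this file's directory, independent of the cwd.
    return op.abspath(op.join(op.dirname(__file__), *args))

# e.g. localpath('numexpr', 'interpreter.so')
#   -> /abs/path/to/project/numexpr/interpreter.so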

Example 40

Project: PYPOWER Source File: t_loadcase.py
def t_loadcase(quiet=False):
    """Test that C{loadcase} works with an object as well as case file.

    @author: Ray Zimmerman (PSERC Cornell)
    """
    t_begin(240, quiet)

    ## compare result of loading from an M-file to result of using data matrices
    tdir = dirname(__file__)
    casefile = join(tdir, 't_case9_opf')
    matfile  = join(tdir, 't_mat9_opf')
    pfcasefile = join(tdir, 't_case9_pf')
    pfmatfile  = join(tdir, 't_mat9_pf')
    casefilev2 = join(tdir, 't_case9_opfv2')
    matfilev2  = join(tdir, 't_mat9_opfv2')
    pfcasefilev2 = join(tdir, 't_case9_pfv2')
    pfmatfilev2  = join(tdir, 't_mat9_pfv2')

    ## read version 1 OPF data matrices
    baseMVA, bus, gen, branch, areas, gencost = t_case9_opf()
    ## save as .mat file
    savemat(matfile + '.mat', {'baseMVA': baseMVA, 'bus': bus, 'gen': gen,
            'branch': branch, 'areas': areas, 'gencost': gencost}, oned_as='row')

    ## read version 2 OPF data matrices
    ppc = t_case9_opfv2()
    ## save as .mat file
    savemat(matfilev2 + '.mat', {'ppc': ppc}, oned_as='column')

    ## prepare expected matrices for v1 load
    ## (missing gen cap curve & branch ang diff lims)
    tmp1 = (ppc['baseMVA'], ppc['bus'].copy(), ppc['gen'].copy(), ppc['branch'].copy(),
        ppc['areas'].copy(), ppc['gencost'].copy())
    tmp2 = (ppc['baseMVA'], ppc['bus'].copy(), ppc['gen'].copy(), ppc['branch'].copy(),
        ppc['areas'].copy(), ppc['gencost'].copy())
    ## remove capability curves, angle difference limits
    tmp1[2][1:3, [PC1, PC2, QC1MIN, QC1MAX, QC2MIN, QC2MAX]] = zeros((2,6))
    tmp1[3][0, ANGMAX] = 360
    tmp1[3][8, ANGMIN] = -360

    baseMVA, bus, gen, branch, areas, gencost = tmp1

    ##-----  load OPF data into individual matrices  -----
    t = 'loadcase(opf_PY_file_v1) without .py extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(casefile, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_PY_file_v1) with .py extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(casefile + '.py', False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_MAT_file_v1) without .mat extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(matfile, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_MAT_file_v1) with .mat extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(matfile + '.mat', False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    ## prepare expected matrices for v2 load
    baseMVA, bus, gen, branch, areas, gencost = tmp2

    t = 'loadcase(opf_PY_file_v2) without .py extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(casefilev2, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_PY_file_v2) with .py extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(casefilev2 + '.py', False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_MAT_file_v2) without .mat extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(matfilev2, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_MAT_file_v2) with .mat extension : '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = \
            loadcase(matfilev2 + '.mat', False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])
    t_is(areas1,    areas,      12, [t, 'areas'])
    t_is(gencost1,  gencost,    12, [t, 'gencost'])

    ## prepare expected matrices for v1 load
    baseMVA, bus, gen, branch, areas, gencost = tmp1

    t = 'loadcase(opf_struct_v1) (no version): '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = t_case9_opf()
    c = {}
    c['baseMVA']   = baseMVA1
    c['bus']       = bus1.copy()
    c['gen']       = gen1.copy()
    c['branch']    = branch1.copy()
    c['areas']     = areas1.copy()
    c['gencost']   = gencost1.copy()
    baseMVA2, bus2, gen2, branch2, areas2, gencost2 = loadcase(c, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])
    t_is(areas2,    areas,      12, [t, 'areas'])
    t_is(gencost2,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_struct_v1) (version=\'1\'): '
    c['version']   = '1'
    baseMVA2, bus2, gen2, branch2, areas2, gencost2 = loadcase(c, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])
    t_is(areas2,    areas,      12, [t, 'areas'])
    t_is(gencost2,  gencost,    12, [t, 'gencost'])

    ## prepare expected matrices for v2 load
    baseMVA, bus, gen, branch, areas, gencost = tmp2

    t = 'loadcase(opf_struct_v2) (no version): '
    c = {}
    c['baseMVA']   = baseMVA
    c['bus']       = bus.copy()
    c['gen']       = gen.copy()
    c['branch']    = branch.copy()
    c['areas']     = areas.copy()
    c['gencost']   = gencost.copy()
    baseMVA2, bus2, gen2, branch2, areas2, gencost2 = loadcase(c, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])
    t_is(areas2,    areas,      12, [t, 'areas'])
    t_is(gencost2,  gencost,    12, [t, 'gencost'])

    t = 'loadcase(opf_struct_v2) (version=\'2\'): '
    c = {}
    c['baseMVA']   = baseMVA
    c['bus']       = bus.copy()
    c['gen']       = gen.copy()
    c['branch']    = branch.copy()
    c['areas']     = areas.copy()
    c['gencost']   = gencost.copy()
    c['version']   = '2'
    baseMVA2, bus2, gen2, branch2, areas2, gencost2 = loadcase(c, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])
    t_is(areas2,    areas,      12, [t, 'areas'])
    t_is(gencost2,  gencost,    12, [t, 'gencost'])

    ##-----  load OPF data into struct  -----
    ## prepare expected matrices for v1 load
    baseMVA, bus, gen, branch, areas, gencost = tmp1

    t = 'ppc = loadcase(opf_PY_file_v1) without .py extension : '
    ppc1 = loadcase(casefile)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_PY_file_v1) with .py extension : '
    ppc1 = loadcase(casefile + '.py')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_MAT_file_v1) without .mat extension : '
    ppc1 = loadcase(matfile)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_MAT_file_v1) with .mat extension : '
    ppc1 = loadcase(matfile + '.mat')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    ## prepare expected matrices for v2 load
    baseMVA, bus, gen, branch, areas, gencost = tmp2

    t = 'ppc = loadcase(opf_PY_file_v2) without .py extension : '
    ppc1 = loadcase(casefilev2)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_PY_file_v2) with .py extension : '
    ppc1 = loadcase(casefilev2 + '.py')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_MAT_file_v2) without .mat extension : '
    ppc1 = loadcase(matfilev2)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_MAT_file_v2) with .mat extension : '
    ppc1 = loadcase(matfilev2 + '.mat')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc1['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc1['gencost'],  gencost,    12, [t, 'gencost'])

    ## prepare expected matrices for v1 load
    baseMVA, bus, gen, branch, areas, gencost = tmp1

    t = 'ppc = loadcase(opf_struct_v1) (no version): '
    baseMVA1, bus1, gen1, branch1, areas1, gencost1 = t_case9_opf()
    c = {}
    c['baseMVA']   = baseMVA1
    c['bus']       = bus1.copy()
    c['gen']       = gen1.copy()
    c['branch']    = branch1.copy()
    c['areas']     = areas1.copy()
    c['gencost']   = gencost1.copy()
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc2['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc2['gencost'],  gencost,    12, [t, 'gencost'])

    t = 'ppc = loadcase(opf_struct_v1) (version=\'1\'): '
    c['version']   = '1'
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc2['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc2['gencost'],  gencost,    12, [t, 'gencost'])

    ## prepare expected matrices for v2 load
    baseMVA, bus, gen, branch, areas, gencost = tmp2

    t = 'ppc = loadcase(opf_struct_v2) (no version): '
    c = {}
    c['baseMVA']   = baseMVA
    c['bus']       = bus.copy()
    c['gen']       = gen.copy()
    c['branch']    = branch.copy()
    c['areas']     = areas.copy()
    c['gencost']   = gencost.copy()
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc2['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc2['gencost'],  gencost,    12, [t, 'gencost'])
    t_ok(ppc2['version'] == '2', [t, 'version'])

    t = 'ppc = loadcase(opf_struct_v2) (version=\'2\'): '
    c = {}
    c['baseMVA']   = baseMVA
    c['bus']       = bus.copy()
    c['gen']       = gen.copy()
    c['branch']    = branch.copy()
    c['areas']     = areas.copy()
    c['gencost']   = gencost.copy()
    c['version']   = '2'
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])
    t_is(ppc2['areas'],    areas,      12, [t, 'areas'])
    t_is(ppc2['gencost'],  gencost,    12, [t, 'gencost'])


    ## read version 1 PF data matrices
    baseMVA, bus, gen, branch = t_case9_pf()
    savemat(pfmatfile + '.mat',
        {'baseMVA': baseMVA, 'bus': bus, 'gen': gen, 'branch': branch},
        oned_as='column')

    ## read version 2 PF data matrices
    ppc = t_case9_pfv2()
    tmp = (ppc['baseMVA'], ppc['bus'].copy(),
           ppc['gen'].copy(), ppc['branch'].copy())
    baseMVA, bus, gen, branch = tmp
    ## save as .mat file
    savemat(pfmatfilev2 + '.mat', {'ppc': ppc}, oned_as='column')

    ##-----  load PF data into individual matrices  -----
    t = 'loadcase(pf_PY_file_v1) without .py extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfcasefile, False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_PY_file_v1) with .py extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfcasefile + '.py', False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_MAT_file_v1) without .mat extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfmatfile, False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_MAT_file_v1) with .mat extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfmatfile + '.mat', False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_PY_file_v2) without .py extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfcasefilev2, False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_PY_file_v2) with .py extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfcasefilev2 + '.py', False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_MAT_file_v2) without .mat extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfmatfilev2, False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_MAT_file_v2) with .mat extension : '
    baseMVA1, bus1, gen1, branch1 = \
            loadcase(pfmatfilev2 + '.mat', False, False, False)
    t_is(baseMVA1,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus1,      bus,        12, [t, 'bus'])
    t_is(gen1,      gen,        12, [t, 'gen'])
    t_is(branch1,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_struct_v1) (no version): '
    baseMVA1, bus1, gen1, branch1 = t_case9_pf()
    c = {}
    c['baseMVA']   = baseMVA1
    c['bus']       = bus1.copy()
    c['gen']       = gen1.copy()
    c['branch']    = branch1.copy()
    baseMVA2, bus2, gen2, branch2 = loadcase(c, False, False, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_struct_v1) (version=\'1\'): '
    c['version']   = '1'
    baseMVA2, bus2, gen2, branch2 = loadcase(c, False, False, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])

    t = 'loadcase(pf_struct_v2) : '
    c = {}
    c['baseMVA']   = baseMVA
    c['bus']       = bus.copy()
    c['gen']       = gen.copy()
    c['branch']    = branch.copy()
    c['version']   = '2'
    baseMVA2, bus2, gen2, branch2 = loadcase(c, False, False, False)
    t_is(baseMVA2,  baseMVA,    12, [t, 'baseMVA'])
    t_is(bus2,      bus,        12, [t, 'bus'])
    t_is(gen2,      gen,        12, [t, 'gen'])
    t_is(branch2,   branch,     12, [t, 'branch'])

    ##-----  load PF data into struct  -----
    t = 'ppc = loadcase(pf_PY_file_v1) without .py extension : '
    ppc1 = loadcase(pfcasefile)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_PY_file_v1) with .py extension : '
    ppc1 = loadcase(pfcasefile + '.py')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_MAT_file_v1) without .mat extension : '
    ppc1 = loadcase(pfmatfile)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_MAT_file_v1) with .mat extension : '
    ppc1 = loadcase(pfmatfile + '.mat')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_PY_file_v2) without .py extension : '
    ppc1 = loadcase(pfcasefilev2)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_PY_file_v2) with .py extension : '
    ppc1 = loadcase(pfcasefilev2 + '.py')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_MAT_file_v2) without .mat extension : '
    ppc1 = loadcase(pfmatfilev2)
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_MAT_file_v2) with .mat extension : '
    ppc1 = loadcase(pfmatfilev2 + '.mat')
    t_is(ppc1['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc1['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc1['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc1['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_struct_v1) (no version): '
    baseMVA1, bus1, gen1, branch1 = t_case9_pf()
    c = {}
    c['baseMVA']   = baseMVA1
    c['bus']       = bus1.copy()
    c['gen']       = gen1.copy()
    c['branch']    = branch1.copy()
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_struct_v1) (version=\'1\'): '
    c['version']   = '1'
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])

    t = 'ppc = loadcase(pf_struct_v2) : '
    c = {}
    c['baseMVA']   = baseMVA
    c['bus']       = bus.copy()
    c['gen']       = gen.copy()
    c['branch']    = branch.copy()
    c['version']   = '2'
    ppc2 = loadcase(c)
    t_is(ppc2['baseMVA'],  baseMVA,    12, [t, 'baseMVA'])
    t_is(ppc2['bus'],      bus,        12, [t, 'bus'])
    t_is(ppc2['gen'],      gen,        12, [t, 'gen'])
    t_is(ppc2['branch'],   branch,     12, [t, 'branch'])

    ## cleanup
    os.remove(matfile + '.mat')
    os.remove(pfmatfile + '.mat')
    os.remove(matfilev2 + '.mat')
    os.remove(pfmatfilev2 + '.mat')

    t = 'runpf(my_PY_file)'
    ppopt = ppoption(VERBOSE=0, OUT_ALL=0)
    results3, success = runpf(pfcasefile, ppopt)
    baseMVA3, bus3, gen3, branch3 = results3['baseMVA'], results3['bus'], \
            results3['gen'], results3['branch']
    t_ok( success, t )

    t = 'runpf(my_object)'
    results4, success = runpf(c, ppopt)
    baseMVA4, bus4, gen4, branch4 = results4['baseMVA'], results4['bus'], \
            results4['gen'], results4['branch']
    t_ok( success, t )

    t = 'runpf result comparison : '
    t_is(baseMVA3,  baseMVA4,   12, [t, 'baseMVA'])
    t_is(bus3,      bus4,       12, [t, 'bus'])
    t_is(gen3,      gen4,       12, [t, 'gen'])
    t_is(branch3,   branch4,    12, [t, 'branch'])

    t = 'runpf(modified_struct)'
    c['gen'][2, 1] = c['gen'][2, 1] + 1            ## increase gen 3 output by 1
    results5, success = runpf(c, ppopt)
    gen5 = results5['gen']
    t_is(gen5[0, 1], gen4[0, 1] - 1, 1, t)   ## slack bus output should decrease by 1

    t_end()
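
The calling patterns exercised above reduce to a small surface: loadcase() accepts a file name with or without its extension, or an in-memory dict, and returns either a full case dict or individual matrices depending on the boolean flags. A minimal usage sketch, assuming PYPOWER is installed and a version-2 case file named 'case9' (with areas and gencost tables) is available; names per pypower.api:

from pypower.api import loadcase, runpf, ppoption

ppc = loadcase('case9')                            # full case dict; '.py'/'.mat' inferred
baseMVA, bus, gen, branch, areas, gencost = \
        loadcase('case9', False)                   # individual OPF matrices
baseMVA, bus, gen, branch = \
        loadcase('case9', False, False, False)     # PF subset, no areas/gencost expected
results, success = runpf(ppc, ppoption(VERBOSE=0, OUT_ALL=0))  # dict input works too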

Example 41

Project: PYPOWER Source File: t_opf_ipopt.py
def t_opf_ipopt(quiet=False):
    """Tests for IPOPT-based AC optimal power flow.

    @author: Ray Zimmerman (PSERC Cornell)
    """
    num_tests = 101

    t_begin(num_tests, quiet)

    tdir = dirname(__file__)
    casefile = join(tdir, 't_case9_opf')
    verbose = 0  # not quiet

    t0 = 'IPOPT : '
    ppopt = ppoption(OPF_VIOLATION=1e-6, PDIPM_GRADTOL=1e-8,
                   PDIPM_COMPTOL=1e-8, PDIPM_COSTTOL=1e-9)
    ppopt = ppoption(ppopt, OUT_ALL=0, VERBOSE=verbose, OPF_ALG=580)

    ## set up indices
    ib_data     = r_[arange(BUS_AREA + 1), arange(BASE_KV, VMIN + 1)]
    ib_voltage  = arange(VM, VA + 1)
    ib_lam      = arange(LAM_P, LAM_Q + 1)
    ib_mu       = arange(MU_VMAX, MU_VMIN + 1)
    ig_data     = r_[[GEN_BUS, QMAX, QMIN], arange(MBASE, APF + 1)]
    ig_disp     = array([PG, QG, VG])
    ig_mu       = arange(MU_PMAX, MU_QMIN + 1)
    ibr_data    = arange(ANGMAX + 1)
    ibr_flow    = arange(PF, QT + 1)
    ibr_mu      = array([MU_SF, MU_ST])
    ibr_angmu   = array([MU_ANGMIN, MU_ANGMAX])

    ## get solved AC power flow case from MAT-file
    soln9_opf = loadmat(join(tdir, 'soln9_opf.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf['bus_soln']
    gen_soln = soln9_opf['gen_soln']
    branch_soln = soln9_opf['branch_soln']
    f_soln = soln9_opf['f_soln'][0]

    ## run OPF
    t = t0
    r = runopf(casefile, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ## run with automatic conversion of single-block pwl to linear costs
    t = ''.join([t0, '(single-block PWL) : '])
    ppc = loadcase(casefile)
    ppc['gencost'][2, NCOST] = 2
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    xr = r_[r['var']['val']['Va'], r['var']['val']['Vm'], r['var']['val']['Pg'],
            r['var']['val']['Qg'], 0, r['var']['val']['y']]
    t_is(r['x'], xr, 8, [t, 'check on raw x returned from OPF'])

    ## get solved AC power flow case from MAT-file
    soln9_opf_Plim = loadmat(join(tdir, 'soln9_opf_Plim.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_Plim['bus_soln']
    gen_soln = soln9_opf_Plim['gen_soln']
    branch_soln = soln9_opf_Plim['branch_soln']
    f_soln = soln9_opf_Plim['f_soln'][0]

    ## run OPF with active power line limits
    t = ''.join([t0, '(P line lim) : '])
    ppopt1 = ppoption(ppopt, OPF_FLOW_LIM=1)
    r = runopf(casefile, ppopt1)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ##-----  test OPF with quadratic gen costs moved to generalized costs  -----
    ppc = loadcase(casefile)
    ppc['gencost'] = array([
        [2,   1500, 0,   3,   0.11,    5,   0],
        [2,   2000, 0,   3,   0.085,   1.2, 0],
        [2,   3000, 0,   3,   0.1225,  1,   0]
    ])
    r = runopf(ppc, ppopt)
    bus_soln, gen_soln, branch_soln, f_soln, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    branch_soln = branch_soln[:, :MU_ST + 1]

    A = None
    l = array([])
    u = array([])
    nb = ppc['bus'].shape[0]      # number of buses
    ng = ppc['gen'].shape[0]      # number of gens
    thbas = 0;        thend = thbas + nb
    vbas  = thend;    vend  = vbas + nb
    pgbas = vend;     pgend = pgbas + ng
#   qgbas = pgend;    qgend = qgbas + ng
    nxyz = 2 * nb + 2 * ng
    N = sparse((ppc['baseMVA'] * ones(ng), (arange(ng), arange(pgbas, pgend))), (ng, nxyz))
    fparm = ones((ng, 1)) * array([[1, 0, 0, 1]])
    ix = argsort(ppc['gen'][:, 0])
    H = 2 * spdiags(ppc['gencost'][ix, 4], 0, ng, ng, 'csr')
    Cw = ppc['gencost'][ix, 5]
    ppc['gencost'][:, 4:7] = 0

    ## run OPF with quadratic gen costs moved to generalized costs
    t = ''.join([t0, 'w/quadratic generalized gen cost : '])
    r = opf(ppc, A, l, u, ppopt, N, fparm, H, Cw)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(r['cost']['usr'], f, 12, [t, 'user cost'])

    ##-----  run OPF with extra linear user constraints & costs  -----
    ## single new z variable constrained to be greater than or equal to
    ## deviation from 1 pu voltage at bus 1, linear cost on this z
    ## get solved AC power flow case from MAT-file
    soln9_opf_extras1 = loadmat(join(tdir, 'soln9_opf_extras1.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_extras1['bus_soln']
    gen_soln = soln9_opf_extras1['gen_soln']
    branch_soln = soln9_opf_extras1['branch_soln']
    f_soln = soln9_opf_extras1['f_soln'][0]

    row = [0, 0, 1, 1]
    col = [9, 24, 9, 24]
    A = sparse(([-1, 1, 1, 1], (row, col)), (2, 25))
    u = array([Inf, Inf])
    l = array([-1, 1])

    N = sparse(([1], ([0], [24])), (1, 25))    ## new z variable only
    fparm = array([[1, 0, 0, 1]])              ## w = r = z
    H = sparse((1, 1))                ## no quadratic term
    Cw = array([100.0])

    t = ''.join([t0, 'w/extra constraints & costs 1 : '])
    r = opf(casefile, A, l, u, ppopt, N, fparm, H, Cw)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(r['var']['val']['z'], 0.025419, 6, [t, 'user variable'])
    t_is(r['cost']['usr'], 2.5419, 4, [t, 'user cost'])

    ##-----  test OPF with capability curves  -----
    ppc = loadcase(join(tdir, 't_case9_opfv2'))
    ## remove angle diff limits
    ppc['branch'][0, ANGMAX] =  360
    ppc['branch'][8, ANGMIN] = -360

    ## get solved AC power flow case from MAT-file
    soln9_opf_PQcap = loadmat(join(tdir, 'soln9_opf_PQcap.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_PQcap['bus_soln']
    gen_soln = soln9_opf_PQcap['gen_soln']
    branch_soln = soln9_opf_PQcap['branch_soln']
    f_soln = soln9_opf_PQcap['f_soln'][0]

    ## run OPF with capability curves
    t = ''.join([t0, 'w/capability curves : '])
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ##-----  test OPF with angle difference limits  -----
    ppc = loadcase(join(tdir, 't_case9_opfv2'))
    ## remove capability curves
    ppc['gen'][ix_(arange(1, 3),
                   [PC1, PC2, QC1MIN, QC1MAX, QC2MIN, QC2MAX])] = zeros((2, 6))

    ## get solved AC power flow case from MAT-file
    soln9_opf_ang = loadmat(join(tdir, 'soln9_opf_ang.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_ang['bus_soln']
    gen_soln = soln9_opf_ang['gen_soln']
    branch_soln = soln9_opf_ang['branch_soln']
    f_soln = soln9_opf_ang['f_soln'][0]

    ## run OPF with angle difference limits
    t = ''.join([t0, 'w/angle difference limits : '])
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  1, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(branch[:, ibr_angmu ], branch_soln[:, ibr_angmu ],  2, [t, 'branch angle mu'])

    ##-----  test OPF with ignored angle difference limits  -----
    ## get solved AC power flow case from MAT-file
    soln9_opf = loadmat(join(tdir, 'soln9_opf.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf['bus_soln']
    gen_soln = soln9_opf['gen_soln']
    branch_soln = soln9_opf['branch_soln']
    f_soln = soln9_opf['f_soln'][0]

    ## run OPF with ignored angle difference limits
    t = ''.join([t0, 'w/ignored angle difference limits : '])
    ppopt1 = ppoption(ppopt, OPF_IGNORE_ANG_LIM=1)
    r = runopf(ppc, ppopt1)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    ## ang limits are not in this solution data, so let's remove them
    branch[0, ANGMAX] =  360
    branch[8, ANGMIN] = -360
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    t_end()
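
These OPF tests open with the idiom this page documents: dirname(__file__) pins every data file to the test module's own directory, so the suite runs regardless of the current working directory. A minimal sketch of the pattern as used above:

from os.path import dirname, join

tdir = dirname(__file__)                 # directory containing this module
casefile = join(tdir, 't_case9_opf')     # case file; loadcase resolves the extension
solnfile = join(tdir, 'soln9_opf.mat')   # reference solution shipped next to the tests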

Example 42

Project: mrq Source File: config.py
def add_parser_args(parser, config_type):

    # General arguments

    parser.add_argument(
        '--trace_greenlets',
        action='store_true',
        default=False,
        help='Collect stats about each greenlet execution time and switches.')

    parser.add_argument(
        '--trace_memory',
        action='store_true',
        default=False,
        help='Collect stats about memory for each task. Incompatible with --greenlets > 1')

    parser.add_argument(
        '--trace_io',
        action='store_true',
        default=False,
        help='Collect stats about all I/O operations')

    parser.add_argument(
        '--print_mongodb',
        action='store_true',
        default=False,
        help='Print all MongoDB requests')

    parser.add_argument(
        '--trace_memory_type',
        action='store',
        default="",
        help='Create a .png object graph in trace_memory_output_dir ' +
             'with a random object of this type.')

    parser.add_argument(
        '--trace_memory_output_dir',
        action='store',
        default="memory_traces",
        help='Directory where to output .pngs with object graphs')

    parser.add_argument(
        '--profile',
        action='store_true',
        default=False,
        help='Run profiling on the whole worker')

    parser.add_argument(
        '--mongodb_jobs', '--mongodb',
        action='store',
        default="mongodb://127.0.0.1:27017/mrq",
        help='MongoDB URI for the jobs, scheduled_jobs & workers database')

    parser.add_argument(
        '--mongodb_logs',
        action='store',
        default="1",
        help='MongoDB URI for the logs database. ' +
             '"0" will disable remote logs, "1" will use main MongoDB.')

    parser.add_argument(
        '--mongodb_logs_size',
        action='store',
        default=16 *
        1024 *
        1024,
        type=int,
        help='If provided, sets the log collection to capped to that amount of bytes.')

    parser.add_argument(
        '--no_mongodb_ensure_indexes',
        action='store_true',
        default=False,
        help='If provided, skip the creation of MongoDB indexes at worker startup.')

    parser.add_argument(
        '--redis',
        action='store',
        default="redis://127.0.0.1:6379",
        help='Redis URI')

    parser.add_argument(
        '--redis_prefix',
        action='store',
        default="mrq",
        help='Redis key prefix')

    parser.add_argument(
        '--redis_max_connections',
        action='store',
        type=int,
        default=1000,
        help='Redis max connection pool size')

    parser.add_argument(
        '--redis_timeout',
        action='store',
        type=float,
        default=30,
        help='Redis connection pool timeout to wait for an available connection')

    parser.add_argument(
        '--name',
        default=None,
        action='store',
        help='Specify a different name')

    parser.add_argument(
        '--quiet',
        default=False,
        action='store_true',
        help='Don\'t output task logs')

    parser.add_argument(
        '--config',
        '-c',
        default=None,
        action="store",
        help='Path of a config file')

    parser.add_argument(
        '--worker_class',
        default="mrq.worker.Worker",
        action="store",
        help='Path to a custom worker class')

    parser.add_argument(
        '--version',
        '-v',
        default=False,
        action="store_true",
        help='Prints current MRQ version')

    parser.add_argument(
        '--no_import_patch',
        default=False,
        action='store_true',
        help='(DEPRECATED) Skips patching __import__ to fix gevent bug #108')

    parser.add_argument(
        '--add_network_latency',
        default="0",
        action='store',
        type=str,
        help='Adds random latency to the network calls, zero to N seconds. Can be a range (1-2)')

    parser.add_argument(
        '--default_job_result_ttl',
        default=7 * 24 * 3600,
        action='store',
        type=float,
        help='Seconds the results are kept in MongoDB when status is success')

    parser.add_argument(
        '--default_job_abort_ttl',
        default=24 * 3600,
        action='store',
        type=float,
        help='Seconds the tasks are kept in MongoDB when status is abort')

    parser.add_argument(
        '--default_job_cancel_ttl',
        default=24 * 3600,
        action='store',
        type=float,
        help='Seconds the tasks are kept in MongoDB when status is cancel')

    parser.add_argument(
        '--default_job_timeout',
        default=3600,
        action='store',
        type=float,
        help='In seconds, delay before interrupting the job')

    parser.add_argument(
        '--default_job_max_retries',
        default=3,
        action='store',
        type=int,
        help='Set the status to "maxretries" after retrying that many times')

    parser.add_argument(
        '--default_job_retry_delay',
        default=3,
        action='store',
        type=int,
        help='Seconds before a job in retry status is requeued again')

    parser.add_argument(
        '--use_large_job_ids',
        action='store_true',
        default=False,
        help='Do not use compacted job IDs in Redis. For compatibility with 0.1.x only')

    # mrq-run-specific arguments

    if config_type == "run":

        parser.add_argument(
            '--queue',
            action='store',
            default="",
            help='Queue the task on this queue instead of running it right away')

        parser.add_argument(
            'taskpath',
            action='store',
            help='Task to run')

        parser.add_argument(
            'taskargs',
            action='store',
            default='{}',
            nargs='*',
            help='JSON-encoded arguments, or "key value" pairs')

    # Dashboard-specific arguments

    elif config_type == "dashboard":

        parser.add_argument(
            '--dashboard_httpauth',
            default="",
            action="store",
            help='HTTP Auth for the Dashboard. Format is user:pass')

        parser.add_argument(
            '--dashboard_queue',
            default=None,
            action="store",
            help='Default queue for dashboard actions.')

        parser.add_argument(
            '--dashboard_port',
            default=5555,
            action="store",
            type=int,
            help='Use this port for mrq-dashboard. 5555 by default.')

        parser.add_argument(
            '--dashboard_ip',
            default="0.0.0.0",
            action="store",
            type=str,
            help='Bind the dashboard to this IP. Default is "0.0.0.0", use "127.0.0.1" to restrict access.')

    # Worker-specific args

    elif config_type == "worker":

        parser.add_argument(
            '--max_jobs',
            default=0,
            type=int,
            action='store',
            help='Gevent: max number of jobs to do before quitting.' +
                 ' Temp workaround for memory leaks')

        parser.add_argument(
            '--max_memory',
            default=0,
            type=int,
            action='store',
            help='Max memory (in Mb) after which the process will be shut down. Use with --processes [1-N] ' +
                 'to have supervisord automatically respawn the worker when this happens')

        parser.add_argument(
            '--greenlets',
            '--gevent',  # deprecated
            '-g',
            default=1,
            type=int,
            action='store',
            help='Max number of greenlets to use')

        parser.add_argument(
            '--processes',
            '-p',
            default=0,
            type=int,
            action='store',
            help='Number of processes to launch with supervisord')

        default_template = os.path.abspath(os.path.join(
            os.path.dirname(__file__),
            "supervisord_templates/default.conf"
        ))

        parser.add_argument(
            '--supervisord_template',
            default=default_template,
            action='store',
            help='Path of supervisord template to use')

        parser.add_argument(
            '--scheduler',
            default=False,
            action='store_true',
            help='Run the scheduler')

        parser.add_argument(
            '--scheduler_interval',
            default=60,
            action='store',
            type=float,
            help='Seconds between scheduler checks')

        parser.add_argument(
            '--report_interval',
            default=10,
            action='store',
            type=float,
            help='Seconds between worker reports to MongoDB')

        parser.add_argument(
            '--report_file',
            default="",
            action='store',
            type=unicode,
            help='Filepath of a json dump of the worker status. Disabled if none')

        parser.add_argument(
            'queues',
            nargs='*',
            default=["default"],
            help='The queues to listen on (default: \'default\')')

        parser.add_argument(
            '--subqueues_refresh_interval',
            default=10,
            action='store',
            type=float,
            help="Seconds between worker refreshes of the known subqueues")

        parser.add_argument(
            '--paused_queues_refresh_interval',
            default=10,
            action='store',
            type=float,
            help="Seconds between worker refreshes of the paused queues list")

        parser.add_argument(
            '--subqueues_delimiter',
            default='/',
            help='Delimiter between main queue and subqueue names',
            action=DelimiterArgParser)

        parser.add_argument(
            '--admin_port',
            default=0,
            action="store",
            type=int,
            help='Start an admin server on this port, if provided. Incompatible with --processes')

        parser.add_argument(
            '--admin_ip',
            default="127.0.0.1",
            action="store",
            type=str,
            help='IP for the admin server to listen on. Use "0.0.0.0" to allow access from outside')

        parser.add_argument(
            '--local_ip',
            default=get_local_ip(),
            action="store",
            type=str,
            help='Overwrite the local IP, to be displayed in the dashboard.')

        parser.add_argument(
            '--max_latency',
            default=1.,
            type=float,
            action='store',
            help='Max seconds while worker may sleep waiting for a new job. ' +
                 'Can be < 1.')

        parser.add_argument(
            '--dequeue_strategy',
            default="sequential",
            type=str,
            action='store',
            help='Strategy for dequeuing multiple queues. Default is \'sequential\', ' +
                 'which dequeues them in command-line order.')
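
Note that the --supervisord_template default above is itself built with os.path.dirname(__file__), resolving the bundled template relative to the config module. A hedged driver sketch for add_parser_args follows; the argparse wiring is standard and the flag names come straight from the example, while the import path from mrq.config is an assumption based on the source file shown (and the module as shown targets Python 2: note type=unicode):

import argparse
from mrq.config import add_parser_args  # assumed import path, per the header above

parser = argparse.ArgumentParser(description="mrq worker")
add_parser_args(parser, "worker")       # registers general + worker-specific flags
cfg = parser.parse_args(["--greenlets", "4", "high", "default"])
print("%s %s" % (cfg.greenlets, cfg.queues))  # 4 ['high', 'default']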

Example 43

Project: pyqtgraph Source File: PlotItem.py
    def __init__(self, parent=None, name=None, labels=None, title=None, viewBox=None, axisItems=None, enableMenu=True, **kargs):
        """
        Create a new PlotItem. All arguments are optional.
        Any extra keyword arguments are passed to PlotItem.plot().
        
        ==============  ==========================================================================================
        **Arguments:**
        *title*         Title to display at the top of the item. Html is allowed.
        *labels*        A dictionary specifying the axis labels to display::
                   
                            {'left': (args), 'bottom': (args), ...}
                     
                        The name of each axis and the corresponding arguments are passed to 
                        :func:`PlotItem.setLabel() <pyqtgraph.PlotItem.setLabel>`
                        Optionally, PlotItem may also be initialized with the keyword arguments left,
                        right, top, or bottom to achieve the same effect.
        *name*          Registers a name for this view so that others may link to it
        *viewBox*       If specified, the PlotItem will be constructed with this as its ViewBox.
        *axisItems*     Optional dictionary instructing the PlotItem to use pre-constructed items
                        for its axes. The dict keys must be axis names ('left', 'bottom', 'right', 'top')
                        and the values must be instances of AxisItem (or at least compatible with AxisItem).
        ==============  ==========================================================================================
        """
        
        GraphicsWidget.__init__(self, parent)
        
        self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        
        ## Set up control buttons
        path = os.path.dirname(__file__)
        #self.autoImageFile = os.path.join(path, 'auto.png')
        #self.lockImageFile = os.path.join(path, 'lock.png')
        self.autoBtn = ButtonItem(pixmaps.getPixmap('auto'), 14, self)
        self.autoBtn.mode = 'auto'
        self.autoBtn.clicked.connect(self.autoBtnClicked)
        #self.autoBtn.hide()
        self.buttonsHidden = False ## whether the user has requested buttons to be hidden
        self.mouseHovering = False
        
        self.layout = QtGui.QGraphicsGridLayout()
        self.layout.setContentsMargins(1,1,1,1)
        self.setLayout(self.layout)
        self.layout.setHorizontalSpacing(0)
        self.layout.setVerticalSpacing(0)
        
        if viewBox is None:
            viewBox = ViewBox(parent=self)
        self.vb = viewBox
        self.vb.sigStateChanged.connect(self.viewStateChanged)
        self.setMenuEnabled(enableMenu, enableMenu) ## en/disable plotitem and viewbox menus
        
        if name is not None:
            self.vb.register(name)
        self.vb.sigRangeChanged.connect(self.sigRangeChanged)
        self.vb.sigXRangeChanged.connect(self.sigXRangeChanged)
        self.vb.sigYRangeChanged.connect(self.sigYRangeChanged)
        
        self.layout.addItem(self.vb, 2, 1)
        self.alpha = 1.0
        self.autoAlpha = True
        self.spectrumMode = False
        
        self.legend = None
        
        ## Create and place axis items
        if axisItems is None:
            axisItems = {}
        self.axes = {}
        for k, pos in (('top', (1,1)), ('bottom', (3,1)), ('left', (2,0)), ('right', (2,2))):
            if k in axisItems:
                axis = axisItems[k]
            else:
                axis = AxisItem(orientation=k, parent=self)
            axis.linkToView(self.vb)
            self.axes[k] = {'item': axis, 'pos': pos}
            self.layout.addItem(axis, *pos)
            axis.setZValue(-1000)
            axis.setFlag(axis.ItemNegativeZStacksBehindParent)
        
        self.titleLabel = LabelItem('', size='11pt', parent=self)
        self.layout.addItem(self.titleLabel, 0, 1)
        self.setTitle(None)  ## hide
        
        
        for i in range(4):
            self.layout.setRowPreferredHeight(i, 0)
            self.layout.setRowMinimumHeight(i, 0)
            self.layout.setRowSpacing(i, 0)
            self.layout.setRowStretchFactor(i, 1)
            
        for i in range(3):
            self.layout.setColumnPreferredWidth(i, 0)
            self.layout.setColumnMinimumWidth(i, 0)
            self.layout.setColumnSpacing(i, 0)
            self.layout.setColumnStretchFactor(i, 1)
        self.layout.setRowStretchFactor(2, 100)
        self.layout.setColumnStretchFactor(1, 100)
        

        self.items = []
        self.curves = []
        self.itemMeta = weakref.WeakKeyDictionary()
        self.dataItems = []
        self.paramList = {}
        self.avgCurves = {}
        
        ### Set up context menu
        
        w = QtGui.QWidget()
        self.ctrl = c = Ui_Form()
        c.setupUi(w)
        dv = QtGui.QDoubleValidator(self)
        
        menuItems = [
            ('Transforms', c.transformGroup),
            ('Downsample', c.decimateGroup),
            ('Average', c.averageGroup),
            ('Alpha', c.alphaGroup),
            ('Grid', c.gridGroup),
            ('Points', c.pointsGroup),
        ]
        
        
        self.ctrlMenu = QtGui.QMenu()
        
        self.ctrlMenu.setTitle('Plot Options')
        self.subMenus = []
        for name, grp in menuItems:
            sm = QtGui.QMenu(name)
            act = QtGui.QWidgetAction(self)
            act.setDefaultWidget(grp)
            sm.addAction(act)
            self.subMenus.append(sm)
            self.ctrlMenu.addMenu(sm)
        
        self.stateGroup = WidgetGroup()
        for name, w in menuItems:
            self.stateGroup.autoAdd(w)
        
        self.fileDialog = None
        
        c.alphaGroup.toggled.connect(self.updateAlpha)
        c.alphaSlider.valueChanged.connect(self.updateAlpha)
        c.autoAlphaCheck.toggled.connect(self.updateAlpha)

        c.xGridCheck.toggled.connect(self.updateGrid)
        c.yGridCheck.toggled.connect(self.updateGrid)
        c.gridAlphaSlider.valueChanged.connect(self.updateGrid)

        c.fftCheck.toggled.connect(self.updateSpectrumMode)
        c.logXCheck.toggled.connect(self.updateLogMode)
        c.logYCheck.toggled.connect(self.updateLogMode)

        c.downsampleSpin.valueChanged.connect(self.updateDownsampling)
        c.downsampleCheck.toggled.connect(self.updateDownsampling)
        c.autoDownsampleCheck.toggled.connect(self.updateDownsampling)
        c.subsampleRadio.toggled.connect(self.updateDownsampling)
        c.meanRadio.toggled.connect(self.updateDownsampling)
        c.clipToViewCheck.toggled.connect(self.updateDownsampling)

        self.ctrl.avgParamList.itemClicked.connect(self.avgParamListClicked)
        self.ctrl.averageGroup.toggled.connect(self.avgToggled)
        
        self.ctrl.maxTracesCheck.toggled.connect(self.updateDecimation)
        self.ctrl.maxTracesSpin.valueChanged.connect(self.updateDecimation)
        
        self.hideAxis('right')
        self.hideAxis('top')
        self.showAxis('left')
        self.showAxis('bottom')
        
        if labels is None:
            labels = {}
        for label in list(self.axes.keys()):
            if label in kargs:
                labels[label] = kargs[label]
                del kargs[label]
        for k in labels:
            if isinstance(labels[k], basestring):
                labels[k] = (labels[k],)
            self.setLabel(k, *labels[k])
                
        if title is not None:
            self.setTitle(title)
        
        if len(kargs) > 0:
            self.plot(**kargs)
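
A minimal construction sketch for the docstring above, assuming a running Qt event loop and pyqtgraph's public names (pg.mkQApp, pg.PlotItem); to appear on screen the item would still need to be embedded in a GraphicsView or GraphicsLayoutWidget:

import pyqtgraph as pg

app = pg.mkQApp()                        # widgets require a Qt application
plt = pg.PlotItem(title="demo",
                  labels={'left': ('amplitude',),    # forwarded to setLabel('left', ...)
                          'bottom': ('time', 's')})  # setLabel('bottom', 'time', 's')
plt.plot([0, 1, 2, 3], [0, 1, 0, 1])     # data routed through PlotItem.plot()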

Example 44

Project: PYPOWER Source File: t_opf_pips.py
def t_opf_pips(quiet=False):
    """Tests for PIPS-based AC optimal power flow.

    @author: Ray Zimmerman (PSERC Cornell)
    """
    num_tests = 101

    t_begin(num_tests, quiet)

    tdir = dirname(__file__)
    casefile = join(tdir, 't_case9_opf')
    verbose = 0  # not quiet

    t0 = 'PIPS : '
    ppopt = ppoption(OPF_VIOLATION=1e-6, PDIPM_GRADTOL=1e-8,
                   PDIPM_COMPTOL=1e-8, PDIPM_COSTTOL=1e-9)
    ppopt = ppoption(ppopt, OUT_ALL=0, VERBOSE=verbose, OPF_ALG=560)

    ## set up indices
    ib_data     = r_[arange(BUS_AREA + 1), arange(BASE_KV, VMIN + 1)]
    ib_voltage  = arange(VM, VA + 1)
    ib_lam      = arange(LAM_P, LAM_Q + 1)
    ib_mu       = arange(MU_VMAX, MU_VMIN + 1)
    ig_data     = r_[[GEN_BUS, QMAX, QMIN], arange(MBASE, APF + 1)]
    ig_disp     = array([PG, QG, VG])
    ig_mu       = arange(MU_PMAX, MU_QMIN + 1)
    ibr_data    = arange(ANGMAX + 1)
    ibr_flow    = arange(PF, QT + 1)
    ibr_mu      = array([MU_SF, MU_ST])
    ibr_angmu   = array([MU_ANGMIN, MU_ANGMAX])

    ## get solved AC power flow case from MAT-file
    soln9_opf = loadmat(join(tdir, 'soln9_opf.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf['bus_soln']
    gen_soln = soln9_opf['gen_soln']
    branch_soln = soln9_opf['branch_soln']
    f_soln = soln9_opf['f_soln'][0]

    ## run OPF
    t = t0
    r = runopf(casefile, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ## run with automatic conversion of single-block pwl to linear costs
    t = ''.join([t0, '(single-block PWL) : '])
    ppc = loadcase(casefile)
    ppc['gencost'][2, NCOST] = 2
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    xr = r_[r['var']['val']['Va'], r['var']['val']['Vm'], r['var']['val']['Pg'],
            r['var']['val']['Qg'], 0, r['var']['val']['y']]
    t_is(r['x'], xr, 8, [t, 'check on raw x returned from OPF'])

    ## get solved AC power flow case from MAT-file
    soln9_opf_Plim = loadmat(join(tdir, 'soln9_opf_Plim.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_Plim['bus_soln']
    gen_soln = soln9_opf_Plim['gen_soln']
    branch_soln = soln9_opf_Plim['branch_soln']
    f_soln = soln9_opf_Plim['f_soln'][0]

    ## run OPF with active power line limits
    t = ''.join([t0, '(P line lim) : '])
    ppopt1 = ppoption(ppopt, OPF_FLOW_LIM=1)
    r = runopf(casefile, ppopt1)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ##-----  test OPF with quadratic gen costs moved to generalized costs  -----
    ppc = loadcase(casefile)
    ppc['gencost'] = array([
        [2,   1500, 0,   3,   0.11,    5,   0],
        [2,   2000, 0,   3,   0.085,   1.2, 0],
        [2,   3000, 0,   3,   0.1225,  1,   0]
    ])
    r = runopf(ppc, ppopt)
    bus_soln, gen_soln, branch_soln, f_soln, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    branch_soln = branch_soln[:, :MU_ST + 1]

    A = None
    l = array([])
    u = array([])
    nb = ppc['bus'].shape[0]      # number of buses
    ng = ppc['gen'].shape[0]      # number of gens
    thbas = 0;        thend = thbas + nb
    vbas  = thend;    vend  = vbas + nb
    pgbas = vend;     pgend = pgbas + ng
#    qgbas = pgend;   qgend = qgbas + ng
    nxyz = 2 * nb + 2 * ng
    N = sparse((ppc['baseMVA'] * ones(ng), (arange(ng), arange(pgbas, pgend))), (ng, nxyz))
    fparm = ones((ng, 1)) * array([[1, 0, 0, 1]])
    ix = argsort(ppc['gen'][:, 0])
    H = 2 * spdiags(ppc['gencost'][ix, 4], 0, ng, ng, 'csr')
    Cw = ppc['gencost'][ix, 5]
    ppc['gencost'][:, 4:7] = 0
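    ## A sketch of what this setup encodes in PYPOWER's generalized cost
    ## interface: N picks out w = baseMVA * Pg for each generator, the fparm
    ## rows [1, 0, 0, 1] make each w linear with no shift or dead zone, and
    ## the quadratic gencost coefficients move into H (doubled, since the
    ## user cost includes 0.5 * w'*H*w) with the linear ones in Cw, after
    ## which the polynomial columns of gencost are zeroed out.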

    ## run OPF with quadratic gen costs moved to generalized costs
    t = ''.join([t0, 'w/quadratic generalized gen cost : '])
    r = opf(ppc, A, l, u, ppopt, N, fparm, H, Cw)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(r['cost']['usr'], f, 12, [t, 'user cost'])

    ##-----  run OPF with extra linear user constraints & costs  -----
    ## single new z variable constrained to be greater than or equal to
    ## deviation from 1 pu voltage at bus 1, linear cost on this z
    ## get solved AC power flow case from MAT-file
    soln9_opf_extras1 = loadmat(join(tdir, 'soln9_opf_extras1.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_extras1['bus_soln']
    gen_soln = soln9_opf_extras1['gen_soln']
    branch_soln = soln9_opf_extras1['branch_soln']
    f_soln = soln9_opf_extras1['f_soln'][0]

    row = [0, 0, 1, 1]
    col = [9, 24, 9, 24]
    A = sparse(([-1, 1, 1, 1], (row, col)), (2, 25))
    u = array([Inf, Inf])
    l = array([-1, 1])

    N = sparse(([1], ([0], [24])), (1, 25))    ## new z variable only
    fparm = array([[1, 0, 0, 1]])              ## w = r = z
    H = sparse((1, 1))                ## no quadratic term
    Cw = array([100.0])
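    ## The two rows of A encode  -1 <= -Vm1 + z  and  1 <= Vm1 + z  (both
    ## unbounded above), i.e. z >= Vm1 - 1 and z >= 1 - Vm1, so z >= |Vm1 - 1|:
    ## the deviation from 1 p.u. voltage described above. Column 9 is Vm at
    ## bus 1 (following the 9 Va variables in x), column 24 is the new z, and
    ## Cw prices z at 100 per unit of deviation.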

    t = ''.join([t0, 'w/extra constraints & costs 1 : '])
    r = opf(casefile, A, l, u, ppopt, N, fparm, H, Cw)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(r['var']['val']['z'], 0.025419, 6, [t, 'user variable'])
    t_is(r['cost']['usr'], 2.5419, 4, [t, 'user cost'])

    ##-----  test OPF with capability curves  -----
    ppc = loadcase(join(tdir, 't_case9_opfv2'))
    ## remove angle diff limits
    ppc['branch'][0, ANGMAX] =  360
    ppc['branch'][8, ANGMIN] = -360

    ## get solved AC power flow case from MAT-file
    soln9_opf_PQcap = loadmat(join(tdir, 'soln9_opf_PQcap.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_PQcap['bus_soln']
    gen_soln = soln9_opf_PQcap['gen_soln']
    branch_soln = soln9_opf_PQcap['branch_soln']
    f_soln = soln9_opf_PQcap['f_soln'][0]

    ## run OPF with capability curves
    t = ''.join([t0, 'w/capability curves : '])
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ##-----  test OPF with angle difference limits  -----
    ppc = loadcase(join(tdir, 't_case9_opfv2'))
    ## remove capability curves
    ppc['gen'][ix_(arange(1, 3),
                   [PC1, PC2, QC1MIN, QC1MAX, QC2MIN, QC2MAX])] = zeros((2, 6))

    ## get solved AC power flow case from MAT-file
    soln9_opf_ang = loadmat(join(tdir, 'soln9_opf_ang.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_ang['bus_soln']
    gen_soln = soln9_opf_ang['gen_soln']
    branch_soln = soln9_opf_ang['branch_soln']
    f_soln = soln9_opf_ang['f_soln'][0]

    ## run OPF with angle difference limits
    t = ''.join([t0, 'w/angle difference limits : '])
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  1, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(branch[:, ibr_angmu ], branch_soln[:, ibr_angmu ],  2, [t, 'branch angle mu'])

    ##-----  test OPF with ignored angle difference limits  -----
    ## get solved AC power flow case from MAT-file
    soln9_opf = loadmat(join(tdir, 'soln9_opf.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf['bus_soln']
    gen_soln = soln9_opf['gen_soln']
    branch_soln = soln9_opf['branch_soln']
    f_soln = soln9_opf['f_soln'][0]

    ## run OPF with ignored angle difference limits
    t = ''.join([t0, 'w/ignored angle difference limits : '])
    ppopt1 = ppoption(ppopt, OPF_IGNORE_ANG_LIM=1)
    r = runopf(ppc, ppopt1)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    ## ang limits are not in this solution data, so let's remove them
    branch[0, ANGMAX] =  360
    branch[8, ANGMIN] = -360
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    t_end()

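The tdir prefix in every loadmat(join(tdir, ...)) call above is the directory containing the test module itself, resolved with os.path.dirname (the line tdir = dirname(__file__) is visible near the top of the next example). A minimal sketch of the pattern, with the file name taken from the example and everything else assumed:

import os.path
from scipy.io import loadmat

# Directory of the current module; abspath() guards against the empty
# string that dirname() returns for a bare file name.
tdir = os.path.dirname(os.path.abspath(__file__))

# Load a MAT-file that ships alongside the module.
soln = loadmat(os.path.join(tdir, 'soln9_opf.mat'), struct_as_record=True)
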
Example 45

Project: PYPOWER Source File: t_opf_pips_sc.py
def t_opf_pips_sc(quiet=False):
    """Tests for step-controlled PIPS-based AC optimal power flow.

    @author: Ray Zimmerman (PSERC Cornell)
    """
    num_tests = 101

    t_begin(num_tests, quiet)

    tdir = dirname(__file__)
    casefile = join(tdir, 't_case9_opf')
    verbose = 0  # not quiet

    t0 = 'PIPS-sc : '
    ppopt = ppoption(OPF_VIOLATION=1e-6, PDIPM_GRADTOL=1e-8,
                     PDIPM_COMPTOL=1e-8, PDIPM_COSTTOL=1e-9)
    ppopt = ppoption(ppopt, OUT_ALL=0, VERBOSE=verbose, OPF_ALG=565)

    ## set up indices
    ib_data     = r_[arange(BUS_AREA + 1), arange(BASE_KV, VMIN + 1)]
    ib_voltage  = arange(VM, VA + 1)
    ib_lam      = arange(LAM_P, LAM_Q + 1)
    ib_mu       = arange(MU_VMAX, MU_VMIN + 1)
    ig_data     = r_[[GEN_BUS, QMAX, QMIN], arange(MBASE, APF + 1)]
    ig_disp     = array([PG, QG, VG])
    ig_mu       = arange(MU_PMAX, MU_QMIN + 1)
    ibr_data    = arange(ANGMAX + 1)
    ibr_flow    = arange(PF, QT + 1)
    ibr_mu      = array([MU_SF, MU_ST])
    ibr_angmu   = array([MU_ANGMIN, MU_ANGMAX])

    ## get solved AC power flow case from MAT-file
    soln9_opf = loadmat(join(tdir, 'soln9_opf.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf['bus_soln']
    gen_soln = soln9_opf['gen_soln']
    branch_soln = soln9_opf['branch_soln']
    f_soln = soln9_opf['f_soln'][0]

    ## run OPF
    t = t0
    r = runopf(casefile, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ## run with automatic conversion of single-block pwl to linear costs
    t = ''.join([t0, '(single-block PWL) : '])
    ppc = loadcase(casefile)
    ppc['gencost'][2, NCOST] = 2
    r = runopf(ppc, ppopt)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    xr = r_[r['var']['val']['Va'], r['var']['val']['Vm'], r['var']['val']['Pg'],
            r['var']['val']['Qg'], 0, r['var']['val']['y']]
    t_is(r['x'], xr, 8, [t, 'check on raw x returned from OPF'])

    ## get solved AC power flow case from MAT-file
    soln9_opf_Plim = loadmat(join(tdir, 'soln9_opf_Plim.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_Plim['bus_soln']
    gen_soln = soln9_opf_Plim['gen_soln']
    branch_soln = soln9_opf_Plim['branch_soln']
    f_soln = soln9_opf_Plim['f_soln'][0]

    ## run OPF with active power line limits
    t = ''.join([t0, '(P line lim) : '])
    ppopt1 = ppoption(ppopt, OPF_FLOW_LIM=1)
    r = runopf(casefile, ppopt1)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ##-----  test OPF with quadratic gen costs moved to generalized costs  -----
    ppc = loadcase(casefile)
    ppc['gencost'] = array([
        [2,   1500, 0,   3,   0.11,    5,   0],
        [2,   2000, 0,   3,   0.085,   1.2, 0],
        [2,   3000, 0,   3,   0.1225,  1,   0]
    ])
    r = runopf(ppc, ppopt)
    bus_soln, gen_soln, branch_soln, f_soln, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    branch_soln = branch_soln[:, :MU_ST + 1]

    A = None
    l = array([])
    u = array([])
    nb = ppc['bus'].shape[0]      # number of buses
    ng = ppc['gen'].shape[0]      # number of gens
    thbas = 0;        thend = thbas + nb
    vbas  = thend;    vend  = vbas + nb
    pgbas = vend;     pgend = pgbas + ng
#    qgbas = pgend;   qgend = qgbas + ng
    nxyz = 2 * nb + 2 * ng
    N = sparse((ppc['baseMVA'] * ones(ng), (arange(ng), arange(pgbas, pgend))), (ng, nxyz))
    fparm = ones((ng, 1)) * array([[1, 0, 0, 1]])
    ix = argsort(ppc['gen'][:, 0])
    H = 2 * spdiags(ppc['gencost'][ix, 4], 0, ng, ng, 'csr')
    Cw = ppc['gencost'][ix, 5]
    ppc['gencost'][:, 4:7] = 0

    ## run OPF with quadratic gen costs moved to generalized costs
    t = ''.join([t0, 'w/quadratic generalized gen cost : '])
    r = opf(ppc, A, l, u, ppopt, N, fparm, H, Cw)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(r['cost']['usr'], f, 12, [t, 'user cost'])

    ##-----  run OPF with extra linear user constraints & costs  -----
    ## single new z variable constrained to be greater than or equal to
    ## deviation from 1 pu voltage at bus 1, linear cost on this z
    ## get solved AC power flow case from MAT-file
    soln9_opf_extras1 = loadmat(join(tdir, 'soln9_opf_extras1.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_extras1['bus_soln']
    gen_soln = soln9_opf_extras1['gen_soln']
    branch_soln = soln9_opf_extras1['branch_soln']
    f_soln = soln9_opf_extras1['f_soln'][0]

    row = [0, 0, 1, 1]
    col = [9, 24, 9, 24]
    A = sparse(([-1, 1, 1, 1], (row, col)), (2, 25))
    u = array([Inf, Inf])
    l = array([-1, 1])

    N = sparse(([1], ([0], [24])), (1, 25))    ## new z variable only
    fparm = array([[1, 0, 0, 1]])              ## w = r = z
    H = sparse((1, 1))                ## no quadratic term
    Cw = array([100.0])

    t = ''.join([t0, 'w/extra constraints & costs 1 : '])
    r = opf(casefile, A, l, u, ppopt, N, fparm, H, Cw)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(r['var']['val']['z'], 0.025419, 6, [t, 'user variable'])
    t_is(r['cost']['usr'], 2.5419, 4, [t, 'user cost'])

    ##-----  test OPF with capability curves  -----
    ppc = loadcase(join(tdir, 't_case9_opfv2'))
    ## remove angle diff limits
    ppc['branch'][0, ANGMAX] = 360
    ppc['branch'][8, ANGMIN] = -360

    ## get solved AC power flow case from MAT-file
    soln9_opf_PQcap = loadmat(join(tdir, 'soln9_opf_PQcap.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_PQcap['bus_soln']
    gen_soln = soln9_opf_PQcap['gen_soln']
    branch_soln = soln9_opf_PQcap['branch_soln']
    f_soln = soln9_opf_PQcap['f_soln'][0]

    ## run OPF with capability curves
    t = ''.join([t0, 'w/capability curves : '])
    r = runopf(ppc, ppopt)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    ##-----  test OPF with angle difference limits  -----
    ppc = loadcase(join(tdir, 't_case9_opfv2'))
    ## remove capability curves
    ppc['gen'][ix_(arange(1, 3),
                   [PC1, PC2, QC1MIN, QC1MAX, QC2MIN, QC2MAX])] = zeros((2, 6))

    ## get solved AC power flow case from MAT-file
    soln9_opf_ang = loadmat(join(tdir, 'soln9_opf_ang.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf_ang['bus_soln']
    gen_soln = soln9_opf_ang['gen_soln']
    branch_soln = soln9_opf_ang['branch_soln']
    f_soln = soln9_opf_ang['f_soln'][0]

    ## run OPF with angle difference limits
    t = ''.join([t0, 'w/angle difference limits : '])
    r = runopf(ppc, ppopt)
    f, bus, gen, branch, success = \
            r['f'], r['bus'], r['gen'], r['branch'], r['success']
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  1, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])
    t_is(branch[:, ibr_angmu ], branch_soln[:, ibr_angmu ],  2, [t, 'branch angle mu'])

    ##-----  test OPF with ignored angle difference limits  -----
    ## get solved AC power flow case from MAT-file
    soln9_opf = loadmat(join(tdir, 'soln9_opf.mat'), struct_as_record=True)
    ## defines bus_soln, gen_soln, branch_soln, f_soln
    bus_soln = soln9_opf['bus_soln']
    gen_soln = soln9_opf['gen_soln']
    branch_soln = soln9_opf['branch_soln']
    f_soln = soln9_opf['f_soln'][0]

    ## run OPF with ignored angle difference limits
    t = ''.join([t0, 'w/ignored angle difference limits : '])
    ppopt1 = ppoption(ppopt, OPF_IGNORE_ANG_LIM=1)
    r = runopf(ppc, ppopt1)
    bus, gen, branch, f, success = \
            r['bus'], r['gen'], r['branch'], r['f'], r['success']
    ## ang limits are not in this solution data, so let's remove them
    branch[0, ANGMAX] =  360
    branch[8, ANGMIN] = -360
    t_ok(success, [t, 'success'])
    t_is(f, f_soln, 3, [t, 'f'])
    t_is(   bus[:, ib_data   ],    bus_soln[:, ib_data   ], 10, [t, 'bus data'])
    t_is(   bus[:, ib_voltage],    bus_soln[:, ib_voltage],  3, [t, 'bus voltage'])
    t_is(   bus[:, ib_lam    ],    bus_soln[:, ib_lam    ],  3, [t, 'bus lambda'])
    t_is(   bus[:, ib_mu     ],    bus_soln[:, ib_mu     ],  2, [t, 'bus mu'])
    t_is(   gen[:, ig_data   ],    gen_soln[:, ig_data   ], 10, [t, 'gen data'])
    t_is(   gen[:, ig_disp   ],    gen_soln[:, ig_disp   ],  3, [t, 'gen dispatch'])
    t_is(   gen[:, ig_mu     ],    gen_soln[:, ig_mu     ],  3, [t, 'gen mu'])
    t_is(branch[:, ibr_data  ], branch_soln[:, ibr_data  ], 10, [t, 'branch data'])
    t_is(branch[:, ibr_flow  ], branch_soln[:, ibr_flow  ],  3, [t, 'branch flow'])
    t_is(branch[:, ibr_mu    ], branch_soln[:, ibr_mu    ],  2, [t, 'branch mu'])

    t_end()

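One subtlety of the tdir = dirname(__file__) idiom used by these tests: os.path.dirname returns an empty string when its argument has no directory component (for example, when the test module is launched from its own directory), and os.path.join tolerates that. A quick illustration:

import os.path

print(os.path.dirname('t_opf_pips_sc.py'))              # '' - no directory part
print(os.path.join('', 't_case9_opf'))                  # 't_case9_opf'
print(os.path.dirname('/pypower/t/t_opf_pips_sc.py'))   # '/pypower/t'
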
Example 46

Project: django-pyodbc Source File: ss_loaddata.py
    def handle(self, *fixture_labels, **options):
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed()
            transaction.enter_transaction_management()
            transaction.managed(True)

        self.disable_forward_ref_checks()

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None:   file,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_fixtures = [os.path.join(os.path.dirname(app.__file__), 'fixtures') for app in get_apps()]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                self.enable_forward_ref_checks(cursor)
                sys.stderr.write(
                    self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
                        (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    for compression_format in compression_formats:
                        if compression_format:
                            file_name = '.'.join([fixture_name, format,
                                                  compression_format])
                        else:
                            file_name = '.'.join([fixture_name, format])

                        if verbosity > 1:
                            print "Trying %s for %s fixture '%s'..." % \
                                (humanize(fixture_dir), file_name, fixture_name)
                        full_path = os.path.join(fixture_dir, file_name)
                        open_method = compression_types[compression_format]
                        try:
                            fixture = open_method(full_path, 'r')
                            if label_found:
                                fixture.close()
                                self.enable_forward_ref_checks(cursor)
                                print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                                    (fixture_name, humanize(fixture_dir)))
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                return
                            else:
                                fixture_count += 1
                                objects_in_fixture = 0
                                if verbosity > 0:
                                    print "Installing %s fixture '%s' from %s." % \
                                        (format, fixture_name, humanize(fixture_dir))
                                try:
                                    objects = serializers.deserialize(format, fixture)
                                    for obj in objects:
                                        objects_in_fixture += 1
                                        self.handle_ref_checks(cursor, obj)
                                        models.add(obj.object.__class__)
                                        obj.save()
                                    object_count += objects_in_fixture
                                    label_found = True
                                except (SystemExit, KeyboardInterrupt):
                                    self.enable_forward_ref_checks(cursor)
                                    raise
                                except Exception:
                                    import traceback
                                    fixture.close()
                                    self.enable_forward_ref_checks(cursor)
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    if show_traceback:
                                        traceback.print_exc()
                                    else:
                                        sys.stderr.write(
                                            self.style.ERROR("Problem installing fixture '%s': %s\n" %
                                                 (full_path, ''.join(traceback.format_exception(sys.exc_type,
                                                     sys.exc_value, sys.exc_traceback)))))
                                    return
                                fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    self.enable_forward_ref_checks(cursor)
                                    sys.stderr.write(
                                        self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
                                            (fixture_name)))
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    return

                        except Exception, e:
                            if verbosity > 1:
                                print "No %s fixture '%s' in %s." % \
                                    (format, fixture_name, humanize(fixture_dir))

        self.enable_forward_ref_checks(cursor)

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit()
            transaction.leave_transaction_management()

        if object_count == 0:
            if verbosity > 1:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()

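The app_fixtures list in this example is a canonical use of os.path.dirname for data discovery: app is a module object, app.__file__ is the path of its models module, and the dirname of that joined with 'fixtures' names the app's fixture directory. The same pattern applied to an arbitrary imported package (the 'fixtures' subdirectory is just the convention assumed here):

import os.path
import json   # stands in for any imported package

# __file__ of a package points at its __init__.py (or at models.py for
# a Django app); dirname() gives the directory to search for data.
pkg_dir = os.path.dirname(json.__file__)
fixtures_dir = os.path.join(pkg_dir, 'fixtures')
print(fixtures_dir)
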
Example 47

Project: PYPOWER Source File: t_scale_load.py
def t_scale_load(quiet=False):
    """Tests for code in C{scale_load}.

    @author: Ray Zimmerman (PSERC Cornell)
    """
    n_tests = 275

    t_begin(n_tests, quiet)

    ppc = loadcase(join(dirname(__file__), 't_auction_case'))
    ppc['gen'][7, GEN_BUS] = 2    ## multiple disp. loads per area, same bus as gen
    ppc['gen'][7, [QG, QMIN, QMAX]] = array([3, 0, 3])
    ## put this load before the gens in the matrix
    ppc['gen'] = vstack([ppc['gen'][7, :], ppc['gen'][:7, :], ppc['gen'][8, :]])
    ld = find(isload(ppc['gen']))
    a = [None] * 3
    lda = [None] * 3
    for k in range(3):
        a[k] = find(ppc['bus'][:, BUS_AREA] == k + 1)  ## buses in area k
        tmp = find( in1d(ppc['gen'][ld, GEN_BUS] - 1, a[k]) )
        lda[k] = ld[tmp]                       ## disp loads in area k

    area = [None] * 3
    for k in range(3):
        area[k] = {'fixed': {}, 'disp': {}, 'both': {}}
        area[k]['fixed']['p'] = sum(ppc['bus'][a[k], PD])
        area[k]['fixed']['q'] = sum(ppc['bus'][a[k], QD])
        area[k]['disp']['p'] = -sum(ppc['gen'][lda[k], PMIN])
        area[k]['disp']['qmin'] = -sum(ppc['gen'][lda[k], QMIN])
        area[k]['disp']['qmax'] = -sum(ppc['gen'][lda[k], QMAX])
        area[k]['disp']['q'] = area[k]['disp']['qmin'] + area[k]['disp']['qmax']
        area[k]['both']['p'] = area[k]['fixed']['p'] + area[k]['disp']['p']
        area[k]['both']['q'] = area[k]['fixed']['q'] + area[k]['disp']['q']

    total = {'fixed': {}, 'disp': {}, 'both': {}}
    total['fixed']['p'] = sum(ppc['bus'][:, PD])
    total['fixed']['q'] = sum(ppc['bus'][:, QD])
    total['disp']['p'] = -sum(ppc['gen'][ld, PMIN])
    total['disp']['qmin'] = -sum(ppc['gen'][ld, QMIN])
    total['disp']['qmax'] = -sum(ppc['gen'][ld, QMAX])
    total['disp']['q'] = total['disp']['qmin'] + total['disp']['qmax']
    total['both']['p'] = total['fixed']['p'] + total['disp']['p']
    total['both']['q'] = total['fixed']['q'] + total['disp']['q']
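    ## Dispatchable loads are modeled as negative generators in the PYPOWER
    ## case format, hence the sign flips above: nominal dispatchable P is
    ## -sum(PMIN) and the reactive range comes from -QMIN / -QMAX, while
    ## 'both' combines the fixed (bus PD/QD) and dispatchable (gen) totals.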

    ##-----  single load zone, one scale factor  -----
    load = array([2])
    t = 'all fixed loads (PQ) * 2 : '
    bus, _ = scale_load(load, ppc['bus'])
    t_is(sum(bus[:, PD]), load * total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), load * total['fixed']['q'], 8, [t, 'total fixed Q'])
    opt = {'which': 'FIXED'}

    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)

    t_is(sum(bus[:, PD]), load * total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), load * total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all fixed loads (P) * 2 : '
    opt = {'pq': 'P'}
    bus, _ = scale_load(load, ppc['bus'], None, None, opt)
    t_is(sum(bus[:, PD]), load * total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    opt = {'pq': 'P', 'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), load * total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all loads (PQ) * 2 : '
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'])
    t_is(sum(bus[:, PD]), load * total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), load * total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load * total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), load * total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), load * total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all loads (P) * 2 : '
    opt = {'pq': 'P'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), load * total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load * total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all disp loads (PQ) * 2 : '
    opt = {'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load * total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), load * total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), load * total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all disp loads (P) * 2 : '
    opt = {'pq': 'P', 'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load * total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    ##-----  single load zone, one scale quantity  -----
    load = array([200.0])
    t = 'all fixed loads (PQ) => total = 200 : '
    opt = {'scale': 'QUANTITY'}
    bus, _ = scale_load(load, ppc['bus'], None, None, opt)
    t_is(sum(bus[:, PD]), load, 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), load / total['fixed']['p'] * total['fixed']['q'], 8, [t, 'total fixed Q'])
    opt = {'scale': 'QUANTITY', 'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), load - total['disp']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), (load - total['disp']['p'])/total['fixed']['p']*total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all fixed loads (P) => total = 200 : '
    opt = {'scale': 'QUANTITY', 'pq': 'P'}
    bus, _ = scale_load(load, ppc['bus'], None, None, opt)
    t_is(sum(bus[:, PD]), load, 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    opt = {'scale': 'QUANTITY', 'pq': 'P', 'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), load - total['disp']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all loads (PQ) => total = 200 : '
    opt = {'scale': 'QUANTITY'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), load / total['both']['p']*total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), load / total['both']['p']*total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load / total['both']['p']*total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), load / total['both']['p']*total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), load / total['both']['p']*total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all loads (P) => total = 200 : '
    opt = {'scale': 'QUANTITY', 'pq': 'P'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), load / total['both']['p']*total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load / total['both']['p']*total['disp']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all disp loads (PQ) => total = 200 : '
    opt = {'scale': 'QUANTITY', 'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load - total['fixed']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), (load - total['fixed']['p'])/total['disp']['p']*total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), (load - total['fixed']['p'])/total['disp']['p']*total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    t = 'all disp loads (P) => total = 200 : '
    opt = {'scale': 'QUANTITY', 'pq': 'P', 'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    t_is(sum(bus[:, PD]), total['fixed']['p'], 8, [t, 'total fixed P'])
    t_is(sum(bus[:, QD]), total['fixed']['q'], 8, [t, 'total fixed Q'])
    t_is(-sum(gen[ld, PMIN]), load - total['fixed']['p'], 8, [t, 'total disp P'])
    t_is(-sum(gen[ld, QMIN]), total['disp']['qmin'], 8, [t, 'total disp Qmin'])
    t_is(-sum(gen[ld, QMAX]), total['disp']['qmax'], 8, [t, 'total disp Qmax'])

    ##-----  3 zones, area scale factors  -----
    t = 'area fixed loads (PQ) * [3 2 1] : '
    load = array([3, 2, 1])
    bus, _ = scale_load(load, ppc['bus'])
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), load[k] * area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))

    opt = {'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), load[k] * area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'area fixed loads (P) * [3 2 1] : '
    load = array([3, 2, 1])
    opt = {'pq': 'P'}
    bus, _ = scale_load(load, ppc['bus'], None, None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))

    opt = {'pq': 'P', 'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'all area loads (PQ) * [3 2 1] : '
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'])
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), load[k] * area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k] * area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), load[k] * area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), load[k] * area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))
    t = 'all area loads (P) * [3 2 1] : '
    opt = {'pq': 'P'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k] * area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'area disp loads (PQ) * [3 2 1] : '
    opt = {'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k] * area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), load[k] * area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), load[k] * area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'area disp loads (P) * [3 2 1] : '
    opt = {'pq': 'P', 'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k] * area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    ##-----  3 zones, area scale quantities  -----
    t = 'area fixed loads (PQ) => total = [100 80 60] : '
    load = array([100, 80, 60], float)
    opt = {'scale': 'QUANTITY'}
    bus, _ = scale_load(load, ppc['bus'], None, None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), load[k] / area[k]['fixed']['p'] * area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))

    opt = {'scale': 'QUANTITY', 'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] - area[k]['disp']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), (load[k] - area[k]['disp']['p']) / area[k]['fixed']['p'] * area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'area fixed loads (P) => total = [100 80 60] : '
    load = array([100, 80, 60], float)
    opt = {'scale': 'QUANTITY', 'pq': 'P'}
    bus, _ = scale_load(load, ppc['bus'], None, None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))

    opt = {'scale': 'QUANTITY', 'pq': 'P', 'which': 'FIXED'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k]-area[k]['disp']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'all area loads (PQ) => total = [100 80 60] : '
    opt = {'scale': 'QUANTITY'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] / area[k]['both']['p'] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), load[k] / area[k]['both']['p'] * area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k] / area[k]['both']['p'] * area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), load[k] / area[k]['both']['p'] * area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), load[k] / area[k]['both']['p'] * area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'all area loads (P) => total = [100 80 60] : '
    opt = {'scale': 'QUANTITY', 'pq': 'P'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), load[k] / area[k]['both']['p'] * area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k] / area[k]['both']['p'] * area[k]['disp']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'area disp loads (PQ) => total = [100 80 60] : throws expected exception'
    load = array([100, 80, 60], float)
    opt = {'scale': 'QUANTITY', 'which': 'DISPATCHABLE'}
    err = 0
    try:
        bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    except ScalingError as e:
        expected = 'scale_load: impossible to make zone 2 load equal 80 by scaling non-existent dispatchable load'
        err = expected in str(e)
    t_ok(err, t)

    t = 'area disp loads (PQ) => total = [100 74.3941 60] : '
    load = array([100, area[1]['fixed']['p'], 60], float)
    opt = {'scale': 'QUANTITY', 'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k]-area[k]['fixed']['p'], 8, '%s area %d disp P' % (t, k))
        if k == 1:
            t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
            t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))
        else:
            t_is(-sum(gen[lda[k], QMIN]), (load[k] - area[k]['fixed']['p']) / area[k]['disp']['p'] * area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
            t_is(-sum(gen[lda[k], QMAX]), (load[k] - area[k]['fixed']['p']) / area[k]['disp']['p'] * area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    t = 'area disp loads (P) => total = [100 74.3941 60] : '
    opt = {'scale': 'QUANTITY', 'pq': 'P', 'which': 'DISPATCHABLE'}
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], None, opt)
    for k in range(len(load)):
        t_is(sum(bus[a[k], PD]), area[k]['fixed']['p'], 8, '%s area %d fixed P' % (t, k))
        t_is(sum(bus[a[k], QD]), area[k]['fixed']['q'], 8, '%s area %d fixed Q' % (t, k))
        t_is(-sum(gen[lda[k], PMIN]), load[k]-area[k]['fixed']['p'], 8, '%s area %d disp P' % (t, k))
        t_is(-sum(gen[lda[k], QMIN]), area[k]['disp']['qmin'], 8, '%s area %d disp Qmin' % (t, k))
        t_is(-sum(gen[lda[k], QMAX]), area[k]['disp']['qmax'], 8, '%s area %d disp Qmax' % (t, k))

    ##-----  explicit single load zone  -----
    t = 'explicit single load zone'
    load_zone = zeros(ppc['bus'].shape[0])
    load_zone[[2, 3]] = 1
    load = array([2.0])
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], load_zone)
    Pd = ppc['bus'][:, PD]
    Pd[[2, 3]] = load * Pd[[2, 3]]
    t_is( bus[:, PD], Pd, 8, t)

    ##-----  explicit multiple load zone  -----
    t = 'explicit multiple load zone'
    load_zone = zeros(ppc['bus'].shape[0])
    load_zone[[2, 3]] = 1
    load_zone[[6, 7]] = 2
    load = array([2, 0.5])
    bus, gen = scale_load(load, ppc['bus'], ppc['gen'], load_zone)
    Pd = ppc['bus'][:, PD]
    Pd[[2, 3]] = load[0] * Pd[[2, 3]]
    Pd[[6, 7]] = load[1] * Pd[[6, 7]]
    t_is( bus[:, PD], Pd, 8, t)

    t_end()
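
The two explicit-zone tests above reduce to a single pattern: build a per-bus zone vector, then multiply each zone's PD entries by that zone's factor. A minimal NumPy sketch of the idea, assuming a bare bus matrix with PD at a hypothetical column index 2 (PYPOWER takes the real index from idx_bus):

import numpy as np

PD = 2  # assumed column index for real power demand (idx_bus defines the real one)

def scale_zone_loads(bus, load_zone, scale):
    # load_zone[i] == k (1-based) assigns bus i to zone k; 0 leaves it untouched.
    # scale[k-1] is the multiplier applied to zone k's PD entries, mirroring
    # the explicit-zone assertions in the test above.
    bus = bus.copy()
    for k, factor in enumerate(scale, start=1):
        bus[load_zone == k, PD] *= factor
    return bus

bus = np.zeros((4, 3))
bus[:, PD] = [10., 20., 30., 40.]
load_zone = np.array([1, 1, 2, 0])
print(scale_zone_loads(bus, load_zone, [2.0, 0.5])[:, PD])  # [20. 40. 15. 40.]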

Example 48

Project: GAE-Bulk-Mailer Source File: loaddata.py
    def handle(self, *fixture_labels, **options):

        ignore = options.get('ignore')
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line."
            )

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None:   open,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(upath(path))
            else:
                # It's a models.py module
                app_module_paths.append(upath(app.__file__))

        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    parts = fixture_label.split('.')

                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats():
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." % fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format." %
                                (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))

                        label_found = False
                        for combo in product([using, None], formats, compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(
                                p for p in [
                                    fixture_name, database, format, compression_format
                                ]
                                if p
                            )

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            else:
                                try:
                                    if label_found:
                                        raise CommandError("Multiple fixtures named '%s' in %s. Aborting." %
                                            (fixture_name, humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(format, fixture, using=using, ignorenonexistent=ignore)

                                    for obj in objects:
                                        objects_in_fixture += 1
                                        if router.allow_syncdb(using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError, IntegrityError) as e:
                                                e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                                        'app_label': obj.object._meta.app_label,
                                                        'object_name': obj.object._meta.object_name,
                                                        'pk': obj.object.pk,
                                                        'error_msg': force_text(e)
                                                    },)
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = ("Problem installing fixture '%s': %s" % (full_path, e),)
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)" %
                                            (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e,)
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" % (
                    loaded_object_count, fixture_count))
            else:
                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % (
                    loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
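
The os.path.dirname call worth noting here is the one that builds app_fixtures: for every app module path, the fixtures directory is assumed to sit right next to the models module. A standalone sketch of that convention, with a hypothetical module path:

import os

def fixtures_dir_for(module_file):
    # dirname() strips the filename, leaving the app directory;
    # by Django convention the fixtures directory is its sibling.
    return os.path.join(os.path.dirname(module_file), 'fixtures')

print(fixtures_dir_for('/srv/myapp/models.py'))  # /srv/myapp/fixtures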

Example 49

Project: auto-sklearn Source File: smbo.py
    def run_smbo(self, max_iters=1000):
        global evaluator

        self.watcher.start_task('SMBO')

        # == first things first: load the datamanager
        self.reset_data_manager()
        
        # == Initialize SMBO stuff
        # first create a scenario
        seed = self.seed # TODO
        num_params = len(self.config_space.get_hyperparameters())
        # allocate a run history
        run_history = RunHistory()
        meta_runhistory = RunHistory()
        meta_runs_dataset_indices = {}
        num_run = self.start_num_run
        instance_id = self.dataset_name + SENTINEL

        # == Train on subset
        #    before doing anything, let us run the default_cfg
        #    on a subset of the available data to ensure that
        #    we at least have some models
        #    we will try two different ratios of decreasing magnitude,
        #    in the hope that at least the last (smallest) one will
        #    be small enough to get a model
        n_data = self.datamanager.data['X_train'].shape[0]
        subset_ratio = 10000. / n_data
        if subset_ratio >= 0.5:
            subset_ratio = 0.33
            subset_ratios = [subset_ratio, subset_ratio * 0.10]
        else:
            subset_ratios = [subset_ratio, 500. / n_data]
        self.logger.info("Training default configurations on a subset of "
                         "%d/%d data points." %
                         (int(n_data * subset_ratio), n_data))

        # the time limit for these function evaluations is rigorously
        # set to only 1/2 of a full function evaluation
        subset_time_limit = max(5, int(self.func_eval_time_limit / 2))
        # the configs we want to run on the data subset are:
        # 1) the default configs
        # 2) a set of configs we selected for training on a subset
        subset_configs = [self.config_space.get_default_configuration()] \
                          + self.collect_additional_subset_defaults()
        subset_config_successful = [False] * len(subset_configs)
        for subset_config_id, next_config in enumerate(subset_configs):
            for i, ratio in enumerate(subset_ratios):
                self.reset_data_manager()
                n_data_subsample = int(n_data * ratio)

                # run the config, but throw away the result afterwards
                # since this cfg was evaluated only on a subset
                # and we don't want to confuse SMAC
                self.logger.info("Starting to evaluate %d on SUBSET "
                                 "with size %d and time limit %ds.",
                                 num_run, n_data_subsample,
                                 subset_time_limit)
                self.logger.info(next_config)
                _info = eval_with_limits(
                    datamanager=self.datamanager, backend=self.backend,
                    config=next_config, seed=seed, num_run=num_run,
                    resampling_strategy=self.resampling_strategy,
                    resampling_strategy_args=self.resampling_strategy_args,
                    memory_limit=self.memory_limit,
                    func_eval_time_limit=subset_time_limit,
                    subsample=n_data_subsample,
                    logger=self.logger)
                (duration, result, _, additional_run_info, status) = _info
                self.logger.info("Finished evaluating %d. configuration on SUBSET. "
                                 "Duration %f; loss %f; status %s; additional run "
                                 "info: %s ", num_run, duration, result,
                                 str(status), additional_run_info)

                num_run += 1
                if i < len(subset_ratios) - 1:
                    if status != StatusType.SUCCESS:
                        # Do not increase num_run here, because we will try
                        # the same configuration with less data
                        self.logger.info("A CONFIG did not finish "
                                         "for subset ratio %f -> going smaller",
                                         ratio)
                        continue
                    else:
                        self.logger.info("Finished SUBSET training successfully"
                                         " with ratio %f", ratio)
                        subset_config_successful[subset_config_id] = True
                        break
                else:
                    if status != StatusType.SUCCESS:
                        self.logger.info("A CONFIG did not finish "
                                         "for subset ratio %f.",
                                         ratio)
                        continue
                    else:
                        self.logger.info("Finished SUBSET training successfully"
                                         " with ratio %f", ratio)
                        subset_config_successful[subset_config_id] = True
                        break

        # Use the first non-failing configuration from the subsets as the new
        #  default configuration -> this guards us against the random forest
        # failing on large, sparse datasets
        default_cfg = None
        for subset_config_id, next_config in enumerate(subset_configs):
            if subset_config_successful[subset_config_id]:
                default_cfg = next_config
                break
        if default_cfg is None:
            default_cfg = self.config_space.get_default_configuration()

        # == METALEARNING suggestions
        # we start by evaluating the defaults on the full dataset again
        # and add the suggestions from metalearning behind it

        if self.metadata_directory is None:
            metalearning_directory = os.path.dirname(
                autosklearn.metalearning.__file__)
            # There is no multilabel data in OpenML
            if self.task == MULTILABEL_CLASSIFICATION:
                meta_task = BINARY_CLASSIFICATION
            else:
                meta_task = self.task
            metadata_directory = os.path.join(
                metalearning_directory, 'files',
                '%s_%s_%s' % (METRIC_TO_STRING[self.metric],
                              TASK_TYPES_TO_STRING[meta_task],
                              'sparse' if self.datamanager.info['is_sparse']
                              else 'dense'))
            self.metadata_directory = metadata_directory

        self.logger.info('Metadata directory: %s', self.metadata_directory)
        meta_base = MetaBase(self.config_space, self.metadata_directory)

        metafeature_calculation_time_limit = int(
            self.total_walltime_limit / 4)
        metafeature_calculation_start_time = time.time()
        meta_features = self._calculate_metafeatures_with_limits(
            metafeature_calculation_time_limit)
        metafeature_calculation_end_time = time.time()
        metafeature_calculation_time_limit -= (
            metafeature_calculation_end_time -
            metafeature_calculation_start_time)

        if metafeature_calculation_time_limit < 1:
            self.logger.warning('Time limit for metafeature calculation less '
                                'than 1 second (%f). Skipping calculation '
                                'of metafeatures for encoded dataset.',
                                metafeature_calculation_time_limit)
            meta_features_encoded = None
        else:
            with warnings.catch_warnings():
                warnings.showwarning = self._send_warnings_to_log
                self.datamanager.perform1HotEncoding()
            meta_features_encoded = \
                self._calculate_metafeatures_encoded_with_limits(
                    metafeature_calculation_time_limit)

        # In case there is a problem calculating the encoded meta-features
        if meta_features is None:
            if meta_features_encoded is not None:
                meta_features = meta_features_encoded
        else:
            if meta_features_encoded is not None:
                meta_features.metafeature_values.update(
                    meta_features_encoded.metafeature_values)

        if meta_features is not None:
            meta_base.add_dataset(instance_id, meta_features)
            # Do mean imputation of the meta-features - should be done
            # specifically for each prediction model!
            all_metafeatures = meta_base.get_metafeatures(
                features=list(meta_features.keys()))
            all_metafeatures.fillna(all_metafeatures.mean(), inplace=True)

            with warnings.catch_warnings():
                warnings.showwarning = self._send_warnings_to_log
                metalearning_configurations = self.collect_metalearning_suggestions(
                    meta_base)
            if metalearning_configurations is None:
                metalearning_configurations = []
            self.reset_data_manager()

            self.logger.info('%s', meta_features)

            # Convert meta-features into a dictionary because the scenario
            # expects a dictionary
            meta_features_dict = {}
            for dataset, series in all_metafeatures.iterrows():
                meta_features_dict[dataset] = series.values
            meta_features_list = []
            for meta_feature_name in all_metafeatures.columns:
                meta_features_list.append(meta_features[meta_feature_name].value)
            meta_features_list = np.array(meta_features_list).reshape((1, -1))
            self.logger.info(list(meta_features_dict.keys()))

            meta_runs = meta_base.get_all_runs(METRIC_TO_STRING[self.metric])
            meta_runs_index = 0
            try:
                meta_durations = meta_base.get_all_runs('runtime')
                read_runtime_data = True
            except KeyError:
                read_runtime_data = False
                self.logger.critical('Cannot read runtime data.')
                if self.acquisition_function == 'EIPS':
                    self.logger.critical('Reverting to acquisition function EI!')
                    self.acquisition_function = 'EI'

            for meta_dataset in meta_runs.index:
                meta_dataset_start_index = meta_runs_index
                for meta_configuration in meta_runs.columns:
                    if np.isfinite(meta_runs.loc[meta_dataset, meta_configuration]):
                        try:
                            config = meta_base.get_configuration_from_algorithm_index(
                                meta_configuration)
                            cost = meta_runs.loc[meta_dataset, meta_configuration]
                            if read_runtime_data:
                                runtime = meta_durations.loc[meta_dataset,
                                                             meta_configuration]
                            else:
                                runtime = 1
                            # TODO read out other status types!
                            meta_runhistory.add(config, cost, runtime,
                                                StatusType.SUCCESS,
                                                instance_id=meta_dataset)
                            meta_runs_index += 1
                        except Exception:
                            # TODO maybe add warning
                            pass

                meta_runs_dataset_indices[meta_dataset] = (
                    meta_dataset_start_index, meta_runs_index)
        else:
            if self.acquisition_function == 'EIPS':
                self.logger.critical('Reverting to acquisition function EI!')
                self.acquisition_function = 'EI'
            meta_features_list = []
            meta_features_dict = {}
            metalearning_configurations = []

        self.scenario = AutoMLScenario(config_space=self.config_space,
                                       limit=self.total_walltime_limit,
                                       cutoff_time=self.func_eval_time_limit,
                                       metafeatures=meta_features_dict,
                                       output_dir=self.backend.temporary_directory,
                                       shared_model=self.shared_mode)

        types = get_types(self.config_space, self.scenario.feature_array)
        if self.acquisition_function == 'EI':
            rh2EPM = RunHistory2EPM4Cost(num_params=num_params,
                                         scenario=self.scenario,
                                         success_states=None,
                                         impute_censored_data=False,
                                         impute_state=None)
            model = RandomForestWithInstances(types,
                                              instance_features=meta_features_list,
                                              seed=1, num_trees=10)
            smac = SMBO(self.scenario, model=model,
                        rng=seed)
        elif self.acquisition_function == 'EIPS':
            rh2EPM = RunHistory2EPM4EIPS(num_params=num_params,
                                         scenario=self.scenario,
                                         success_states=None,
                                         impute_censored_data=False,
                                         impute_state=None)
            model = UncorrelatedMultiObjectiveRandomForestWithInstances(
                ['cost', 'runtime'], types, num_trees = 10,
                instance_features=meta_features_list, seed=1)
            acquisition_function = EIPS(model)
            smac = SMBO(self.scenario,
                        acquisition_function=acquisition_function,
                        model=model, runhistory2epm=rh2EPM, rng=seed)
        else:
            raise ValueError('Unknown acquisition function value %s!' %
                             self.acquisition_function)

        # Build a runtime model
        # runtime_rf = RandomForestWithInstances(types,
        #                                        instance_features=meta_features_list,
        #                                        seed=1, num_trees=10)
        # runtime_rh2EPM = RunHistory2EPM4EIPS(num_params=num_params,
        #                                      scenario=self.scenario,
        #                                      success_states=None,
        #                                      impute_censored_data=False,
        #                                      impute_state=None)
        # X_runtime, y_runtime = runtime_rh2EPM.transform(meta_runhistory)
        # runtime_rf.train(X_runtime, y_runtime[:, 1].flatten())
        X_meta, Y_meta = rh2EPM.transform(meta_runhistory)
        # Transform Y_meta on a per-dataset base
        for meta_dataset in meta_runs_dataset_indices:
            start_index, end_index = meta_runs_dataset_indices[meta_dataset]
            end_index += 1  # Python indexing
            # Clip losses at 2.0, renormalize against the per-dataset minimum,
            # then clip again so no single dataset dominates the meta-model.
            Y_meta[start_index:end_index, 0][
                Y_meta[start_index:end_index, 0] > 2.0] = 2.0
            dataset_minimum = np.min(Y_meta[start_index:end_index, 0])
            Y_meta[start_index:end_index, 0] = 1 - (
                (1. - Y_meta[start_index:end_index, 0]) /
                (1. - dataset_minimum))
            Y_meta[start_index:end_index, 0][
                Y_meta[start_index:end_index, 0] > 2.0] = 2.0

        # == first, evaluate all metalearning and default configurations
        finished = False
        for i, next_config in enumerate(([default_cfg] +
                                          metalearning_configurations)):
            # Do not evaluate default configurations more than once
            if i >= len([default_cfg]) and next_config in [default_cfg]:
                continue

            config_name = 'meta-learning' if i >= len([default_cfg]) \
                else 'default'

            self.logger.info("Starting to evaluate %d. configuration "
                             "(%s configuration) with time limit %ds.",
                             num_run, config_name, self.func_eval_time_limit)
            self.logger.info(next_config)
            self.reset_data_manager()
            info = eval_with_limits(datamanager=self.datamanager,
                                    backend=self.backend,
                                    config=next_config,
                                    seed=seed, num_run=num_run,
                                    resampling_strategy=self.resampling_strategy,
                                    resampling_strategy_args=self.resampling_strategy_args,
                                    memory_limit=self.memory_limit,
                                    func_eval_time_limit=self.func_eval_time_limit,
                                    logger=self.logger)
            (duration, result, _, additional_run_info, status) = info
            run_history.add(config=next_config, cost=result,
                            time=duration, status=status,
                            instance_id=instance_id, seed=seed,
                            additional_info=additional_run_info)
            run_history.update_cost(next_config, result)
            self.logger.info("Finished evaluating %d. configuration. "
                             "Duration %f; loss %f; status %s; additional run "
                             "info: %s ", num_run, duration, result,
                             str(status), additional_run_info)
            num_run += 1
            if smac.incumbent is None:
                smac.incumbent = next_config
            elif result < run_history.get_cost(smac.incumbent):
                smac.incumbent = next_config

            if self.scenario.shared_model:
                pSMAC.write(run_history=run_history,
                            output_directory=self.scenario.output_dir,
                            num_run=self.seed)

            if self.watcher.wall_elapsed(
                    'SMBO') > self.total_walltime_limit:
                finished = True

            if finished:
                break

        # == after metalearning run SMAC loop
        smac.runhistory = run_history
        smac_iter = 0
        while not finished:
            if self.scenario.shared_model:
                pSMAC.read(run_history=run_history,
                           output_directory=self.scenario.output_dir,
                           configuration_space=self.config_space,
                           logger=self.logger)

            next_configs = []
            time_for_choose_next = -1
            try:
                X_cfg, Y_cfg = rh2EPM.transform(run_history)

                if not run_history.empty():
                    # Update costs by normalization
                    dataset_minimum = np.min(Y_cfg[:, 0])
                    Y_cfg[:, 0] = 1 - ((1. - Y_cfg[:, 0]) /
                                       (1. - dataset_minimum))
                    Y_cfg[:, 0][Y_cfg[:, 0] > 2] = 2

                if len(X_meta) > 0 and len(X_cfg) > 0:
                    pass
                    #X_cfg = np.concatenate((X_meta, X_cfg))
                    #Y_cfg = np.concatenate((Y_meta, Y_cfg))
                elif len(X_meta) > 0:
                    X_cfg = X_meta.copy()
                    Y_cfg = Y_meta.copy()
                elif len(X_cfg) > 0:
                    X_cfg = X_cfg.copy()
                    Y_cfg = Y_cfg.copy()
                else:
                    raise ValueError('No training data for SMAC random forest!')

                self.logger.info('Using %d training points for SMAC.' %
                                 X_cfg.shape[0])
                choose_next_start_time = time.time()
                next_configs_tmp = smac.choose_next(X_cfg, Y_cfg,
                                                    num_interleaved_random=110,
                                                    num_configurations_by_local_search=10,
                                                    num_configurations_by_random_search_sorted=100)
                time_for_choose_next = time.time() - choose_next_start_time
                self.logger.info('Used %g seconds to find next '
                                 'configurations' % (time_for_choose_next))
                next_configs.extend(next_configs_tmp)
            # TODO put Exception here!
            except Exception as e:
                self.logger.error(e)
                self.logger.error("Error in getting next configurations "
                                  "with SMAC. Using random configuration!")
                next_config = self.config_space.sample_configuration()
                next_configs.append(next_config)

            models_fitted_this_iteration = 0
            start_time_this_iteration = time.time()
            for next_config in next_configs:
                x_runtime = impute_inactive_values(next_config)
                x_runtime = impute_inactive_values(x_runtime).get_array()
                # predicted_runtime = runtime_rf.predict_marginalized_over_instances(
                #     x_runtime.reshape((1, -1)))
                # predicted_runtime = np.exp(predicted_runtime[0][0][0]) - 1

                self.logger.info("Starting to evaluate %d. configuration (from "
                                 "SMAC) with time limit %ds.", num_run,
                                 self.func_eval_time_limit)
                self.logger.info(next_config)
                self.reset_data_manager()
                info = eval_with_limits(datamanager=self.datamanager,
                                        backend=self.backend,
                                        config=next_config,
                                        seed=seed, num_run=num_run,
                                        resampling_strategy=self.resampling_strategy,
                                        resampling_strategy_args=self.resampling_strategy_args,
                                        memory_limit=self.memory_limit,
                                        func_eval_time_limit=self.func_eval_time_limit,
                                        logger=self.logger)
                (duration, result, _, additional_run_info, status) = info
                run_history.add(config=next_config, cost=result,
                                time=duration, status=status,
                                instance_id=instance_id, seed=seed,
                                additional_info=additional_run_info)
                run_history.update_cost(next_config, result)

                #self.logger.info('Predicted runtime %g, true runtime %g',
                #                 predicted_runtime, duration)

                # TODO add unittest to make sure everything works fine and
                # this does not get outdated!
                if smac.incumbent is None:
                    smac.incumbent = next_config
                elif result < run_history.get_cost(smac.incumbent):
                    smac.incumbent = next_config

                self.logger.info("Finished evaluating %d. configuration. "
                                 "Duration: %f; loss: %f; status %s; additional "
                                 "run info: %s ", num_run, duration, result,
                                 str(status), additional_run_info)
                smac_iter += 1
                num_run += 1

                models_fitted_this_iteration += 1
                time_used_this_iteration = time.time() - start_time_this_iteration

                if max_iters is not None:
                    finished = (smac_iter >= max_iters)

                if self.watcher.wall_elapsed(
                        'SMBO') > self.total_walltime_limit:
                    finished = True

                if models_fitted_this_iteration >= 2 and \
                        time_for_choose_next > 0 and \
                        time_used_this_iteration > time_for_choose_next:
                    break
                elif time_for_choose_next <= 0 and \
                        models_fitted_this_iteration >= 1:
                    break
                elif models_fitted_this_iteration >= 50:
                    break

                if finished:
                    break

            if self.scenario.shared_model:
                pSMAC.write(run_history=run_history,
                            output_directory=self.scenario.output_dir,
                            num_run=self.seed)

        self.runhistory = run_history
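
The dirname usage in this example anchors a metadata directory to an installed package: os.path.dirname(autosklearn.metalearning.__file__) yields the package directory, and the shipped metadata sits in a files/ subdirectory beneath it. The same trick with a stdlib package, purely as an illustrative sketch (the files/ subdirectory is hypothetical here):

import os
import json  # any imported package works; json is just a convenient stdlib example

package_dir = os.path.dirname(json.__file__)   # .../lib/pythonX.Y/json
data_dir = os.path.join(package_dir, 'files')  # hypothetical data subdirectory
print(package_dir)
print(data_dir)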

Example 50

Project: dirigible-spreadsheet Source File: test_2814_PublicWorksheets.py
    def test_public_worksheets_visible_readonly_and_copiable_for_others(self):
        # * Harold logs in and creates a new sheet
        sheet_id = self.login_and_create_new_sheet()

        # * He gives the sheet a catchy name
        self.set_sheet_name('spaceshuttle')

        # * He enters some formulae n stuff
        self.enter_cell_text(2, 3, '23')
        self.enter_cell_text(2, 4, '=my_add_function(B3)')
        self.prepend_usercode('my_add_function = lambda x : x + 2')
        self.wait_for_cell_value(2, 4, '25')

        # * He notes that the tooltip for the security icon indicates that the
        # sheet is private
        self.waitForButtonToIndicateSheetIsPublic(False)

        # * He clicks on the security icon
        self.selenium.click('id=id_security_button')

        # He sees a tickbox, currently unticked, saying make worksheet public
        self.wait_for_element_visibility(
                'id=id_security_form', True)
        self.wait_for_element_visibility(
                'id=id_security_form_public_sheet_checkbox', True)

        self.assertEquals(
            self.selenium.get_value('id=id_security_form_public_sheet_checkbox'),
            'off'
        )
        # He ticks it and dismisses the dialog
        self.selenium.click('id=id_security_form_public_sheet_checkbox')
        self.selenium.click('id=id_security_form_ok_button')

        # * He notes that the tooltip for the security icon indicates that the
        # sheet is public
        self.waitForButtonToIndicateSheetIsPublic(True)

        # He notes down the URL and emails it to his colleague Harriet
        harolds_url = self.browser.current_url

        # He logs out
        self.logout()

        # * Later on, Harriet logs into teh Dirigible and heads on over to
        #   Harold's spreadsheet
        self.login(self.get_my_usernames()[1])
        self.go_to_url(harolds_url)

        # She sees the values n stuff
        self.wait_for_grid_to_appear()
        self.wait_for_cell_value(2, 4, '25')

        # * She notices that all toolbar icons are missing,
        # apart from download-as-csv
        map(
            lambda e: self.wait_for_element_presence(e, False),
            [
                'id=id_import_button',
                'id=id_cut_button',
                'id=id_copy_button',
                'id=id_paste_button',
                'id=id_security_button',
            ]
        )
        self.wait_for_element_visibility('id=id_export_button', True)

        # * She tries to edit some formulae, but can't
        self.selenium.double_click(
                self.get_cell_locator(1, 1)
        )
        self.selenium.focus(
                self.get_cell_locator(1, 1)
        )
        time.sleep(1)
        self.wait_for_element_presence(
                self.get_active_cell_editor_locator(),
                False
        )

        # * she tries to edit the cell again, using the formula bar, but cannot
        self.assertEquals(
            self.selenium.get_attribute(self.get_formula_bar_locator() + '@readonly'),
            'true'
        )

        # * She tries to edit some usercode, but can't
        original_code = self.get_usercode()
        self.selenium.get_eval('window.editor.focus()')
        self.human_key_press(key_codes.LETTER_A)
        time.sleep(1)
        self.wait_for_usercode_editor_content(original_code)

        # * She tries to edit the sheet name, but can't

        # * mouses over the sheet name and notes that the appearance
        #   does not change to indicate that it's editable
        self.selenium.mouse_over('id=id_sheet_name')
        time.sleep(1)
        self.wait_for(
            lambda: self.get_css_property('#id_sheet_name', 'background-color') == 'transparent',
            lambda: 'ensure sheet name background stays normal')

        # * He clicks on the sheet name, the sheetname edit textarea does
        #   not appear,
        self.selenium.click('id=id_sheet_name')
        time.sleep(1)
        self.wait_for(
            lambda: not self.is_element_present('id=edit-id_sheet_name'),
            lambda: 'ensure editable sheetname does not appear')

        def download_as_csv():
            self.selenium.click('id=id_export_button')
            self.wait_for_element_visibility('id=id_export_dialog', True)
            download_url = self.selenium.get_attribute('id=id_export_csv_excel_version@href')
            download_url = urljoin(self.browser.current_url, download_url)

            stream = self.get_url_with_session_cookie(download_url)
            self.assertEquals(stream.info().gettype(), "text/csv")
            self.assertEquals(
                    stream.info()['Content-Disposition'],
                    'attachment; filename=spaceshuttle.csv'
            )

            expected_file_name = path.join(
                    path.dirname(__file__),
                    "test_data", "public_sheet_csv_file.csv"
            )
            with open(expected_file_name) as expected_file:
                self.assertEquals(
                    stream.read().replace("\r\n", "\n"),
                    expected_file.read().replace("\r\n", "\n")
                )

        # * She confirms that she can download a csv of the sheet
        download_as_csv()

        # * She uses some l33t haxx0ring skillz to try and send a
        #   setcellformula Ajax call directly
        # It doesn't work.
        with self.assertRaises(HTTPError):
            response = self.get_url_with_session_cookie(
                    urljoin(harolds_url, '/set_cell_formula/'),
                    data={'column':3, 'row': 4, 'formula': '=jeffk'}
            )

        # * "Aha!" she says, as she notices a link allowing her to copy the sheet,
        self.wait_for_element_visibility('id_copy_sheet_link', True)
        # which she then clicks
        self.selenium.click('id=id_copy_sheet_link')

        # She is taken to a sheet of her own
        self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
        self.wait_for_grid_to_appear()

        # It looks a lot like Harold's but has a different url
        harriets_url = self.browser.current_url
        self.assertFalse(harriets_url == harolds_url)
        self.wait_for_cell_value(2, 4, '25')

        # And she is able to change cell formulae
        self.enter_cell_text(2, 3, '123')
        self.wait_for_cell_value(2, 4, '125')

        # And she is able to change usercode
        self.append_usercode('worksheet[2, 4].value += 100')
        self.wait_for_cell_value(2, 4, '225')

        # And she is well pleased. So much so that she emails two
        # friends about these two sheets (and they tell two
        # friends, and they tell two friends, and so on, and so
        # on.  $$$$)
        self.logout()

        # * Helga is a Dirigible user, but she isn't logged in.
        #   She goes to Harold's page, and sees that it is good.
        self.go_to_url(harolds_url)
        self.wait_for_grid_to_appear()
        self.wait_for_cell_value(2, 4, '25')

        # She clicks on the big copy button, and is taken to the
        # login form
        self.selenium.click('id=id_copy_sheet_link')
        self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
        self.wait_for_element_visibility('id_login_form_wrap', True)

        # She logs in, and is taken straight to her new copy of
        # Harold's sheet
        self.login(
                self.get_my_usernames()[2],
                already_on_login_page=True
        )
        self.wait_for_grid_to_appear()

        helgas_url = self.browser.current_url
        self.assertFalse(helgas_url == harolds_url)
        self.assertFalse(helgas_url == harriets_url)
        self.wait_for_cell_value(2, 4, '25')

        # Helga makes some edits, which she considers superior to
        # Harriet's
        self.enter_cell_text(2, 3, '1000')
        self.append_usercode('worksheet[2, 4].value += 1000')
        self.wait_for_cell_value(2, 4, '2002')

        # Helga now decides to go and see Harriet's sheet, to
        # laugh at the inferiority of Harriet's fork
        # Her access is denied.
        self.assert_HTTP_error(harriets_url, 403)

        # * Harriet's other friend, Hugh, is not a Dirigible user.... yet.
        # He goes to Harold's sheet and sees that it is good
        self.logout()
        self.go_to_url(harolds_url)
        self.wait_for_grid_to_appear()
        self.wait_for_cell_value(2, 4, '25')

        # So good that he clicks the copy button too, despite never
        # having heard of this Dirigible thingy
        self.selenium.click('id=id_copy_sheet_link')
        self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)

        # He is taken to the login form,
        self.wait_for_element_visibility('id_login_form_wrap', True)

        # on which he spots a nice friendly link inviting him to register.
        # It says 'free' and everyfink.
        self.wait_for_element_to_appear('id=id_login_signup_link')
        self.wait_for_element_to_appear('id=id_login_signup_blurb')
        self.assertTrue("free" in self.get_text('id=id_login_signup_blurb'))

        # Hugh goes through the whole registration rigmarole,
        self.selenium.click('id=id_login_signup_link')
        self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
        username = self.get_my_username() + "_x"
        self.email_address = 'harold.testuser-%[email protected]' % (username,)
        password = "p4ssw0rd"
        self.selenium.type('id=id_username', username)
        self.selenium.type('id=id_email', self.email_address)
        self.selenium.type('id=id_password1', password)
        self.selenium.type('id=id_password2', password)
        self.click_link('id_signup_button')

        email_from, email_to, subject, message = self.pop_email_for_client(self.email_address)
        self.assertEquals(subject, 'Dirigible Beta Sign-up')
        confirm_url_re = re.compile(
            r'<(http://projectdirigible\.com/signup/activate/[^>]+)>'
        )
        match = confirm_url_re.search(message)
        self.assertTrue(match)
        confirmation_url = match.group(1).replace('projectdirigible.com', SERVER_IP)

        # * Hugh then logs in
        self.go_to_url(confirmation_url)
        self.login(username, password, already_on_login_page=True)

        # and has his socks knocked off by the presence of the copy of Harold's
        # sheet in his dashboard
        self.selenium.click('link=spaceshuttle')

        # and it has the copied content
        self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
        self.wait_for_grid_to_appear()
        self.wait_for_cell_value(2, 4, '25')

        # Harold logs in and sees that his original sheet is unharmed by all of
        # the other users editing theirs
        self.login(self.get_my_usernames()[0])
        self.go_to_url(harolds_url)
        self.wait_for_grid_to_appear()
        self.wait_for_cell_value(2, 4, '25')
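
Buried in download_as_csv() above is the classic test-data idiom for this API: path.join(path.dirname(__file__), "test_data", ...). Resolving against the test module's own location keeps the lookup independent of the current working directory. A minimal sketch of the same idiom, assuming a test_data directory next to the module:

from os import path

def test_data_path(*parts):
    # __file__ is this module's own path; dirname() anchors the lookup to the
    # module's directory rather than the current working directory.
    return path.join(path.dirname(path.abspath(__file__)), "test_data", *parts)

print(test_data_path("public_sheet_csv_file.csv"))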