sys.modules.keys

Here are examples of the Python API sys.modules.keys taken from open source projects, showing how real code checks which modules are currently imported.

169 Examples
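
Before the project examples, here is a minimal standalone sketch (not taken from any project below) of what sys.modules.keys exposes: the names of every module imported so far in the running interpreter, which makes membership tests its most common use.

import sys

# sys.modules maps module names to already-imported module objects;
# its keys are therefore the names of everything imported so far.
print(sorted(sys.modules.keys())[:10])

# The usual pattern: act on an optional dependency only if some other
# code has already imported it ('json' is just an illustrative name).
if 'json' in sys.modules.keys():
    print("json has already been imported")

On Python 3, sys.modules.keys() is a live view rather than a list, so code that iterates it while imports may still happen usually takes a copy first with list() or set(), much as the freezegun example further down copies sys.modules.items() before patching.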

Example 101

def mpl_style_cb(key):
    warnings.warn(pc_mpl_style_deprecation_warning, FutureWarning,
                  stacklevel=5)

    import sys
    from pandas.tools.plotting import mpl_stylesheet
    global style_backup

    val = cf.get_option(key)

    if 'matplotlib' not in sys.modules.keys():
        if not val:  # starting up, we get reset to None
            return val
        raise Exception("matplotlib has not been imported. aborting")

    import matplotlib.pyplot as plt

    if val == 'default':
        style_backup = dict([(k, plt.rcParams[k]) for k in mpl_stylesheet])
        plt.rcParams.update(mpl_stylesheet)
    elif not val:
        if style_backup:
            plt.rcParams.update(style_backup)

    return val

Example 102

Project: hifive
Source File: create_hic_heatmap.py
def run(args):
    if 'mpi4py' in sys.modules.keys():
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        num_procs = comm.Get_size()
    else:
        comm = None
        rank = 0
        num_procs = 1
    if not args.image is None and args.pdf and "pyx" not in sys.modules.keys():
        if rank == 0:
            print >> sys.stderr, ("-p/--pdf requires the package 'pyx'"),
        sys.exit(1)
    if args.chroms is None:
        chroms = []
    else:
        chroms = args.chroms.split(',')
        if len(chroms) == 1 and chroms[0] == '':
            chroms = []
    hic = HiC(args.project, 'r', silent=args.silent)
    hic.write_heatmap(args.output, binsize=args.binsize, includetrans=args.trans,
                      datatype=args.datatype, chroms=chroms, dynamically_binned=args.dynamic,
                      expansion_binsize=args.expbinsize, minobservations=args.minobs,
                      searchdistance=args.search, removefailed=args.remove, format=args.format)
    if rank > 0:
        sys.exit(0)
    if not args.image is None:
        if args.format == 'txt':
            if rank == 0:
                print >> sys.stderr, ("Plotting is only available for non-txt formats.\n"),
            return None
        kwargs = {}
        for arg in args.keywords:
            temp = arg.split("=")
            if temp[1] in ["True", "TRUE", "true"]:
                temp[1] = True
            elif temp[1] in ["False", "FALSE", "false"]:
                temp[1] = False
            elif temp[1][0] == "(":
                temp[1] = temp[1].strip('()').split(',')
                for i in range(len(temp[1])):
                    temp[1][i] = int(temp[1][i])
                temp[1] = tuple(temp[1])
            elif temp[1][0] == "[":
                temp[1] = temp[1].strip('[]').split(',')
                for i in range(len(temp[1])):
                    temp[1][i] = int(temp[1][i])
            else:
                try:
                    temp[1] = int(temp[1])
                except:
                    try:
                        temp[1] = float(temp[1])
                    except:
                        # strip off extra characters introduced by galaxy into color format
                        temp[1] = temp[1].replace('__pd__','')
            kwargs[temp[0]] = temp[1]
        if 'symmetricscaling' not in kwargs:
            if args.datatype == 'enrichment':
                kwargs['symmetricscaling'] = True
            else:
                kwargs['symmetricscaling'] = False
        img, minscore, maxscore = plot_hic_heatmap(args.output, returnscale=True, silent=args.silent,
                                                   format=args.format, **kwargs)
        if not args.pdf:
            img_format = args.image.split('.')[-1].upper()
            if img_format not in ['PNG', 'TIF', 'JPG', 'JPEG']:
                img_format = 'PNG'
            img.save(args.image, img_format)
        else:
            unit.set(defaultunit="cm")
            text.set(mode="latex")
            text.preamble(r"\usepackage{times}")
            text.preamble(r"\usepackage{sansmath}")
            text.preamble(r"\sansmath")
            text.preamble(r"\renewcommand*\familydefault{\sfdefault}")
            hm = h5py.File(args.output, 'r')
            chroms = hm['chromosomes'][...]
            sizes = [0]
            minsize = 999999999
            for chrom in chroms:
                sizes.append(hm['%s.positions' % chrom].shape[0])
                minsize = min(minsize, sizes[-1])
            if len(sizes) > 2:
                sizes[1] += 0.5
                sizes[-1] += 0.5
                if len(sizes) > 3:
                    for i in range(2, len(sizes) - 1):
                        sizes[i] += 1.0
            for i in range(1, len(sizes)):
                sizes[i] += sizes[i - 1]
            height = width = max(5.0, sizes[-1] * 0.5 / minsize)
            for i in range(len(sizes)):
                sizes[i] = sizes[i] / sizes[-1] * height
            c = canvas.canvas()
            c.insert(bitmap.bitmap(0, 0, img, width=width))
            if args.legend:
                if 'min_color' in kwargs:
                    min_color = kwargs['min_color']
                else:
                    min_color = "0000ff"
                if 'mid_color' in kwargs:
                    mid_color = kwargs['mid_color']
                else:
                    mid_color = "ffffff"
                if 'max_color' in kwargs:
                    max_color = kwargs['max_color']
                else:
                    max_color = "ff0000"
                if 'logged' in kwargs:
                    logged = kwargs['logged']
                else:
                    logged = True
                c.insert(plot_key(min_score=minscore, max_score=maxscore, height=(height * 0.05),
                                  width=width, orientation='top', num_ticks=5, min_color=min_color,
                                  mid_color=mid_color, max_color=max_color, log_display=False),
                                  [trafo.translate(0, height * 1.05)])
                if logged:
                    label = "Log2 "
                else:
                    label = ""
                if args.datatype == 'enrichment':
                    c.text(width * 0.5, height * 1.1 + 0.75, "%sEnrichment" % label,
                           [text.halign.center, text.valign.bottom, text.size(-2)])
                elif args.datatype == 'raw':
                    c.text(width * 0.5, height * 1.1 + 0.75, "%sCounts" % label,
                           [text.halign.center, text.valign.bottom, text.size(-2)])
                else:
                    c.text(width * 0.5, height * 1.1 + 0.75, "%sNormalized Counts" % label,
                           [text.halign.center, text.valign.bottom, text.size(-2)])
            if args.names:
                for i, chrom in enumerate(chroms):
                    c.text(width + 0.25, height - (sizes[i] + sizes[i + 1]) / 2, 'chr%s' % chrom,
                           [text.halign.left, text.valign.middle, text.size(-2)])
            c.writePDFfile(args.image)
            if len(args.image.split('.')) <= 1 or args.image.split('.')[-1] != 'pdf':
                subprocess.call('mv %s.pdf %s' % (args.image, args.image), shell=True)

Example 103

Project: hifive
Source File: create_hic_project.py
def run(args):
    if 'mpi4py' in sys.modules.keys():
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        num_procs = comm.Get_size()
    else:
        comm = None
        rank = 0
        num_procs = 1
    if rank == 0:
        hic = HiC(args.output, 'w', silent=args.silent)
        hic.load_data(args.data)
        hic.filter_fends(mininteractions=args.minint, mindistance=args.mindist, maxdistance=args.maxdist)
        hic.save()
        for i in range(1, num_procs):
            comm.send(1, dest=i, tag=11)
    else:
        comm.recv(source=0, tag=11)
        hic = HiC(args.output, 'r', silent=True)
    hic.find_distance_parameters(minsize=args.minbin, numbins=args.numbins)
    if rank == 0:
        hic.save()
    return None
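
The hifive command-line tools in Examples 102, 103, and 106 all open with the same guard: use MPI.COMM_WORLD when mpi4py is present in sys.modules, otherwise fall back to a single serial process. Below is a condensed sketch of that pattern on its own; the try/except import is an assumption about how mpi4py would end up in sys.modules, not code from the project.

import sys

try:
    from mpi4py import MPI    # optional dependency; may not be installed
except ImportError:
    pass

if 'mpi4py' in sys.modules.keys():
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    num_procs = comm.Get_size()
else:
    comm = None    # no MPI available: run as a single serial process
    rank = 0
    num_procs = 1

print("process %i of %i" % (rank, num_procs))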

Example 104

Project: hifive
Source File: get_fivec_interval.py
def run(args):
    if not args.image is None and args.pdf and "pyx" not in sys.modules.keys():
        print sys.stderr, ("-p/--pdf requires the package 'pyx'"),
        return 1
    fivec = FiveC(args.project, 'r', silent=args.silent)
    if args.binsize == 0:
        arraytype = 'compact'
    else:
        arraytype = 'upper'
    kwargs = {}
    for arg in args.keywords:
        temp = arg.split("=")
        if temp[1] in ["True", "TRUE", "true"]:
            temp[1] = True
        elif temp[1] in ["False", "FALSE", "false"]:
            temp[1] = False
        elif temp[1][0] == "(":
            temp[1] = temp[1].strip('()').split(',')
            for i in range(len(temp[1])):
                temp[1][i] = int(temp[1][i])
            temp[1] = tuple(temp[1])
        elif temp[1][0] == "[":
            temp[1] = temp[1].strip('[]').split(',')
            for i in range(len(temp[1])):
                temp[1][i] = int(temp[1][i])
        else:
            try:
                temp[1] = int(temp[1])
            except:
                # strip off extra characters introduced by galaxy into color format
                temp[1] = temp[1].replace('__pd__','')
        kwargs[temp[0]] = temp[1]
    chrom = fivec.frags['regions']['chromosome'][args.region]
    if args.region2 is None:
        temp = fivec.cis_heatmap(region=args.region, binsize=args.binsize, start=args.start,
                                 stop=args.stop, datatype=args.datatype, arraytype=arraytype,
                                 returnmapping=True, skipfiltered=True, dynamically_binned=args.dynamic,
                                 expansion_binsize=args.expbinsize, minobservations=args.minobs,
                                 searchdistance=args.search, removefailed=args.remove)
    else:
        chrom2 = fivec.frags['regions']['chromosome'][args.region2]
        temp = fivec.trans_heatmap(region1=args.region, region2=args.region2, binsize=args.binsize, start1=args.start,
                                 stop1=args.stop, start2=args.start2, stop2=args.stop2, datatype=args.datatype,
                                 arraytype='full', returnmapping=True, skipfiltered=True,
                                 dynamically_binned=args.dynamic, expansion_binsize=args.expbinsize,
                                 minobservations=args.minobs, searchdistance=args.search, removefailed=args.remove)
    output = open(args.output, 'w')
    if args.region2 is None:
        if args.binsize == 0:
            data = temp[0]
            xmapping = temp[1][:, :2]
            ymapping = temp[2][:, :2]
            all_data = []
            for i in range(xmapping.shape[0]):
                for j in range(ymapping.shape[0]):
                    if data[i, j, 0] <= 0.0:
                        continue
                    if xmapping[i, 0] < ymapping[j, 0]:
                        all_data.append((xmapping[i, 0], xmapping[i, 1], ymapping[j, 0], ymapping[j, 1],
                                         numpy.log2(data[i, j, 0] / data[i, j, 1])))
                    else:
                        all_data.append((ymapping[j, 0], ymapping[j, 1], xmapping[i, 0], xmapping[i, 1],
                                         numpy.log2(data[i, j, 0] / data[i, j, 1])))
            all_data = numpy.array(all_data, dtype=numpy.dtype([('start1', numpy.int32), ('stop1', numpy.int32),
                                                                ('start2', numpy.int32), ('stop2', numpy.int32),
                                                                ('value', numpy.float32)]))
            order = numpy.lexsort((all_data['start2'], all_data['start1']))
            for i in order:
                print >> output, "chr%s\t%i\t%i\tchr%s\t%i\t%i\t%f" % (chrom, all_data['start1'][i],
                    all_data['stop1'][i], chrom, all_data['start2'][i], all_data['stop2'][i], all_data['value'][i])
        else:
            data = temp[0]
            mapping = temp[1][:, :2]
            pos = 0
            for i in range(mapping.shape[0] - 1):
                for j in range(i + 1, mapping.shape[0]):
                    if data[pos, 0] > 0.0 and data[pos, 1] > 0.0:
                        print >> output, "chr%s\t%i\t%i\tchr%s\t%i\t%i\t%f" % (chrom, mapping[i, 0], mapping[i, 1],
                            chrom, mapping[j, 0], mapping[j, 1], numpy.log2(data[pos, 0] / data[pos, 1]))
                    pos += 1
    else:
        data, mapping1, mapping2 = temp
        for i in range(mapping1.shape[0]):
            for j in range(mapping2.shape[0]):
                if data[i, j, 0] <= 0.0:
                    continue
                print >> output, "chr%s\t%i\t%i\tchr%s\t%i\t%i\t%f" % (chrom, mapping1[i, 0], mapping1[i, 1], chrom2,
                                                                       mapping2[j, 0], mapping2[j, 1],
                                                                       numpy.log2(data[i, j, 0] / data[i, j, 1]))
    output.close()
    if not args.image is None:
        if args.datatype == 'enrichment':
            symmetricscaling = True
        else:
            symmetricscaling = False
        if 'symmetricscaling' in kwargs:
            symmetricscaling = kwargs['symmetricscaling']
        if not args.region2 is None:
            img, minscore, maxscore = plot_full_array(data, returnscale=True, symmetricscaling=symmetricscaling,
                                                      silent=args.silent, **kwargs)
            offset = 0.0
            width = 5.0
            height = width / mapping1.shape[0] * mapping2.shape[0]
        elif arraytype == 'compact':
            img, minscore, maxscore = plot_full_array(data, returnscale=True, symmetricscaling=symmetricscaling,
                                                      silent=args.silent, **kwargs)
            offset = 0.0
            width = 5.0
            height = width / xmapping.shape[0] * ymapping.shape[0]
        else:
            if args.rotate:
                img, minscore, maxscore = plot_diagonal_from_upper_array(data, returnscale=True,
                                          symmetricscaling=symmetricscaling, silent=args.silent, **kwargs)
                offset = 2.5 / (mapping.shape[0] * 2 - 2)
                height = 2.5
                width = 5.0
            else:
                img, minscore, maxscore = plot_upper_array(data, returnscale=True, symmetricscaling=symmetricscaling,
                                                           silent=args.silent, **kwargs)
                offset = 0.0
                height = width = 5.0
        if args.pdf:
            c = canvas.canvas()
            c1 = canvas.canvas([canvas.clip(path.rect(0, 0, width, height))])
            c1.insert(bitmap.bitmap(-offset, -offset, img, width=width))
            c.insert(c1)
            if args.region2 is None:
                if args.ticks and args.binsize > 0:
                    c.stroke(path.line(0, 0, width, 0))
                    xmin = (mapping[0, 0] + mapping[0, 1]) / 2
                    xmax = (mapping[-1, 0] + mapping[-1, 1]) / 2
                    order = int(floor(log10(xmax - xmin))) - 1
                    step = int(floor((xmax - xmin) / (10.0 ** order * width))) * 10 ** order
                    values = numpy.arange(((xmin - 1) / step + 1) * step, (xmax / step) * step + 1, step)
                    ticks = (values - float(mapping[0, 0] + mapping[0, 1]) / 2) / (mapping[-1, 0] -
                                                                                   mapping[0, 0]) * width
                    for i in range(values.shape[0]):
                        c.stroke(path.line(ticks[i], 0, ticks[i], -0.25), [style.linewidth.Thin])
                        c.text(ticks[i], -0.3, "%0.2e" % values[i],
                               [text.valign.middle, text.halign.left, text.size(-2), trafo.rotate(-90)])
                    if not args.rotate:
                        c.stroke(path.line(width, 0, width, height))
                        for i in range(values.shape[0]):
                            c.stroke(path.line(width, height - ticks[i], width + 0.25, height - ticks[i]),
                                     [style.linewidth.Thin])
                            c.text(width + 0.3, height - ticks[i], "%0.2e" % values[i],
                                   [text.valign.middle, text.halign.left, text.size(-2)])
            elif args.ticks:
                c.stroke(path.line(0, 0, width, 0))
                xmin = (mapping1[0, 0] + mapping1[0, 1]) / 2
                xmax = (mapping1[-1, 0] + mapping1[-1, 1]) / 2
                order = int(floor(log10(xmax - xmin))) - 1
                step = int(floor((xmax - xmin) / (10.0 ** order * width))) * 10 ** order
                values = numpy.arange(((xmin - 1) / step + 1) * step, (xmax / step) * step + 1, step)
                ticks = (values - float(mapping1[0, 0] + mapping1[0, 1]) / 2) / (mapping1[-1, 0] -
                                                                               mapping1[0, 0]) * width
                for i in range(values.shape[0]):
                    c.stroke(path.line(ticks[i], 0, ticks[i], -0.25), [style.linewidth.Thin])
                    c.text(ticks[i], -0.3, "%0.2e" % values[i],
                           [text.valign.middle, text.halign.left, text.size(-2), trafo.rotate(-90)])

                c.stroke(path.line(width, 0, width, height))
                xmin = (mapping2[0, 0] + mapping2[0, 1]) / 2
                xmax = (mapping2[-1, 0] + mapping2[-1, 1]) / 2
                order = int(floor(log10(xmax - xmin))) - 1
                step = int(floor((xmax - xmin) / (10.0 ** order * width))) * 10 ** order
                values = numpy.arange(((xmin - 1) / step + 1) * step, (xmax / step) * step + 1, step)
                ticks = (values - float(mapping2[0, 0] + mapping2[0, 1]) / 2) / (mapping2[-1, 0] -
                                                                               mapping2[0, 0]) * height
                for i in range(values.shape[0]):
                    c.stroke(path.line(width, height - ticks[i], width + 0.25, height - ticks[i]),
                             [style.linewidth.Thin])
                    c.text(width + 0.3, height - ticks[i], "%0.2e" % values[i],
                           [text.valign.middle, text.halign.left, text.size(-2)])
            if args.legend:
                if 'min_color' in kwargs:
                    min_color = kwargs['min_color']
                else:
                    min_color = "0000ff"
                if 'mid_color' in kwargs:
                    mid_color = kwargs['mid_color']
                else:
                    mid_color = "ffffff"
                if 'max_color' in kwargs:
                    max_color = kwargs['max_color']
                else:
                    max_color = "ff0000"
                if 'logged' in kwargs:
                    logged = kwargs['logged']
                else:
                    logged = True
                c.insert(plot_key(min_score=minscore, max_score=maxscore, height=0.25, width=width,
                                  orientation='top', num_ticks=5, min_color=min_color,
                                  mid_color=mid_color, max_color=max_color,
                                  log_display=False), [trafo.translate(0, height + 0.25)])
                if logged:
                    label = "Log2 "
                else:
                    label = ""
                if args.datatype == 'enrichment':
                    c.text(width * 0.5, height + 0.8, "%sEnrichment" % label, [text.halign.center, text.valign.bottom,
                                                                       text.size(-2)])
                elif args.datatype == 'raw':
                    c.text(width * 0.5, height + 0.8, "%sCounts" % label, [text.halign.center, text.valign.bottom,
                                                                   text.size(-2)])
                else:
                    c.text(width * 0.5, height + 0.8, "%sNormalized Counts" % label,
                           [text.halign.center, text.valign.bottom, text.size(-2)])

            c.writePDFfile(args.image)
            if len(args.image.split('.')) <= 1 or args.image.split('.')[-1] != 'pdf':
                subprocess.call('mv %s.pdf %s' % (args.image, args.image), shell=True)

        else:
            img_format = args.image.split('.')[-1].upper()
            if img_format not in ['PNG', 'TIF', 'JPG', 'JPEG']:
                img_format = 'PNG'
            img.save(args.image, img_format)

Example 105

Project: hifive
Source File: get_hic_interval.py
def run(args):
    if not args.image is None and args.pdf and "pyx" not in sys.modules.keys():
        parser.error("-p/--pdf requires the package 'pyx'")
    hic = HiC(args.project, 'r', silent=args.silent)
    if 'binned' in hic.fends['/'].attrs and hic.fends['/'].attrs['binned'] is not None:
        binned = True
        chr_indices = 'bin_indices'
        fends = 'bins'
    else:
        binned = False
        chr_indices = 'chr_indices'
        fends = 'fends'
    if args.stop == 0 or args.stop is None:
        maxstop = hic.fends[fends]['stop'][hic.fends[chr_indices][hic.chr2int[args.chrom] + 1] - 1]
    else:
        maxstop = args.stop
    if args.stop is None:
        args.stop = maxstop
    if args.start is None:
        args.start = hic.fends[fends]['start'][hic.fends[chr_indices][hic.chr2int[args.chrom]]]
    if not args.chrom2 is None:
        if args.stop2 == 0 or args.stop2 is None:
            maxstop2 = hic.fends[fends]['stop'][hic.fends[chr_indices][hic.chr2int[args.chrom2] + 1] - 1]
        else:
            maxstop2 = args.stop2
        if args.stop2 is None:
            args.stop2 = maxstop2
        if args.start2 is None:
            args.start2 = hic.fends[fends]['start'][hic.fends[chr_indices][hic.chr2int[args.chrom2]]]
    else:
        if args.maxdist is None:
            args.maxdist = 0
        if args.maxdist == 0 or args.maxdist >= (maxstop - args.start) / 2:
            arraytype = 'upper'
        else:
            arraytype = 'compact'
    kwargs = {}
    for arg in args.keywords:
        temp = arg.split("=")
        if temp[1] in ["True", "TRUE", "true"]:
            temp[1] = True
        elif temp[1] in ["False", "FALSE", "false"]:
            temp[1] = False
        elif temp[1][0] == "(":
            temp[1] = temp[1].strip('()').split(',')
            for i in range(len(temp[1])):
                temp[1][i] = int(temp[1][i])
            temp[1] = tuple(temp[1])
        elif temp[1][0] == "[":
            temp[1] = temp[1].strip('[]').split(',')
            for i in range(len(temp[1])):
                temp[1][i] = int(temp[1][i])
        else:
            try:
                temp[1] = int(temp[1])
            except:
                # strip off extra characters introduced by galaxy into color format
                temp[1] = temp[1].replace('__pd__','')
        kwargs[temp[0]] = temp[1]
    if not args.chrom2 is None:
        data, mapping1, mapping2 = hic.trans_heatmap(chrom1=args.chrom, chrom2=args.chrom2, binsize=args.binsize,
                                                     start1=args.start, stop1=args.stop, start2=args.start2,
                                                     stop2=args.stop2, datatype=args.datatype,
                                                     maxdistance=args.maxdist, returnmapping=True, skipfiltered=True,
                                                     dynamically_binned=args.dynamic,
                                                     expansion_binsize=args.expbinsize, minobservations=args.minobs,
                                                     searchdistance=args.search, removefailed=args.remove)
    else:
        data, mapping = hic.cis_heatmap(chrom=args.chrom, binsize=args.binsize, start=args.start,
                                        stop=args.stop, datatype=args.datatype, arraytype=arraytype,
                                        maxdistance=args.maxdist, returnmapping=True, skipfiltered=True,
                                        dynamically_binned=args.dynamic, expansion_binsize=args.expbinsize,
                                        minobservations=args.minobs, searchdistance=args.search,
                                        removefailed=args.remove)
    output = open(args.output, 'w')
    if args.matrix:
        if args.chrom2 is None:
            diag = int(hic.binned is not None)
            if arraytype == 'upper':
                temp = numpy.zeros((mapping.shape[0], mapping.shape[0]), dtype=numpy.float64)
                indices = numpy.triu_indices(mapping.shape[0], 1 - diag)
                where = numpy.where(data[:, 1] > 0)[0]
                temp[indices[0][where], indices[1][where]] = data[where, 0] / data[where, 1]
                temp[indices[1][where], indices[0][where]] += data[where, 0] / data[where, 1]
            else:
                temp = numpy.zeros((mapping.shape[0], mapping.shape[0]), dtype=numpy.float64)
                for i in range(mapping.shape[0] - 1 + diag):
                    where = numpy.where(data[i, :, 1] > 0)[0]
                    temp[i, where + i + 1 - diag] = data[i, where, 0] / data[i, where, 1]
                indices = numpy.triu_indices(mapping.shape[0], 1 - diag)
                temp[indices[1], indices[0]] += temp[indices]
        else:
            temp = numpy.zeros((data.shape[0], data.shape[1]), dtype=numpy.float64)
            where = numpy.where(data[:, :, 1] > 0)
            temp[where] = data[where[0], where[1], 0] / data[where[0], where[1], 1]
        if args.datatype == 'raw':
            for i in range(temp.shape[0]):
                tempout = []
                for j in range(temp.shape[1]):
                    tempout.append("%i" % temp[i, j])
                print >> output, '\t'.join(tempout)
        else:
            for i in range(temp.shape[0]):
                tempout = []
                for j in range(temp.shape[1]):
                    tempout.append("%0.6f" % temp[i, j])
                print >> output, '\t'.join(tempout)
    else:
        if args.chrom2 is None:
            diag = binned
            if arraytype == 'upper':
                pos = 0
                for i in range(mapping.shape[0] - 1 + diag):
                    for j in range(i + 1 - diag, mapping.shape[0]):
                        if data[pos, 0] > 0.0 and data[pos, 1] > 0.0:
                            print >> output, "chr%s\t%i\t%i\tchr%s\t%i\t%i\t%f" % (args.chrom, mapping[i, 0],
                                                                           mapping[i, 1], args.chrom,
                                                                           mapping[j, 0], mapping[j, 1],
                                                                           numpy.log2(data[pos, 0] / data[pos, 1]))
                        pos += 1
            else:
                for i in range(mapping.shape[0] - 1 + diag):
                    for pos in range(min(mapping.shape[0] - i - 1 + diag, data.shape[1])):
                        j = i + pos + 1 - diag
                        if data[i, pos, 0] > 0.0 and data[i, pos, 1] > 0.0:
                            print >> output, "chr%s\t%i\t%i\tchr%s\t%i\t%i\t%f" % (args.chrom, mapping[i, 0],
                                                                            mapping[i, 1], args.chrom,
                                                                            mapping[j, 0], mapping[j, 1],
                                                                            numpy.log2(data[i, pos, 0] /
                                                                            data[i, pos, 1]))
        else:
            for i in range(mapping1.shape[0]):
                for j in range(mapping2.shape[0]):
                    if data[i, j, 0] > 0.0 and data[i, j, 1] > 0.0:
                        print >> output, "chr%s\t%i\t%i\tchr%s\t%i\t%i\t%f" % (args.chrom,
                                                                        mapping1[i, 0], mapping1[i, 1],
                                                                        args.chrom2, mapping2[j, 0], mapping2[j, 1],
                                                                        numpy.log2(data[i, j, 0] / data[i, j, 1]))
    output.close()
    if not args.image is None:
        width = max(5.0, (args.stop - args.start) / 1000000.)
        if args.datatype == 'enrichment':
            symmetricscaling = True
        else:
            symmetricscaling = False
        if 'symmetricscaling' in kwargs:
            symmetricscaling = kwargs['symmetricscaling']
        if not args.chrom2 is None:
            img, minscore, maxscore = plot_full_array(data, returnscale=True, symmetricscaling=symmetricscaling,
                                                      silent=args.silent, **kwargs)
            offset = 0.0
            height = (width / data.shape[0]) * data.shape[1]
        elif arraytype == 'compact':
            if args.rotate:
                img, minscore, maxscore = plot_diagonal_from_compact_array(data, returnscale=True,
                                          symmetricscaling=symmetricscaling, silent=args.silent,
                                          diagonal_included=diag, **kwargs)
                offset = width / 2. / (data.shape[0] * 2 - 1 + diag)
                height = width / (data.shape[0] * 2.0 - 2) * data.shape[1]
            else:
                img, minscore, maxscore = plot_compact_array(data, returnscale=True,
                                          symmetricscaling=symmetricscaling, silent=args.silent,
                                          diagonal_included=diag, **kwargs)
                offset = 0.0
                height = width
        else:
            if args.rotate:
                img, minscore, maxscore = plot_diagonal_from_upper_array(data, returnscale=True,
                                          symmetricscaling=symmetricscaling, silent=args.silent,
                                          diagonal_included=diag, **kwargs)
                offset = width / 2. / (mapping.shape[0] * 2 - 1 + diag)
                height = width / 2.
            else:
                img, minscore, maxscore = plot_upper_array(data, returnscale=True,
                                          symmetricscaling=symmetricscaling, silent=args.silent,
                                          diagonal_included=diag, **kwargs)
                offset = 0.0
                height = width
        if args.pdf:
            c = canvas.canvas()
            if args.chrom2 is None:
                c1 = canvas.canvas([canvas.clip(path.rect(0, 0, width, height))])
                c1.insert(bitmap.bitmap(-offset, -offset, img, width=width))
            else:
                c1 = canvas.canvas([canvas.clip(path.rect(0, 0, width, height))])
                c1.insert(bitmap.bitmap(-offset, -offset, img, width=width))
            c.insert(c1)
            if args.ticks and args.binsize > 0:
                if args.chrom2 is None:
                    c.stroke(path.line(0, 0, width, 0))
                    xmin = (mapping[0, 0] + mapping[0, 1]) / 2
                    xmax = (mapping[-1, 0] + mapping[-1, 1]) / 2
                    #order = int(floor(log10(xmax - xmin))) - 1
                    #step = int(floor((xmax - xmin) / (10.0 ** order))) * 10 ** order
                    
                    order = int(floor(log10((xmax - xmin) / (width * 2.0))))
                    step = int(floor((xmax - xmin) / (width * 2.0) / (10.0 ** order))) * 10 ** order
                    values = numpy.arange(((xmin - 1) / step + 1) * step, (xmax / step) * step + 1, step)
                    ticks = (values - float(mapping[0, 0] + mapping[0, 1]) / 2) / (mapping[-1, 0] -
                                                                                   mapping[0, 0]) * width
                    for i in range(values.shape[0]):
                        c.stroke(path.line(ticks[i], 0, ticks[i], -0.25), [style.linewidth.Thin])
                        c.text(ticks[i], -0.3, "%0.2e" % values[i],
                               [text.valign.middle, text.halign.left, text.size(-2), trafo.rotate(-90)])
                    if not args.rotate:
                        c.stroke(path.line(width, 0, width, height))
                        for i in range(values.shape[0]):
                            c.stroke(path.line(width, height - ticks[i], width + 0.25, height - ticks[i]), [style.linewidth.Thin])
                            c.text(width + 0.3, height - ticks[i], "%0.2e" % values[i], [text.valign.middle, text.halign.left,
                                                                              text.size(-2)])
                else:
                    c.stroke(path.line(0, 0, width, 0))
                    xmin = (mapping1[0, 0] + mapping1[0, 1]) / 2
                    xmax = (mapping1[-1, 0] + mapping1[-1, 1]) / 2
                    order = int(floor(log10((xmax - xmin) / (width * 2.0))))
                    step = int(floor((xmax - xmin) / (width * 2.0) / (10.0 ** order))) * 10 ** order
                    values = numpy.arange(((xmin - 1) / step + 1) * step, (xmax / step) * step + 1, step)
                    ticks = (values - float(mapping1[0, 0] + mapping1[0, 1]) / 2) / (mapping1[-1, 0] -
                                                                                     mapping1[0, 0]) * width
                    for i in range(values.shape[0]):
                        c.stroke(path.line(ticks[i], 0, ticks[i], -0.25), [style.linewidth.Thin])
                        c.text(ticks[i], -0.3, "%0.2e" % values[i],
                               [text.valign.middle, text.halign.left, text.size(-2), trafo.rotate(-90)])
                    c.stroke(path.line(width, 0, width, height))
                    xmin = (mapping2[0, 0] + mapping2[0, 1]) / 2
                    xmax = (mapping2[-1, 0] + mapping2[-1, 1]) / 2
                    order = int(floor(log10((xmax - xmin) / (width * 2.0))))
                    step = int(floor((xmax - xmin) / (width * 2.0) / (10.0 ** order))) * 10 ** order
                    values = numpy.arange(((xmin - 1) / step + 1) * step, (xmax / step) * step + 1, step)
                    ticks = (values - float(mapping2[0, 0] + mapping2[0, 1]) / 2) / (mapping2[-1, 0] -
                                                                                     mapping2[0, 0]) * height
                    for i in range(values.shape[0]):
                        c.stroke(path.line(width, height - ticks[i], width + 0.25, height - ticks[i]), [style.linewidth.Thin])
                        c.text(width + 0.3, height - ticks[i], "%0.2e" % values[i],
                               [text.valign.middle, text.halign.left, text.size(-2)])
            if args.legend:
                if 'min_color' in kwargs:
                    min_color = kwargs['min_color']
                else:
                    min_color = "0000ff"
                if 'mid_color' in kwargs:
                    mid_color = kwargs['mid_color']
                else:
                    mid_color = "ffffff"
                if 'max_color' in kwargs:
                    max_color = kwargs['max_color']
                else:
                    max_color = "ff0000"
                if 'logged' in kwargs:
                    logged = kwargs['logged']
                else:
                    logged = True
                c.insert(plot_key(min_score=minscore, max_score=maxscore, height=0.25, width=min(5., width),
                                  orientation='top', num_ticks=5, min_color=min_color,
                                  mid_color=mid_color, max_color=max_color,
                                  log_display=False), [trafo.translate(width * 0.5 - min(2.5, width * 0.5), height + 0.25)])
                if logged:
                    label = "Log2 "
                else:
                    label = ""
                if args.datatype == 'enrichment':
                    c.text(width * 0.5, height + 0.8, "%sEnrichment" % label, [text.halign.center, text.valign.bottom,
                                                                       text.size(-2)])
                elif args.datatype == 'raw':
                    c.text(width * 0.5, height + 0.8, "%sCounts" % label, [text.halign.center, text.valign.bottom,
                                                                   text.size(-2)])
                else:
                    c.text(width * 0.5, height + 0.8, "%sNormalized Counts" % label,
                           [text.halign.center, text.valign.bottom, text.size(-2)])

            c.writePDFfile(args.image)
            if len(args.image.split('.')) <= 1 or args.image.split('.')[-1] != 'pdf':
                subprocess.call('mv %s.pdf %s' % (args.image, args.image), shell=True)

        else:
            img_format = args.image.split('.')[-1].upper()
            if img_format not in ['PNG', 'TIF', 'JPG', 'JPEG']:
                img_format = 'PNG'
            img.save(args.image, img_format)

Example 106

Project: hifive
Source File: normalize_hic_project.py
def run(args):
    if 'mpi4py' in sys.modules.keys():
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        num_procs = comm.Get_size()
    else:
        comm = None
        rank = 0
        num_procs = 1
    if args.chroms is None:
        chroms = []
    else:
        chroms = args.chroms.split(',')
        if len(chroms) == 1 and chroms[0] == '':
            chroms = []
    if args.algorithm.count('binning') > 0:
        model = args.model.split(',')
        modelbins = args.modelbins.split(',')
        parameters = args.parameters.split(',')
        for i in range(len(modelbins)):
            try:
                modelbins[i] = int(modelbins[i])
            except:
                if rank == 0:
                    print sys.stderr, ("Not all arguments in -n/--modelbins could be converted to integers.")
                return 1
        if len(model) != len(modelbins):
            if rank == 0:
                print sys.stderr, ("-v/--model, -n/--modelbins, and -u/--parameter-types must be equal lengths.")
            return 1
    hic = HiC(args.project, 'r', silent=args.silent)
    precorrect = False
    if args.algorithm in ['binning', 'binning-express', 'binning-probability']:
        hic.find_binning_fend_corrections(mindistance=args.mindist, maxdistance=args.maxdist,
                                          chroms=chroms, num_bins=modelbins, model=model, parameters=parameters,
                                          usereads=args.binreads, learning_threshold=args.threshold,
                                          max_iterations=args.biniter, pseudocounts=args.pseudo)
        precorrect = True
    if args.algorithm in ['probability', 'binning-probability']:
        hic.find_probability_fend_corrections(mindistance=args.mindist, maxdistance=args.maxdist,
                                              minchange=args.change, max_iterations=args.probiter,
                                              learningstep=args.step, chroms=chroms,
                                              precalculate=args.precalc, precorrect=precorrect,
                                              model=args.probmodel)
    elif args.algorithm in ['express', 'binning-express']:
        hic.find_express_fend_corrections(iterations=args.expiter, mindistance=args.mindist,
                                          maxdistance=args.maxdist, remove_distance=args.nodist,
                                          usereads=args.expreads, mininteractions=args.minint,
                                          chroms=chroms, minchange=args.change, precorrect=precorrect,
                                          binary=args.binary, kr=args.kr)
    if rank == 0:
        hic.save(args.output)

Example 107

Project: mirage
Source File: api_v2.py
def list_available_modules(hostname):
    """
    Gets all available modules (for given host) and returns a list with them.
    :param hostname:
    :return:
    """
    modules_list = []
    module = Module(hostname)
    # getting keys
    keys = get_keys('{0}:modules:*'.format(module.host()))

    names = [x.rpartition(':')[-1] for x in keys]

    for name in names:
        loaded_sys_versions = [x for x in sys.modules.keys() if '{0}_v'.format(name) in x]
        latest_code_version = module.latest_version(name)
        source_code = Module(hostname).get_source(name)
        obj = {
            'name': name,
            'latest_code_version': latest_code_version,
            'loaded_sys_versions': loaded_sys_versions,
            'source_raw': source_code,
            'href': '/api/v2/modules/objects/%s' % name
        }
        modules_list.append(obj)

    return {
        'version': version,
        'data': modules_list
    }
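
Example 107 filters sys.modules.keys() by substring to report which versioned copies of a module (named like '<name>_v<version>') are currently loaded. The same filtering idea in isolation, with a purely hypothetical module family name:

import sys

def loaded_versions(name):
    # Versioned copies are assumed to be named like 'payments_v1',
    # 'payments_v2', and so on; 'payments' is only an illustration.
    pattern = '{0}_v'.format(name)
    # Copy the keys before filtering; on Python 3, keys() is a live view
    # and a concurrent import could otherwise change it mid-iteration.
    return [m for m in list(sys.modules.keys()) if pattern in m]

print(loaded_versions('payments'))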

Example 108

Project: hifive
Source File: fivec_data.py
    def load_data_from_bam(self, fragfilename, filelist):
        """
        Read interaction counts from pairs of BAM files and place in h5dict.

        :param fragfilename: This specifies the file name of the :class:`Fragment` object to associate with the dataset.
        :type fragfilename: str.
        :param filelist: A list containing lists of paired read end files.
        :type filelist: list
        :returns: None

        :Attributes: * **fragfilename** (*str.*) - A string containing the relative path of the fragment file.
                     * **cis_data** (*ndarray*) - A numpy array of type int32 and shape N x 3 where N is the number of valid non-zero intra-regional fragment pairings observed in the data. The first column contains the fragment index (from the 'fragments' array in the Fragment object) of the upstream fragment, the second column contains the index of the downstream fragment, and the third column contains the number of reads observed for that fragment pair.
                     * **cis_indices** (*ndarray*) - A numpy array of type int64 and a length of the number of fragments + 1. Each position contains the first entry for the correspondingly-indexed fragment in the first column of 'cis_data'. For example, all of the downstream cis interactions for the fragment at index 5 in the Fragment object 'fragments' array are in cis_data[cis_indices[5]:cis_indices[6], :]. 
                     * **trans_data** (*ndarray*) - A numpy array of type int32 and shape N x 3 where N is the number of valid non-zero inter-regional fragment pairings observed in the data. The first column contains the fragment index (from the 'fragments' array in the Fragment object) of the upstream fragment (upstream also refers to the lower indexed chromosome in this context), the second column contains the index of the downstream fragment, and the third column contains the number of reads observed for that fragment pair.
                     * **trans_indices** (*ndarray*) - A numpy array of type int64 and a length of the number of fragments + 1. Each position contains the first entry for the correspondingly-indexed fragment in the first column of 'trans_data'. For example, all of the downstream trans interactions for the fragment at index 5 in the Fragment object 'fragments' array are in trans_data[trans_indices[5]:trans_indices[6], :].
                     * **frags** (*filestream*) - A filestream to the hdf5 Fragment file such that all saved Fragment attributes can be accessed through this class attribute.

        When data is loaded the 'history' attribute is updated to include the history of the Fragment file that becomes associated with it.
        """
        self.history += "FiveCData.load_data_from_counts(fragfilename='%s', filelist=%s) - " % (fragfilename, str(filelist))
        if 'pysam' not in sys.modules.keys():
            if not self.silent:
                print >> sys.stderr, ("The pysam module must be installed to use this function.")
            self.history += 'Error: pysam module missing\n'
            return None
        # determine if fragment file exists and if so, load it
        if not os.path.exists(fragfilename):
            if not self.silent:
                print >> sys.stderr, ("The fragment file %s was not found. No data was loaded.\n") % (fragfilename),
            self.history += "Error: '%s' not found\n" % fragfilename
            return None
        self.fragfilename = "%s/%s" % (os.path.relpath(os.path.dirname(os.path.abspath(fragfilename)),
                                       os.path.dirname(self.file)), os.path.basename(fragfilename))
        self.frags = h5py.File(fragfilename, 'r')
        strands = self.frags['fragments']['strand'][...]
        chr2int = {}
        for i, j in enumerate(self.frags['chromosomes'][:]):
            chr2int[j] = i
        # create fragment name dictionary
        names = {}
        for i in range(self.frags['fragments'].shape[0]):
            names[self.frags['fragments']['name'][i]] = i
        # load data from all files, skipping reads where either fragment is not in the fragment file.
        if isinstance(filelist[0], str):
            filelist = [[filelist[0], filelist[1]]]
        total_reads = 0
        data = {}
        for filepair in filelist:
            # determine which files have both mapped ends present
            present = True
            if not os.path.exists(filepair[0]):
                if not self.silent:
                    print >> sys.stderr, ("%s could not be located.") % (filepair[0]),
                self.history += "'%s' not found, " % filepair[0]
                present = False
            if not os.path.exists(filepair[1]):
                if not self.silent:
                    print >> sys.stderr, ("%s could not be located.") % (filepair[1]),
                self.history += "'%s' not found, " % filepair[1]
                present = False
            if not present:
                if not self.silent:
                    print >> sys.stderr, ("No data for one or both ends could be located. Skipping this run.\n")
                continue
            reads = 0
            unpaired = {}
            # load first half of paired ends
            if not self.silent:
                print >> sys.stderr, ("Loading data from %s...") % (filepair[0].split('/')[-1]),
            input = pysam.Samfile(filepair[0], 'rb')
            for read in input.fetch(until_eof=True):
                # Only consider reads with an alignment
                if read.is_unmapped:
                    continue
                # if mapping name not in fragment names, skip
                seq_name = input.getrname(read.tid)
                if seq_name not in names:
                    continue
                # skip multiply-aligned reads
                for tag in read.tags:
                    if tag[0] == 'XS':
                        break
                else:
                    unpaired[read.qname] = names[seq_name]
            input.close()
            if not self.silent:
                print >> sys.stderr, ("Done\n"),
            # load second half of paired ends
            if not self.silent:
                print >> sys.stderr, ("Loading data from %s...") % (filepair[1].split('/')[-1]),
            input = pysam.Samfile(filepair[1], 'rb')
            for read in input.fetch(until_eof=True):
                # Only consider reads whose paired end was valid
                if read.qname not in unpaired:
                    continue
                # Only consider reads with an alignment
                if read.is_unmapped:
                    continue
                # if mapping name not in fragment names, skip
                seq_name = input.getrname(read.tid)
                if seq_name not in names:
                    continue
                # skip multiply-aligned reads
                for tag in read.tags:
                    if tag[0] == 'XS':
                        break
                else:
                    # if both ends map to the same orientation, skip
                    if strands[unpaired[read.qname]] != strands[names[seq_name]]:
                        pair = (min(unpaired[read.qname], names[seq_name]),
                                max(unpaired[read.qname], names[seq_name]))
                        if pair not in data:
                            data[pair] = 0
                        data[pair] += 1
                        reads += 1
                    del unpaired[read.qname]
            input.close()
            if not self.silent:
                print >> sys.stderr, ("Done\n"),
            if not self.silent:
                print >> sys.stderr, ("Read %i validly_mapped read paired.\n") % (reads),
            total_reads += reads           
        if len(data) == 0:
            if not self.silent:
                print >> sys.stderr, ("No valid data was loaded.\n"),
            self.history += "Error: no valid data loaded\n"
            return None
        if not self.silent:
            print >> sys.stderr, ("%i total validly-mapped read pairs loaded. %i unique pairs\n") %\
                             (total_reads,len(data)),
        self._parse_fragment_pairs(data)
        self.history += 'Success\n'
        return None
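
load_data_from_bam above treats pysam as a soft dependency: it checks sys.modules and returns early with a message instead of failing later with a NameError. Presumably the surrounding module imports pysam in a try/except at import time; the sketch below follows that assumption and is not the project's actual module layout.

import sys

try:
    import pysam    # optional: only needed for reading BAM files
except ImportError:
    pass

def open_bam(path):
    # Refuse politely if the optional dependency never got imported.
    if 'pysam' not in sys.modules.keys():
        sys.stderr.write("The pysam module must be installed to use this function.\n")
        return None
    return pysam.Samfile(path, 'rb')    # same call the example relies on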

Example 109

Project: hifive
Source File: hic_domains.py
    def __init__(self, hic, binsize, chroms=[], out_fname=None, silent=False):
        self.hic = hic
        self.binsize = binsize
        if 'mpi4py' in sys.modules.keys():
            self.comm = MPI.COMM_WORLD
            self.rank = self.comm.Get_rank()
            self.num_procs = self.comm.Get_size()
        else:
            self.comm = None
            self.rank = 0
            self.num_procs = 1
        if self.rank == 0:
            self.silent = silent
        else:
            self.silent = True
        if self.rank == 0:
            if out_fname is not None:
                storage = h5py.File(out_fname, 'a')
            else:
                storage = None
            needed = []
            if chroms == "" or (isinstance(chroms, list) and len(chroms) == 0):
                chroms = hic.fends['chromosomes'][...]
            elif isinstance(chroms, str):
                chroms = [chroms]
            self.chroms = chroms
            for chrom in chroms:
                if storage is None or ("%s.correlations" % chrom not in storage and
                                       "%s.enrichments" % chrom not in storage):
                    needed.append(chrom)
            if len(needed) > 0:
                node_ranges = numpy.round(numpy.linspace(0, len(needed), self.num_procs + 1)).astype(numpy.int32)
                for i in range(1, self.num_procs):
                    self.comm.send(needed[node_ranges[i]:node_ranges[i + 1]], dest=i, tag=11)
                node_needed = needed[:node_ranges[1]]
            else:
                node_needed = []
                for i in range(1, self.num_procs):
                    self.comm.send([], dest=i, tag=11)
        else:
            node_needed = self.comm.recv(source=0, tag=11)
        data = {}
        self.positions = {}
        if hic.binned is None:
            chr_indices = hic.fends['chr_indices'][...]
        else:
            chr_indices = hic.fends['bin_indices'][...]
        for chrom in node_needed:
            if self.rank == 0 and not self.silent:
                print >> sys.stderr, ("\r%s\rHeatmapping %s") % (' '*80, chrom),
            chrint = hic.chr2int[chrom]
            startfend = chr_indices[chrint]
            while hic.filter[startfend] == 0:
                startfend += 1
            if hic.binned is None:
                start = (hic.fends['fends']['mid'][startfend] / binsize) * binsize
            else:
                start = (hic.fends['bins']['mid'][startfend] / binsize) * binsize
            stopfend = chr_indices[chrint + 1]
            while hic.filter[stopfend - 1] == 0:
                stopfend -= 1
            if hic.binned is None:
                stop = ((hic.fends['fends']['mid'][stopfend - 1] - 1) / binsize + 1) * binsize
            else:
                stop = ((hic.fends['bins']['mid'][stopfend - 1] - 1) / binsize + 1) * binsize
            temp = hic.cis_heatmap(chrom, binsize=binsize / 2, start=start, stop=stop, datatype='enrichment',
                                   arraytype='upper', returnmapping=True, silent=True)
            temp1 = hic_binning.bin_cis_array(temp[0], temp[1], binsize, arraytype='upper', returnmapping=True,
                                              silent=True, diagonal_included=(hic.binned is not None))
            hic_binning.dynamically_bin_cis_array(temp[0], temp[1], temp1[0], temp1[1], minobservation=5, silent=True,
                                                  diagonal_included=(hic.binned is not None))
            indices = numpy.triu_indices(temp1[1].shape[0], int(hic.binned is None))
            hm = numpy.zeros((temp1[1].shape[0], temp1[1].shape[0], 2), dtype=numpy.float32)
            hm[indices[0], indices[1], :] = temp1[0]
            hm[indices[1], indices[0], :] = temp1[0]
            valid = numpy.where(numpy.sum(hm[:, :, 1], axis=0) > 0)[0]
            hm = hm[valid, :, :][:, valid, :]
            self.positions[chrom] = temp1[1][valid, :]
            data[chrom] = hm
        if self.rank == 0:
            for i in range(1, self.num_procs):
                data.update(self.comm.recv(source=i, tag=11))
                self.positions.update(self.comm.recv(source=i, tag=11))
            if storage is not None:
                for chrom in data:
                    storage.create_dataset(name="%s.counts" % chrom, data=data[chrom][:, :, 0])
                    storage.create_dataset(name="%s.expected" % chrom, data=data[chrom][:, :, 1])
                    where = numpy.where(data[chrom][:, :, 1] > 0)
                    data[chrom][where[0], where[1], 0] = numpy.log(data[chrom][where[0], where[1], 0] /
                                                                   data[chrom][where[0], where[1], 1])
                    data[chrom] = data[chrom][:, :, 0]
                    storage.create_dataset(name="%s.enrichments" % chrom, data=data[chrom])
                    storage.create_dataset(name="%s.positions" % chrom, data=self.positions[chrom])
        else:
            self.comm.send(data, dest=0, tag=11)
            del data
            self.comm.send(self.positions, dest=0, tag=11)
        if self.rank == 0:
            for chrom in chroms:
                if chrom not in data and "%s.correlations" % chrom not in storage:
                    data[chrom] = storage["%s.enrichments" % chrom][...]
                if chrom not in self.positions:
                    self.positions[chrom] = storage["%s.positions" % chrom][...]
            correlations = {}
            for chrom in data:
                if not self.silent:
                    print >> sys.stderr, ("\r%s\rCorrelating %s") % (' '*80, chrom),
                cdata = numpy.copy(data[chrom])
                for i in range(1, self.num_procs):
                    self.comm.send(1, dest=i, tag=11)
                    self.comm.send(cdata, dest=i, tag=11)
                corr = numpy.zeros(cdata.shape, dtype=numpy.float32)
                self.find_correlations(cdata, corr)
                if storage is not None:
                    storage.create_dataset(name="%s.correlations" % chrom, data=corr)
                correlations[chrom] = corr
            for i in range(1, self.num_procs):
                self.comm.send(0, dest=i, tag=11)
        else:
            task = self.comm.recv(source=0, tag=11)
            while task == 1:
                data = self.comm.recv(source=0, tag=11)
                self.find_correlations(data)
                task = self.comm.recv(source=0, tag=11)
        if self.rank == 0:
            self.eigenv = {}
            for chrom in chroms:
                if chrom not in correlations:
                    correlations[chrom] = storage["%s.correlations" % chrom][...]
                self.eigenv[chrom] = scipy.sparse.linalg.eigs(correlations[chrom], k=1)[1][:, 0]
                storage.create_dataset(name="%s.eigenv", data=self.eigenv[chrom])
            storage.close()
            self.find_clusters()
            if not self.silent:
                print >> sys.stderr, ("\r%s\r") % (' '*80),

Example 110

Project: freezegun
Source File: api.py
    def start(self):
        if self.tick:
            time_to_freeze = TickingDateTimeFactory(self.time_to_freeze, real_datetime.now())
        else:
            time_to_freeze = FrozenDateTimeFactory(self.time_to_freeze)

        # Change the modules
        datetime.datetime = FakeDatetime
        datetime.date = FakeDate
        fake_time = FakeTime(time_to_freeze, time.time)
        fake_localtime = FakeLocalTime(time_to_freeze, time.localtime)
        fake_gmtime = FakeGMTTime(time_to_freeze, time.gmtime)
        fake_strftime = FakeStrfTime(time_to_freeze, time.strftime)
        time.time = fake_time
        time.localtime = fake_localtime
        time.gmtime = fake_gmtime
        time.strftime = fake_strftime

        copyreg.dispatch_table[real_datetime] = pickle_fake_datetime
        copyreg.dispatch_table[real_date] = pickle_fake_date

        # Change any place where the module had already been imported
        to_patch = [
            ('real_date', real_date, 'FakeDate', FakeDate),
            ('real_datetime', real_datetime, 'FakeDatetime', FakeDatetime),
            ('real_gmtime', real_gmtime, 'FakeGMTTime', fake_gmtime),
            ('real_localtime', real_localtime, 'FakeLocalTime', fake_localtime),
            ('real_strftime', real_strftime, 'FakeStrfTime', fake_strftime),
            ('real_time', real_time, 'FakeTime', fake_time),
        ]
        real_names = tuple(real_name for real_name, real, fake_name, fake in to_patch)
        self.fake_names = tuple(fake_name for real_name, real, fake_name, fake in to_patch)
        self.reals = dict((id(fake), real) for real_name, real, fake_name, fake in to_patch)
        fakes = dict((id(real), fake) for real_name, real, fake_name, fake in to_patch)
        add_change = self.undo_changes.append

        # Save the current loaded modules
        self.modules_at_start = set(sys.modules.keys())

        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')

            for mod_name, module in list(sys.modules.items()):
                if mod_name is None or module is None:
                    continue
                elif mod_name.startswith(self.ignore):
                    continue
                elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                    continue
                for module_attribute in dir(module):
                    if module_attribute in real_names:
                        continue
                    try:
                        attribute_value = getattr(module, module_attribute)
                    except (ImportError, AttributeError, TypeError):
                        # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
                        continue
                    fake = fakes.get(id(attribute_value))
                    if fake:
                        setattr(module, module_attribute, fake)
                        add_change((module, module_attribute, attribute_value))

        datetime.datetime.times_to_freeze.append(time_to_freeze)
        datetime.datetime.tz_offsets.append(self.tz_offset)

        datetime.date.dates_to_freeze.append(time_to_freeze)
        datetime.date.tz_offsets.append(self.tz_offset)

        return time_to_freeze
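
A minimal standalone sketch of the "patch references in already-imported modules" scan above; patch_everywhere, _Sentinel, OLD and NEW are names invented for this illustration, not freezegun API.

import sys

def patch_everywhere(real, fake):
    # Swap every module-level reference to `real` for `fake`, recording how
    # to undo it, much like the loop over sys.modules above.
    undo = []
    for _name, module in list(sys.modules.items()):
        if module is None:
            continue
        for attr in dir(module):
            try:
                value = getattr(module, attr)
            except (ImportError, AttributeError, TypeError):
                continue
            if value is real:
                setattr(module, attr, fake)
                undo.append((module, attr, real))
    return undo

class _Sentinel(object):
    pass

OLD, NEW = _Sentinel(), _Sentinel()
marker = OLD                         # a module-level reference to be patched
changes = patch_everywhere(OLD, NEW)
print(marker is NEW)                 # True: this module's reference was swapped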

Example 111

Project: freezegun
Source File: api.py
View license
    def stop(self):
        datetime.datetime.times_to_freeze.pop()
        datetime.datetime.tz_offsets.pop()
        datetime.date.dates_to_freeze.pop()
        datetime.date.tz_offsets.pop()

        if not datetime.datetime.times_to_freeze:
            datetime.datetime = real_datetime
            datetime.date = real_date
            copyreg.dispatch_table.pop(real_datetime)
            copyreg.dispatch_table.pop(real_date)
            for module, module_attribute, original_value in self.undo_changes:
                setattr(module, module_attribute, original_value)
            self.undo_changes = []

            # Restore modules loaded after start()
            modules_to_restore = set(sys.modules.keys()) - self.modules_at_start
            self.modules_at_start = set()
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                for mod_name in modules_to_restore:
                    module = sys.modules.get(mod_name, None)
                    if mod_name is None or module is None:
                        continue
                    elif mod_name.startswith(self.ignore):
                        continue
                    elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                        continue
                    for module_attribute in dir(module):

                        if module_attribute in self.fake_names:
                            continue
                        try:
                            attribute_value = getattr(module, module_attribute)
                        except (ImportError, AttributeError, TypeError):
                            # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
                            continue

                        real = self.reals.get(id(attribute_value))
                        if real:
                            setattr(module, module_attribute, real)

        time.time = time.time.previous_time_function
        time.gmtime = time.gmtime.previous_gmtime_function
        time.localtime = time.localtime.previous_localtime_function
        time.strftime = time.strftime.previous_strftime_function

Example 112

Project: calliope
Source File: utils.py
View license
def _load_function(source):
    """
    Returns a function from a module, given a source string of the form:

        'module.submodule.subsubmodule.function_name'

    """
    module_string, function_string = source.rsplit('.', 1)
    modules = [i for i in sys.modules.keys() if 'calliope' in i]
    # Check if module already loaded, if so, don't re-import it
    if (module_string in modules):
        module = sys.modules[module_string]
    elif ('calliope.' + module_string) in modules:
        module = sys.modules['calliope.' + module_string]
    # Else load the module
    else:
        try:
            module = importlib.import_module(module_string)
        except ImportError:
            module = importlib.import_module('calliope.' + module_string)
    return getattr(module, function_string)
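
A self-contained sketch of the same dotted-path lookup, minus the calliope-specific fallback; load_function is a name invented here, and 'os.path.join' is just a stdlib target that is guaranteed to resolve.

import importlib
import sys

def load_function(source):
    # Split 'module.submodule.function' into its module and attribute parts
    module_string, function_string = source.rsplit('.', 1)
    # Reuse the already-imported module when possible, as the code above does
    if module_string in sys.modules.keys():
        module = sys.modules[module_string]
    else:
        module = importlib.import_module(module_string)
    return getattr(module, function_string)

join = load_function('os.path.join')
print(join('tmp', 'notes.txt'))  # 'tmp/notes.txt' on POSIX systems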

Example 113

Project: cmt
Source File: mayaunittestui.py
View license
    def __init__(self):
        """Creates an instance and installs as the global importer."""
        self.previous_modules = set(sys.modules.keys())

Example 114

Project: freezegun
Source File: test_class_import.py
View license
def test_import_after_start():
    with freeze_time('2012-01-14'):
        assert 'tests.another_module' not in sys.modules.keys()
        from tests import another_module

        # Reals
        assert another_module.get_datetime() is datetime.datetime
        assert another_module.get_datetime() is FakeDatetime
        assert another_module.get_date() is datetime.date
        assert another_module.get_date() is FakeDate
        assert another_module.get_time() is time.time
        assert isinstance(another_module.get_time(), FakeTime)
        assert another_module.get_localtime() is time.localtime
        assert isinstance(another_module.get_localtime(), FakeLocalTime)
        assert another_module.get_gmtime() is time.gmtime
        assert isinstance(another_module.get_gmtime(), FakeGMTTime)
        assert another_module.get_strftime() is time.strftime
        assert isinstance(another_module.get_strftime(), FakeStrfTime)

        # Fakes
        assert another_module.get_fake_datetime() is FakeDatetime
        assert another_module.get_fake_date() is FakeDate
        assert another_module.get_fake_time() is FakeTime
        assert another_module.get_fake_localtime() is FakeLocalTime
        assert another_module.get_fake_gmtime() is FakeGMTTime
        assert another_module.get_fake_strftime() is FakeStrfTime

    # Reals
    assert another_module.get_datetime() is datetime.datetime
    assert not another_module.get_datetime() is FakeDatetime
    assert another_module.get_date() is datetime.date
    assert not another_module.get_date() is FakeDate
    assert another_module.get_time() is time.time
    assert not isinstance(another_module.get_time(), FakeTime)
    assert another_module.get_localtime() is time.localtime
    assert not isinstance(another_module.get_localtime(), FakeLocalTime)
    assert another_module.get_gmtime() is time.gmtime
    assert not isinstance(another_module.get_gmtime(), FakeGMTTime)
    assert another_module.get_strftime() is time.strftime
    assert not isinstance(another_module.get_strftime(), FakeStrfTime)

    # Fakes
    assert another_module.get_fake_datetime() is FakeDatetime
    assert another_module.get_fake_date() is FakeDate
    assert another_module.get_fake_time() is FakeTime
    assert another_module.get_fake_localtime() is FakeLocalTime
    assert another_module.get_fake_gmtime() is FakeGMTTime
    assert another_module.get_fake_strftime() is FakeStrfTime

Example 115

Project: cmt
Source File: mayaunittestui.py
View license
    def uninstall(self):
        for modname in sys.modules.keys():
            if modname not in self.previous_modules:
                # Force reload when modname next imported
                del(sys.modules[modname])
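
The snapshot-and-evict pattern split across Examples 113 and 115 can be collapsed into one small Python 3-friendly class; ModuleSnapshot is an illustrative name, and the list() copy makes deletion during iteration safe.

import sys

class ModuleSnapshot(object):
    def __init__(self):
        # Remember what was loaded when the snapshot was taken
        self.previous_modules = set(sys.modules.keys())

    def uninstall(self):
        # Evict anything imported since the snapshot so it reloads next time
        for modname in list(sys.modules.keys()):
            if modname not in self.previous_modules:
                del sys.modules[modname]

snapshot = ModuleSnapshot()
import colorsys              # imported after the snapshot...
snapshot.uninstall()         # ...so it is evicted and will be re-imported lazily
print('colorsys' in sys.modules.keys())  # False, unless it was loaded earlier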

Example 116

Project: glymur
Source File: test_printing.py
View license
    @unittest.skipIf('lxml' not in sys.modules.keys(), "No lxml")
    @unittest.skipIf(sys.hexversion < 0x03000000,
                     "Only trusting python3 for printing non-ascii chars")
    def test_xml_latin1(self):
        """Should be able to print an XMLBox with utf-8 encoding (latin1)."""
        # Seems to be inconsistencies between different versions of python2.x
        # as to what gets printed.
        #
        # 2.7.5 (fedora 19) prints xml entities.
        # 2.7.3 seems to want to print hex escapes.
        text = u"""<flow>Strömung</flow>"""
        if sys.hexversion < 0x03000000:
            xml = ET.parse(StringIO(text.encode('utf-8')))
        else:
            xml = ET.parse(StringIO(text))

        xmlbox = glymur.jp2box.XMLBox(xml=xml)
        actual = str(xmlbox)
        if sys.hexversion < 0x03000000:
            expected = ("XML Box (xml ) @ (-1, 0)\n"
                        "    <flow>Str\xc3\xb6mung</flow>")
        else:
            expected = ("XML Box (xml ) @ (-1, 0)\n"
                        "    <flow>Strömung</flow>")
        self.assertEqual(actual, expected)

Example 117

Project: mutagen
Source File: setup.py
View license
    def run(self):
        try:
            from coverage import coverage
        except ImportError:
            raise SystemExit(
                "Missing 'coverage' module. See "
                "https://pypi.python.org/pypi/coverage or try "
                "`apt-get install python-coverage python3-coverage`")

        for key in list(sys.modules.keys()):
            if key.startswith('mutagen'):
                del(sys.modules[key])

        cov = coverage()
        cov.start()

        cmd = self.reinitialize_command("test")
        cmd.ensure_finalized()
        cmd.run()

        dest = os.path.join(os.getcwd(), "coverage")

        cov.stop()
        cov.html_report(
            directory=dest,
            ignore_errors=True,
            include=["mutagen/*"],
            omit=["mutagen/_senf/*"])

        print("Coverage summary: file://%s/index.html" % dest)

Example 118

Project: Javatar
Source File: utils.py
View license
    def run(self, edit, util_type="", text="", region=None, dest=None):
        """
        Run specified utility

        @param edit: edit object from Sublime Text buffer
        @param util_type: utility selector
        @param text: text to be used with edit object
        @param region: replace region (use with replace utility)
        @param dest: command description (use on dest method)
        """
        if util_type == "insert":
            self.view.insert(edit, 0, text)
        elif util_type == "add":
            self.view.insert(edit, self.view.size(), text)
        elif util_type == "replace":
            if isinstance(region, list) or isinstance(region, tuple):
                region = sublime.Region(region[0], region[1])
            self.view.replace(edit, region, text)
        elif util_type == "clear":
            self.view.erase(edit, sublime.Region(0, self.view.size()))
        elif util_type == "set_read_only":
            self.view.set_read_only(True)
        elif util_type == "clear_read_only":
            self.view.set_read_only(False)
        elif util_type == "parser_test" and Constant.is_debug():
            for cl in JavaStructure().classes_in_file(self.view.file_name()):
                print("Class: " + cl["name"])
                for ctor in JavaStructure().constructors_in_class(cl):
                    params = []
                    for param in ctor["params"]:
                        params.append(param["type"] + " " + param["name"])
                    print("  Constructor: " + ctor["name"] + "(" + ", ".join(params) + ")")
                for field in JavaStructure().fields_in_class(cl):
                    print("  Field: " + field["type"] + " " + field["name"])
                for method in JavaStructure().methods_in_class(cl):
                    params = []
                    for param in method["params"]:
                        params.append(param["type"] + " " + param["name"])
                    print("  Method: " + method["returnType"] + " " + method["name"] + "(" + ", ".join(params) + ")")
        elif util_type == "remote_hash":
            sublime.active_window().show_input_panel(
                "URL:", "", self.remote_hash, None, None
            )
        elif util_type == "hash":
            Logger().none(
                hashlib.sha256(
                    self.view.substr(
                        sublime.Region(0, self.view.size())
                    ).encode("utf-8")
                ).hexdigest()
            )
        elif util_type == "tojson":
            jsonObj = sublime.decode_value(
                self.view.substr(sublime.Region(0, self.view.size()))
            )
            self.view.replace(
                edit,
                sublime.Region(0, self.view.size()),
                sublime.encode_value(jsonObj, True)
            )
        elif util_type == "json_test" and Constant.is_debug():
            panel = JSONPanel(
                window=self.view.window(),
                on_done=self.on_done,
                on_cancel=self.on_cancel
            )
            view = panel.open("JSONTest.json")
            sublime.set_timeout(
                lambda: view.run_command(
                    "javatar_utils", {"util_type": "insert", "text": "{\n}"}
                ),
                50
            )
        elif util_type == "parse":
            sublime.active_window().show_input_panel(
                "Parse Parameter:", "", self.parse_code, None, None
            )
        elif util_type == "reload" and Constant.is_debug():
            ActionHistory().add_action(
                "javatar.commands.utils.utils.reload", "Reload Javatar"
            )
            Logger().info("Reloading Javatar...")
            Constant.reset()
            for mod in tuple(sys.modules.keys()):
                if mod.lower().startswith("javatar"):
                    Logger().info("Reloading module " + mod + "...")
                    reload(sys.modules[mod])
            from ..Javatar import plugin_loaded
            plugin_loaded()

Example 119

Project: conan
Source File: tools.py
View license
    def run(self, command_line, user_io=None, ignore_error=False):
        """ run a single command as in the command line.
            If user or password is filled, user_io will be mocked to return this
            tuple if required
        """
        self.init_dynamic_vars(user_io)

        command = Command(self.client_cache, self.user_io, self.runner, self.remote_manager, self.search_manager)
        args = shlex.split(command_line)
        current_dir = os.getcwd()
        os.chdir(self.current_folder)

        old_modules = list(sys.modules.keys())
        try:
            error = command.run(args)
        finally:
            os.chdir(current_dir)
            # Reset sys.modules to its prev state. A .copy() DOES NOT WORK
            added_modules = set(sys.modules).difference(old_modules)
            for added in added_modules:
                sys.modules.pop(added, None)

        if not ignore_error and error:
            logger.error(self.user_io.out)
            raise Exception("Command failed:\n%s" % command_line)
        return error

Example 120

Project: Javatar
Source File: constant.py
View license
    @staticmethod
    def check_conflicts(old_ref=None):
        file_header = [
            mod
            for mod in sys.modules.keys()
            if mod.lower().startswith("fileheader")
        ]
        sublime_linter = [
            mod
            for mod in sys.modules.keys()
            if mod.lower().startswith("sublimelinter")
        ]
        message = ""
        if sublime_linter:
            from ..extensions.linter import JavatarLinter
            JavatarLinter
            Logger().info(
                "SublimeLinter is installed. Javatar linter now enabled"
            )
        else:
            message += ("SublimeLinter is not installed." +
                        " Javatar linter now disabled")
        if file_header:
            msg = (
                "FileHeader is installed. Javatar might conflicts with" +
                " FileHeader when create a new file"
            )
            if message:
                message += "    "
            message += msg
            Logger().warning(msg)
        StatusManager().hide_status(old_ref)
        StatusManager().show_status(
            message,
            ref=old_ref,
            scrolling=StatusManager().SCROLL,
            delay=25000,
            must_see=True
        )

Example 121

Project: pry
Source File: test.py
View license
    def __init__(self, dirname, filename, magic):
        modname = filename[:-3]
        TestContainer.__init__(self, name=os.path.join(dirname, modname))
        self.dirname, self.filename = dirname, filename
        m = __import__(modname)
        # When pry starts up, it loads the libpry module. In order for the
        # instantiation stuff in libpry to be counted in coverage, we need to
        # go through and re-execute them. We don't "reload", since this will
        # create a new suite of class instances, and break our code.
        # begin nocover
        if magic:
            for k in sys.modules.keys():
                if "libpry" in k and sys.modules[k]:
                    n = sys.modules[k].__file__
                    if n.endswith("pyc"):
                        execfile(n[:-1])
                    elif n.endswith("py"):
                        execfile(n)
        # end nocover
        # Force a reload to stop Python caching modules that happen to have 
        # the same name
        reload(m)
        if hasattr(m, "tests"):
            self.addChildrenFromList(m.tests)

Example 122

Project: pyfilesystem
Source File: test_importhook.py
View license
    def _check_imports_are_working(self):
        try:
            import fsih_hello
            self.assertEquals(fsih_hello.message,"hello world!")
            try:
                import fsih_helo
            except ImportError:
                pass
            else:
                assert False, "ImportError not raised"
            import fsih_pkg
            import fsih_pkg.sub1
            self.assertEquals(fsih_pkg.sub1.message,"hello world!")
            self.assertEquals(fsih_pkg.sub1.a,42)
            import fsih_pkg.sub2
            self.assertEquals(fsih_pkg.sub2.message,"hello world!")
            self.assertEquals(fsih_pkg.sub2.a,42 * 2)
            try:
                import fsih_pkg.sub3
            except ImportError:
                pass
            else:
                assert False, "ImportError not raised"
        finally:
            for k in sys.modules.keys():
                if k.startswith("fsih_"):
                    del sys.modules[k]

Example 123

Project: glymur
Source File: test_printing.py
View license
    @unittest.skipIf('lxml' not in sys.modules.keys(), "No lxml")
    @unittest.skipIf(sys.hexversion < 0x03000000,
                     "Only trusting python3 for printing non-ascii chars")
    def test_xml_cyrrilic(self):
        """Should be able to print XMLBox with utf-8 encoding (cyrrillic)."""
        # Seems to be inconsistencies between different versions of python2.x
        # as to what gets printed.
        #
        # 2.7.5 (fedora 19) prints xml entities.
        # 2.7.3 seems to want to print hex escapes.
        text = u"""<country>Россия</country>"""
        if sys.hexversion < 0x03000000:
            xml = ET.parse(StringIO(text.encode('utf-8')))
        else:
            xml = ET.parse(StringIO(text))

        xmlbox = glymur.jp2box.XMLBox(xml=xml)
        actual = str(xmlbox)
        if sys.hexversion < 0x03000000:
            expected = ("XML Box (xml ) @ (-1, 0)\n"
                        ("    <country>&#1056;&#1086;&#1089;&#1089;"
                         "&#1080;&#1103;</country>"))
        else:
            expected = ("XML Box (xml ) @ (-1, 0)\n"
                        "    <country>Россия</country>")

        self.assertEqual(actual, expected)

Example 124

Project: python-proboscis
Source File: run_tests.py
View license
    def store_modules(self):
        self.module_names = list(sys.modules.keys())

Example 125

Project: connectomeviewer
Source File: build_helpers.py
View license
    def run(self):
        """ build and install nipy in a temporary location. """
        install = self.distribution.get_command_obj('install')
        install.install_scripts = self.temp_install_dir
        install.install_base    = self.temp_install_dir
        install.install_platlib = self.temp_install_dir 
        install.install_purelib = self.temp_install_dir 
        install.install_data    = self.temp_install_dir 
        install.install_lib     = self.temp_install_dir 
        install.install_headers = self.temp_install_dir 
        install.run()

        # Horrible trick to reload nipy with our temporary install
        for key in sys.modules.keys():
            if key.startswith('nipy'):
                sys.modules.pop(key, None)
        sys.path.append(os.path.abspath(self.temp_install_dir))
        # Pop the cwd
        sys.path.pop(0)
        import nipy

Example 126

Project: duecredit
Source File: injector.py
View license
    def _handle_fresh_imports(self, name, import_level_prefix, level):
        """Check which modules were imported since last point we checked and add them to the queue
        """
        new_imported_modules = set(sys.modules.keys()) - self._processed_modules - self.__queue_to_process
        if new_imported_modules:
            lgr.log(4, "%s%d new modules were detected upon import of %s (level=%s)",
                    import_level_prefix, len(new_imported_modules), name, level)
            # lgr.log(2, "%s%d new modules were detected: %s, upon import of %s (level=%s)",
            #        import_level_prefix, len(new_imported_modules), new_imported_modules, name, level)
        for imported_mod in new_imported_modules:
            if imported_mod in self.__queue_to_process:
                # we saw it already
                continue
            # lgr.log(1, "Name %r was imported as %r (path: %s). fromlist: %s, level: %s",
            #        name, mod.__name__, getattr(mod, '__path__', None), fromlist, level)
            # package
            package = imported_mod.split('.', 1)[0]
            if package != imported_mod \
                    and package not in self._processed_modules \
                    and package not in self.__queue_to_process:
                # if its parent package wasn't yet imported before
                lgr.log(3, "%sParent of %s, %s wasn't yet processed, adding to the queue",
                        import_level_prefix, imported_mod, package)
                self.__queue_to_process.add(package)
            self.__queue_to_process.add(imported_mod)

Example 127

Project: scons
Source File: SConfTests.py
View license
    def _resetSConfState(self):
        # OK, this is tricky, and I do not know if everything is sane.
        # We try to reset scons' state (including all global variables)
        import SCons.SConsign
        SCons.SConsign.write() # simulate normal scons-finish
        for n in sys.modules.keys():
            if n.split('.')[0] == 'SCons' and n[:12] != 'SCons.compat':
                m = sys.modules[n]
                if isinstance(m, ModuleType):
                    # if this is really a scons module, clear its namespace
                    del sys.modules[n]
                    m.__dict__.clear()
        # we only use SCons.Environment and SCons.SConf for these tests.
        import SCons.Environment
        import SCons.SConf
        self.Environment = SCons.Environment
        self.SConf = SCons.SConf
        # and we need a new environment, cause references may point to
        # old modules (well, at least this is safe ...)
        self.scons_env = self.Environment.Environment()
        self.scons_env.AppendENVPath('PATH', os.environ['PATH'])

        # we want to do some autodetection here
        # this stuff works with
        #    - cygwin on Windows (using cmd.exe, not bash)
        #    - posix
        #    - msvc on Windows (hopefully)
        if (not self.scons_env.Detect( self.scons_env.subst('$CXX') ) or
            not self.scons_env.Detect( self.scons_env.subst('$CC') ) or
            not self.scons_env.Detect( self.scons_env.subst('$LINK') )):
            raise Exception("This test needs an installed compiler!")
        if self.scons_env['CXX'] == 'g++':
            global existing_lib
            existing_lib = 'm'

        if sys.platform in ['cygwin', 'win32']:
             # On Windows, SCons.Platform.win32 redefines the builtin
             # file() and open() functions to close the file handles.
             # This interferes with the unittest.py infrastructure in
             # some way.  Just sidestep the issue by restoring the
             # original builtin functions whenever we have to reset
             # all of our global state.

             import builtins
             import SCons.Platform.win32

             builtins.file = SCons.Platform.win32._builtin_file
             builtins.open = SCons.Platform.win32._builtin_open

Example 128

Project: flumotion
Source File: reload.py
View license
def reloadFlumotion():
    """Properly reload all flumotion-related modules currently loaded."""
    needs_reload = lambda name: name.startswith('flumotion')
    for name in filter(needs_reload, sys.modules.keys()):
        if not name in sys.modules:
            log.warning("reload", "hm, %s disappeared from the modules" % name)
            continue
        module = sys.modules[name]
        if not module:
            log.log("reload", "hm, module '%s' == None" % name)
            continue
        log.log("reload", "rebuilding %s" % module)
        try:
            rebuild(module, doLog=0)
        except SyntaxError, msg:
            from flumotion.common import errors
            raise errors.ReloadSyntaxError(msg)

    # FIXME: ignores programmatic FLU_DEBUG changes over the life of a
    # process
    reinitialize = {'flumotion.extern.log.log':
                    lambda mod: mod.init('FLU_DEBUG')}
    for name in reinitialize:
        if name in sys.modules:
            reinitialize[name](sys.modules[name])

Example 129

Project: idapython
Source File: py_idaapi.py
View license
def require(modulename, package=None):
    """
    Load, or reload a module.

    When under heavy development, a user's tool might consist of multiple
    modules. If those are imported using the standard 'import' mechanism,
    there is no guarantee that the Python implementation will re-read
    and re-evaluate the module's Python code. In fact, it usually doesn't.
    What should be done instead is 'reload()'-ing that module.

    This is a simple helper function that will do just that: In case the
    module doesn't exist, it 'import's it, and if it does exist,
    'reload()'s it.

    For more information, see: <http://www.hexblog.com/?p=749>.
    """
    if modulename in sys.modules.keys():
        reload(sys.modules[modulename])
    else:
        import importlib
        import inspect
        m = importlib.import_module(modulename, package)
        frame_obj, filename, line_number, function_name, lines, index = inspect.stack()[1]
        importer_module = inspect.getmodule(frame_obj)
        if importer_module is None: # No importer module; called from command line
            importer_module = sys.modules['__main__']
        setattr(importer_module, modulename, m)
        sys.modules[modulename] = m
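
A Python 3 rendering of the same import-or-reload idea; importlib.reload replaces the Python 2 builtin reload() used above, and the importer-module bookkeeping is left out for brevity.

import importlib
import sys

def require(modulename):
    # Reload if already imported, otherwise import it for the first time
    if modulename in sys.modules.keys():
        return importlib.reload(sys.modules[modulename])
    return importlib.import_module(modulename)

mod = require('json')   # first call: plain import
mod = require('json')   # second call: reload of the cached module
print(mod.dumps({'ok': True}))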

Example 130

Project: spiderosm
Source File: misc.py
View license
def module_loaded_q(mod_name):
    return mod_name in sys.modules.keys()
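
The same test works without materialising the keys, since membership checks apply directly to sys.modules in both Python 2 and Python 3; module_loaded_q keeps the original name for comparison.

import sys

def module_loaded_q(mod_name):
    # 'in' on the mapping itself avoids building a key list/view first
    return mod_name in sys.modules

print(module_loaded_q('sys'))             # True
print(module_loaded_q('no_such_module'))  # False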

Example 131

Project: glymur
Source File: test_printing.py
View license
    @unittest.skipIf('lxml' not in sys.modules.keys(), "No lxml")
    def test_bom(self):
        """
        Byte order markers are illegal in UTF-8.  Issue 185

        Original test file was input/nonregression/issue171.jp2
        """
        fptr = BytesIO()

        s = "<?xpacket begin='\ufeff' id='W5M0MpCehiHzreSzNTczkc9d'?>"
        s += "<stuff>goes here</stuff>"
        s += "<?xpacket end='w'?>"
        data = s.encode('utf-8')
        fptr.write(data)
        fptr.seek(0)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            box = glymur.jp2box.XMLBox.parse(fptr, 0, 8 + len(data))
            # No need to verify, it's enough that we don't error out.
            str(box)

Example 132

Project: C-PAC
Source File: build_helpers.py
View license
    def run(self):
        """ build and install nipy in a temporary location. """
        install = self.distribution.get_command_obj('install')
        install.install_scripts = self.temp_install_dir
        install.install_base    = self.temp_install_dir
        install.install_platlib = self.temp_install_dir 
        install.install_purelib = self.temp_install_dir 
        install.install_data    = self.temp_install_dir 
        install.install_lib     = self.temp_install_dir 
        install.install_headers = self.temp_install_dir 
        install.run()

        # Horrible trick to reload nipy with our temporary install
        for key in sys.modules.keys():
            if key.startswith('nipy'):
                sys.modules.pop(key, None)
        sys.path.append(os.path.abspath(self.temp_install_dir))
        # Pop the cwd
        sys.path.pop(0)
        import nipy

Example 133

Project: deepTools
Source File: utilities.py
View license
def bam_total_reads(bam_handle, chroms_to_ignore):
    """Count the total number of mapped reads in a BAM file, filtering
    the chromosome given in chroms_to_ignore list
    """
    if chroms_to_ignore:
        import pysam

        lines = pysam.idxstats(bam_handle.filename)
        lines = toString(lines)
        if type(lines) is str:
            lines = lines.strip().split('\n')
        if len(lines) == 0:
            # check if this is a test running under nose
            # in which case it will fail.
            if len([val for val in sys.modules.keys() if val.find("nose") >= 0]):
                sys.stderr.write("To run this code inside a test use disable "
                                 "output buffering `nosetest -s`\n".format(bam_handle.filename))
            else:
                sys.stderr.write("Error running idxstats on {}\n".format(bam_handle.filename))
        tot_mapped_reads = 0
        for line in lines:
            chrom, _len, nmapped, _nunmapped = line.split('\t')
            if chrom not in chroms_to_ignore:
                tot_mapped_reads += int(nmapped)

    else:
        tot_mapped_reads = bam_handle.mapped

    return tot_mapped_reads
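
The nose check above can also be written as a plain membership test; this sketch keeps the same heuristic of treating any loaded module whose name contains "nose" as evidence of running under nose.

import sys

# Equivalent to len([val for val in sys.modules.keys() if val.find("nose") >= 0]) > 0
running_under_nose = any('nose' in name for name in sys.modules.keys())
print(running_under_nose)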

Example 134

Project: hifive
Source File: complete_hic_project.py
View license
def run(args):
    if 'mpi4py' in sys.modules.keys():
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        num_procs = comm.Get_size()
    else:
        comm = None
        rank = 0
        num_procs = 1
    if args.chroms is None:
        chroms = []
    else:
        chroms = args.chroms.split(',')
        if len(chroms) == 1 and chroms[0] == '':
            chroms = []
    if args.matrix is not None and args.binned is None:
        if rank == 0:
            print sys.stderr, ("Loading data from matrices is only supported for binned data.\n")
        return 1
    if args.algorithm.count('binning') > 0:
        if args.binned is not None:
            if rank == 0:
                print sys.stderr, ("This normalization algorithm is not currently supported for binned data.\n")
            return 1
        model = args.model.split(',')
        modelbins = args.modelbins.split(',')
        parameters = args.parameters.split(',')
        for i in range(len(modelbins)):
            try:
                modelbins[i] = int(modelbins[i])
            except:
                if rank == 0:
                    print sys.stderr, ("Not all arguments in -n/--modelbins could be converted to integers.\n")
                return 1
        if len(model) != len(modelbins) or len(model) != len(parameters):
            if rank == 0:
                print sys.stderr, ("-v/--model, -n/--modelbins, and -u/--parameter-types must be equal lengths.\n")
            return 1
    if args.binned == 0 and args.bed is None:
        if rank == 0:
            print  >> sys.stderr, ("Non-uniforming binning (binned=0) must have a bed file to read bin partitions from.\n"),
        return None
    elif args.binned is None or args.binned < 1 and args.length is not None:
        if rank == 0:
            print  >> sys.stderr, ("Binning from a chromosome length file needs a positive integer value for binning.\n"),
        return None
    if args.prefix is None:
        fend_fname, data_fname, project_fname = args.output
    else:
        fend_fname = "%s.fends" % args.prefix
        data_fname = "%s.hcd" % args.prefix
        project_fname = "%s.hcp" % args.prefix
    if rank == 0:
        fends = Fend(fend_fname, mode='w', binned=args.binned, silent=args.silent)
        if args.bed is not None:
            if args.binned is not None and args.binned == 0:
                fends.load_bins(args.bed, genome_name=args.genome, format='bed')
            else:
                fends.load_fends(args.bed, genome_name=args.genome, re_name=args.re, format="bed")
        elif args.fend is not None:
            fends.load_fends(args.fend, genome_name=args.genome, re_name=args.re, format="fend")
        else:
            fends.load_bins(args.length, genome_name=args.genome, format='len')
        fends.save()
        del fends
        data = HiCData(data_fname, 'w', silent=args.silent)
        if not args.bam is None: 
            data.load_data_from_bam(fend_fname, args.bam, args.insert, args.skipdups)
        elif not args.raw is None: 
            data.load_data_from_raw(fend_fname, args.raw, args.insert, args.skipdups)
        elif not args.mat is None: 
            data.load_data_from_mat(fend_fname, args.mat, args.insert)
        elif not args.matrix is None:
            data.load_binned_data_from_matrices(fend_fname, args.matrix, format=None)
        data.save()
        del data
        for i in range(1, num_procs):
            comm.send(1, dest=i, tag=11)
    else:
        comm.recv(source=0, tag=11)
    hic = HiC(project_fname, 'w', silent=args.silent)
    hic.load_data(data_fname)
    hic.filter_fends(mininteractions=args.minint, mindistance=args.mindist, maxdistance=args.maxdist)
    hic.find_distance_parameters(minsize=args.minbin, numbins=args.numbins)
    precorrect = False
    if args.algorithm in ['binning', 'binning-express', 'binning-probability']:
        hic.find_binning_fend_corrections(mindistance=args.mindist, maxdistance=args.maxdist, parameters=parameters,
                                             chroms=chroms, num_bins=modelbins, model=model, usereads=args.binreads,
                                             learning_threshold=args.threshold, max_iterations=args.biniter,
                                             pseudocounts=args.pseudo)
        precorrect = True
    if args.algorithm in ['probability', 'binning-probability']:
        hic.find_probability_fend_corrections(mindistance=args.mindist, maxdistance=args.maxdist,
                                              minchange=args.change, max_iterations=args.probiter,
                                              learningstep=args.step, chroms=chroms,
                                              precalculate=args.precalc, precorrect=precorrect)
    elif args.algorithm in ['express', 'binning-express']:
        hic.find_express_fend_corrections(iterations=args.expiter, mindistance=args.mindist,
                                          maxdistance=args.maxdist, remove_distance=args.nodist,
                                          usereads=args.expreads, mininteractions=args.minint,
                                          chroms=chroms, minchange=args.change, precorrect=precorrect,
                                          binary=args.binary, kr=args.kr)
    if rank == 0:
        hic.save()

Example 135

Project: QSTK
Source File: gui.py
View license
    def __init__(self):
        self._afterid = 0
        self._progress = [None]
        self._cancel = [0]
        self._filename = None
        self._init_dir = None

        # Store a copy of sys.modules, so that we can restore it
        # later.  This is useful for making sure that we reload
        # everything when we re-build its documentation.  This will
        # *not* reload the modules that are present when the EpydocGUI
        # is created, but that should only contain some builtins, some
        # epydoc modules, Tkinter, pickle, and thread..
        self._old_modules = sys.modules.keys()

        # Create the main window.
        self._root = Tk()
        self._root['background']=BG_COLOR
        self._root.bind('<Control-q>', self.destroy)
        self._root.bind('<Alt-q>', self.destroy)
        self._root.bind('<Alt-x>', self.destroy)
        self._root.bind('<Control-x>', self.destroy)
        #self._root.bind('<Control-d>', self.destroy)
        self._root.title('Epydoc')
        self._rootframe = Frame(self._root, background=BG_COLOR,
                               border=2, relief='raised')
        self._rootframe.pack(expand=1, fill='both', padx=2, pady=2)

        # Set up the basic frames.  Do not pack the options frame or
        # the messages frame; the GUI has buttons to expand them.
        leftframe = Frame(self._rootframe, background=BG_COLOR)
        leftframe.pack(expand=1, fill='both', side='left')
        optsframe = Frame(self._rootframe, background=BG_COLOR)
        mainframe = Frame(leftframe, background=BG_COLOR)
        mainframe.pack(expand=1, fill='both', side='top')
        ctrlframe = Frame(mainframe, background=BG_COLOR)
        ctrlframe.pack(side="bottom", fill='x', expand=0)
        msgsframe = Frame(leftframe, background=BG_COLOR)

        self._optsframe = optsframe
        self._msgsframe = msgsframe

        # Initialize all the frames, etc.
        self._init_menubar()
        self._init_progress_bar(mainframe)
        self._init_module_list(mainframe)
        self._init_options(optsframe, ctrlframe)
        self._init_messages(msgsframe, ctrlframe)
        self._init_bindings()

        # Set up logging
        self._logger = GUILogger(self._progress, self._cancel)
        log.register_logger(self._logger)

        # Open the messages pane by default.
        self._messages_toggle()

Example 136

View license
    def __init__(self, app_id, api_key, hosts=None, _transport=None):
        """
        Algolia Search Client initialization

        @param app_id the application ID you have in your admin interface
        @param api_key a valid API key for the service
        @param hosts_array the list of hosts that you have received for the service
        """
        self._transport = Transport() if _transport is None else _transport

        if not hosts:
            fallbacks = [
                '%s-1.algolianet.com' % app_id,
                '%s-2.algolianet.com' % app_id,
                '%s-3.algolianet.com' % app_id,
            ]
            random.shuffle(fallbacks)

            self._transport.read_hosts = ['%s-dsn.algolia.net' % app_id]
            self._transport.read_hosts.extend(fallbacks)
            self._transport.write_hosts = ['%s.algolia.net' % app_id]
            self._transport.write_hosts.extend(fallbacks)
        else:
            self._transport.read_hosts = hosts
            self._transport.write_hosts = hosts

        self._transport.headers = {
            'X-Algolia-Application-Id': app_id,
            'Content-Type': 'gzip',
            'Accept-Encoding': 'gzip',
            'User-Agent': 'Algolia for Python (%s)' % VERSION
        }

        self._app_id = app_id
        self.api_key = api_key

        # Fix for AppEngine bug when using urlfetch_stub
        if 'google.appengine.api.apiproxy_stub_map' in sys.modules.keys():
            self.headers.pop('Accept-Encoding', None)

Example 137

Project: ispyd
Source File: python.py
View license
    def do_modules(self, pattern):
        """modules
        Display the currently loaded Python modules. By default this will
        only display the root module for any packages. If you wish to see
        all modules, including sub modules of packages, use 'modules *'.
        The value '*' can be replaced with any glob pattern to be more
        selective. For example 'modules ispyd.*' will list just the sub
        modules for this package."""

        if pattern:
            result = []
            for name in sys.modules.keys():
                if fnmatch.fnmatch(name, pattern):
                    result.append(name)
            print >> self.stdout, sorted(result)
        else:
            result = []
            for name in sys.modules.keys():
                if not '.' in name:
                    result.append(name)
            print >> self.stdout, sorted(result)
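
A standalone version of the same glob filter over loaded module names; list_modules is an illustrative name, not part of ispyd, and the default branch mirrors the "top-level packages only" behaviour.

import fnmatch
import sys

def list_modules(pattern=None):
    names = list(sys.modules.keys())
    if pattern:
        return sorted(n for n in names if fnmatch.fnmatch(n, pattern))
    return sorted(n for n in names if '.' not in n)

print(list_modules()[:5])               # a few top-level module names
print(list_modules('encodings.*')[:5])  # sub-modules of the encodings package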

Example 138

Project: QSTK
Source File: gui.py
View license
    def _go(self, *e):
        if len(self._module_list.get(0,'end')) == 0:
            self._root.bell()
            return

        if self._progress[0] != None:
            self._cancel[0] = 1
            return

        # Construct the argument list for document().
        opts = self._getopts()
        self._progress[0] = 0.0
        self._cancel[0] = 0
        args = (opts, self._cancel, self._progress)

        # Clear the messages window.
        self._messages['state'] = 'normal'
        self._messages.delete('0.0', 'end')
        self._messages['state'] = 'disabled'
        self._logger.clear()

        # Restore the module list.  This will force re-loading of
        # anything that we're documenting.
        for m in sys.modules.keys():
            if m not in self._old_modules:
                del sys.modules[m]

        # [xx] Reset caches??
    
        # Start documenting
        start_new_thread(document, args)

        # Start the progress bar.
        self._go_button['text'] = 'Stop'
        self._afterid += 1
        dt = 300 # How often to update, in milliseconds
        self._update(dt, self._afterid)

Example 139

Project: aliyun-cli
Source File: aliyunOpenApiData.py
View license
    def getInstance(self, operation,cmdName,version=None):
        if self.path is None:
            return None
        moduleName=operation+'Request'
        try:
            fp, pathname, desc = imp.find_module(moduleName,[self.path])
            imp.load_module(moduleName, fp, pathname, desc)
            modules_keys=sys.modules.keys()
            for key in modules_keys:
                if key==moduleName:
                    try:
                        module = sys.modules[moduleName]
                        mInstance= getattr(module, moduleName)()
                        className=getattr(module,moduleName)
                        return mInstance,className
                    except Exception as err:
                        print err
        except Exception as err:
            pass
        return None, None

Example 140

Project: glymur
Source File: test_jp2box_uuid.py
View license
    def test_append_xmp_uuid(self):
        """Should be able to append an XMP UUID box."""
        the_uuid = uuid.UUID('be7acfcb-97a9-42e8-9c71-999491e3afac')
        raw_data = SimpleRDF.encode('utf-8')
        with tempfile.NamedTemporaryFile(suffix='.jp2') as tfile:
            shutil.copyfile(self.jp2file, tfile.name)
            jp2 = Jp2k(tfile.name)
            ubox = glymur.jp2box.UUIDBox(the_uuid=the_uuid, raw_data=raw_data)
            jp2.append(ubox)

            # Should be two UUID boxes now.
            expected_ids = ['jP  ', 'ftyp', 'jp2h', 'uuid', 'jp2c', 'uuid']
            actual_ids = [b.box_id for b in jp2.box]
            self.assertEqual(actual_ids, expected_ids)

            # The data should be an XMP packet, which gets interpreted as
            # an ElementTree.
            if 'lxml' in sys.modules.keys():
                self.assertTrue(isinstance(jp2.box[-1].data,
                                           lxml.etree._ElementTree))
            else:
                self.assertTrue(isinstance(jp2.box[-1].data,
                                           ET.ElementTree))

Example 141

Project: glymur
Source File: test_printing.py
View license
    @unittest.skipIf('lxml' not in sys.modules.keys(), "No lxml")
    def test_suppress_codestream_old_option(self):
        """
        Verify printing with codestream suppressed, deprecated
        """
        jp2 = Jp2k(self.jp2file)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            glymur.set_printoptions(codestream=False)

        actual = str(jp2)

        # Get rid of the file line, that's kind of volatile.
        actual = '\n'.join(actual.splitlines()[1:])

        expected = fixtures.nemo_dump_no_codestream
        self.assertEqual(actual, expected)

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            opt = glymur.get_printoptions()['codestream']
        self.assertFalse(opt)

Example 142

Project: robothon
Source File: session.py
View license
def save(variables=None, file=SAVEFILE, dictionary=None, verbose=False):

    """saves variables from a numpy session to a file.  Variables
    which won't pickle are "proxied" if possible.

    'variables'       a string of comma-separated variables: e.g. "a,b,c"
                      Defaults to dictionary.keys().

    'file'            a filename or file object for the session file.

    'dictionary'      the dictionary in which to look up the variables.
                      Defaults to the caller's globals()

    'verbose'         print additional debug output when True.
    """

    global VERBOSE
    VERBOSE = verbose

    _update_proxy_types()

    if isinstance(file, str):
        file = open(file, "wb")

    if dictionary is None:
        dictionary = _callers_globals()

    if variables is None:
        keys = dictionary.keys()
    else:
        keys = variables.split(",")

    source_modules = _callers_modules() + sys.modules.keys()

    p = pickle.Pickler(file, protocol=2)

    _verbose("variables:",keys)
    for k in keys:
        v = dictionary[k]
        _verbose("saving", k, type(v))
        try:  # Try to write an ordinary pickle
            p.dump(v)
            _verbose("pickled", k)
        except (pickle.PicklingError, TypeError, SystemError):
            # Use proxies for stuff that won't pickle
            if isinstance(v, type(sys)): # module
                proxy = _ModuleProxy(v.__name__, save=True)
            else:
                try:
                    module, name = _locate(source_modules, v)
                except ObjectNotFound:
                    _errout("warning: couldn't find object",k,
                            "in any module... skipping.")
                    continue
                else:
                    proxy = _ObjectProxy(module, name, type(v), save=True)
            p.dump(proxy)
    o = _SaveSession(keys, save=True)
    p.dump(o)
    file.close()
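
A porting note on the source_modules line above: under Python 3, dict views cannot be concatenated with '+', so an explicit list() is needed; '__main__' stands in here for whatever _callers_modules() would return.

import sys

source_modules = ['__main__'] + list(sys.modules.keys())
print(len(source_modules) > 1)  # True: the interpreter always has modules loaded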

Example 143

Project: habitat
Source File: dynamicloader.py
View license
def load(loadable, force_reload=False):
    """
    Attempts to dynamically load *loadable*

    *loadable*: a class, a function, a module, or a string that is a
    dotted-path to one a class function or module

    Some examples::

        load(MyClass) # returns MyClass
        load(MyFunction) # returns MyFunction
        load("mypackage") # returns the mypackage module
        load("packagea.packageb") # returns the packageb module
        load("packagea.packageb.aclass") # returns aclass

    """

    old_modules = sys.modules.keys()

    if isinstance(loadable, basestring):
        if len(loadable) <= 0:
            raise ValueError("loadable(str) must have non zero length")

        components = loadable.split(".")

        if "" in components or len(components) == 0:
            raise ValueError("loadable(str) contains empty components")

        name_loaded = loadable

        try:
            # This will work if it is a module
            __import__(loadable)
            loadable = sys.modules[loadable]
        except ImportError:
            # This will work if it is a class or a function
            module_name = ".".join(components[:-1])
            target_name = components[-1]

            __import__(module_name)

            try:
                loadable = getattr(sys.modules[module_name], target_name)
            except KeyError:
                raise ImportError("Couldn't import " + loadable)

        # If neither worked; an error will have been raised.

        name_real = fullname(loadable)
        if name_real != name_loaded:
            logger.debug("loaded {0} => {1}".format(name_loaded, name_real))
        else:
            logger.debug("loaded {0}".format(name_real))

    # If force_reload is set, but it's the first time we've loaded this
    # loadable anyway, there's no point calling reload().

    # There could be a race condition between already_loaded and __import__,
    # however the worst that could happen is for already_loaded to be False
    # when infact by the time __import__ was reached, it had been loaded by
    # another thread. In this case the side effect is that reload may be
    # called on it. No bad effects, just a slight performance hit from double
    # loading. No big deal.

    if inspect.isclass(loadable) or inspect.isfunction(loadable):
        already_loaded = loadable.__module__ in old_modules

        if force_reload and already_loaded:
            logger.debug("reloading {0}".format(fullname(loadable)))

            # Reload the module and then find the new version of loadable
            module = sys.modules[loadable.__module__]
            reload(module)
            loadable = getattr(module, loadable.__name__)
    elif inspect.ismodule(loadable):
        already_loaded = loadable.__name__ in old_modules

        if force_reload and already_loaded:
            logger.debug("reloading {0}".format(fullname(loadable)))

            # Module objects are updated in place.
            reload(loadable)
    else:
        raise TypeError("load() takes a string, class, function or module")

    return loadable

Example 144

Project: imageio
Source File: testing.py
View license
def _clear_imageio():
    # Remove ourselves from sys.modules to force an import
    for key in list(sys.modules.keys()):
        if key.startswith('imageio'):
            del sys.modules[key]

Example 145

View license
    def setUp(self):
        if 'safe.metadata' in sys.modules.keys():
            del sys.modules['safe.metadata']
        self.assertFalse('safe.metadata' in sys.modules.keys())
        os.environ['LANG'] = 'id'

Example 146

Project: physt
Source File: matplotlib.py
View license
def _get_cmap(kwargs):
    """Get the colour map for plots that support it.

    Parameters
    ----------
    cmap : str or colors.Colormap
        A map or an instance of cmap. This can also be a seaborn palette
        (if seaborn is installed).

    Returns
    -------
    colors.Colormap
    """
    cmap = kwargs.pop("cmap", "Greys")
    if isinstance(cmap, str):
        try:
            cmap = plt.get_cmap(cmap)
        except BaseException as exc:
            try:
                # Trick to use seaborn palettes without clearing the seaborn style
                import sys
                if "seaborn" in sys.modules.keys():
                    sns = sys.modules["seaborn"]
                else:
                    import seaborn.apionly as sns
                cmap = sns.color_palette(as_cmap=True)
            except ImportError:
                raise exc
    return cmap
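
The "reuse it only if the caller already imported it" idea from the seaborn branch above, reduced to its core; seaborn_if_loaded is an illustrative name, and returning None stands in for falling back to an explicit import.

import sys

def seaborn_if_loaded():
    # Avoid importing seaborn (and its style side effects) ourselves;
    # only reuse it when it is already in sys.modules.
    if "seaborn" in sys.modules.keys():
        return sys.modules["seaborn"]
    return None

print(seaborn_if_loaded())  # None unless seaborn was imported earlier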

Example 147

Project: MCEdit-Unified
Source File: brush.py
View license
    def tryImport(self, name, dir):
        """
        Imports a brush module. Called by importBrushModules
        :param name, name of the module to import.
        """
        if dir != "stock-brushes":
            embeded = False
        else:
            embeded = True
        try:
            path = os.path.join(dir, (name + ".py"))
            if type(path) == unicode and DEF_ENC != "UTF-8":
                path = path.encode(DEF_ENC)
            globals()[name] = m = imp.load_source(name, path)
            if not embeded:
                old_trn_path = albow.translate.getLangPath()
                if "trn" in sys.modules.keys():
                    del sys.modules["trn"]
                import albow.translate as trn
                trn_path = os.path.join(directories.brushesDir, name)
                if os.path.exists(trn_path):
                    trn.setLangPath(trn_path)
                    trn.buildTranslation(config.settings.langCode.get())
                m.trn = trn
                albow.translate.setLangPath(old_trn_path)
                albow.translate.buildTranslation(config.settings.langCode.get())
                self.editor.mcedit.set_update_ui(True)
                self.editor.mcedit.set_update_ui(False)
            m.materials = self.editor.level.materials
            m.tool = self
            m.createInputs(m)
            return m
        except Exception, e:
            print traceback.format_exc()
            alert(_(u"Exception while importing brush mode {}. See console for details.\n\n{}").format(name, e))
            return object()

Example 148

View license
def custom_import_install():
    if __builtin__.__import__ == NATIVE_IMPORTER:
        INVALID_MODULES.update(sys.modules.keys())
        __builtin__.__import__ = custom_importer
