sys.getrecursionlimit

Here are the examples of the python api sys.getrecursionlimit taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

71 Examples

Example 51

Project: PipelineConstructionSet Source File: arguments.py
Function: expand_args
def expandArgs( *args, **kwargs ) :
    """
    'Flattens' the arguments list: recursively replaces any iterable argument in *args by a tuple of its
    elements that will be inserted at its place in the returned arguments.

    By default will return elements depth first, from root to leaves.  Set postorder or breadth to control order.

    :Keywords:
        limit : int
            will specify the nested depth limit after which iterables are returned as they are

        type
            for type='list' will only expand lists, by default type='all' expands any iterable sequence

        postorder : bool
             will return elements depth first, from leaves to roots

        breadth : bool
            will return elements breadth first, roots, then first depth level, etc.

    For a nested list represent trees::

        a____b____c
        |    |____d
        e____f
        |____g

    preorder(default) :

        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1 )
        ('a', 'b', ['c', 'd'], 'e', 'f', 'g')
        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'] )
        ('a', 'b', 'c', 'd', 'e', 'f', 'g')

    postorder :

        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True, limit=1)
        ('b', ['c', 'd'], 'a', 'f', 'g', 'e')
        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True)
        ('c', 'd', 'b', 'a', 'f', 'g', 'e')

    breadth :

        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1, breadth=True)
        ('a', 'e', 'b', ['c', 'd'], 'f', 'g')
        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], breadth=True)
        ('a', 'e', 'b', 'f', 'g', 'c', 'd')


    Note that with default depth (unlimited) and order (preorder), if passed a pymel Tree
    result will be the equivalent of doing a preorder traversal : [k for k in iter(theTree)] """

    tpe = kwargs.get('type', 'all')
    # Default limit is the interpreter recursion limit, i.e. effectively unbounded.
    limit = kwargs.get('limit', sys.getrecursionlimit())
    postorder = kwargs.get('postorder', False)
    breadth = kwargs.get('breadth', False)
    if tpe=='list' or tpe==list :
        def _expandArgsTest(arg): return type(arg)==list
    elif tpe=='all' :
        def _expandArgsTest(arg): return isIterable(arg)
    else :
        # Call-style raise: the legacy `raise ValueError, msg` form is a syntax
        # error on Python 3 and this spelling is equally valid on Python 2.
        raise ValueError("unknown expand type=%s" % str(tpe))

    if postorder :
        return postorderArgs (limit, _expandArgsTest, *args)
    elif breadth :
        return breadthArgs (limit, _expandArgsTest, *args)
    else :
        return preorderArgs (limit, _expandArgsTest, *args)

Example 52

Project: PipelineConstructionSet Source File: arguments.py
Function: iterateargs
def iterateArgs( *args, **kwargs ) :
    """ Iterates through all arguments list: recursively replaces any iterable argument in *args by a tuple of its
    elements that will be inserted at its place in the returned arguments.

    By default will return elements depth first, from root to leaves.  Set postorder or breadth to control order.

    :Keywords:
        limit : int
            will specify the nested depth limit after which iterables are returned as they are

        type
            for type='list' will only expand lists, by default type='all' expands any iterable sequence

        postorder : bool
             will return elements depth first, from leaves to roots

        breadth : bool
            will return elements breadth first, roots, then first depth level, etc.

    For a nested list represent trees::

        a____b____c
        |    |____d
        e____f
        |____g

    preorder(default) :

        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1 ))
        ('a', 'b', ['c', 'd'], 'e', 'f', 'g')
        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'] ))
        ('a', 'b', 'c', 'd', 'e', 'f', 'g')

    postorder :

        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True, limit=1 ))
        ('b', ['c', 'd'], 'a', 'f', 'g', 'e')
        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True))
        ('c', 'd', 'b', 'a', 'f', 'g', 'e')

    breadth :

        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1, breadth=True))
        ('a', 'e', 'b', ['c', 'd'], 'f', 'g')
        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], breadth=True))
        ('a', 'e', 'b', 'f', 'g', 'c', 'd')

    Note that with default depth (-1 for unlimited) and order (preorder), if passed a pymel Tree
    result will be the equivalent of using a preorder iterator : iter(theTree) """

    tpe = kwargs.get('type', 'all')
    # Default limit is the interpreter recursion limit, i.e. effectively unbounded.
    limit = kwargs.get('limit', sys.getrecursionlimit())
    postorder = kwargs.get('postorder', False)
    breadth = kwargs.get('breadth', False)
    if tpe=='list' or tpe==list :
        def _iterateArgsTest(arg): return type(arg)==list
    elif tpe=='all' :
        def _iterateArgsTest(arg): return isIterable(arg)
    else :
        # Call-style raise: the legacy `raise ValueError, msg` form is a syntax
        # error on Python 3 and this spelling is equally valid on Python 2.
        raise ValueError("unknown expand type=%s" % str(tpe))

    if postorder :
        for arg in postorderIterArgs (limit, _iterateArgsTest, *args) :
            yield arg
    elif breadth :
        for arg in breadthIterArgs (limit, _iterateArgsTest, *args) :
            yield arg
    else :
        for arg in preorderIterArgs (limit, _iterateArgsTest, *args) :
            yield arg

Example 53

Project: astrodendro Source File: test_recursion.py
Function: setup_method
    def setup_method(self, method):
        """Shrink the recursion limit before each test so that deep-recursion
        bugs surface quickly, then build the test dataset."""
        # Remember the caller's limit so the matching teardown can restore it.
        self._oldlimit = sys.getrecursionlimit()
        sys.setrecursionlimit(100)  # Reduce recursion limit dramatically (default is 1000)
        size = 10000  # number of leaves desired in the dendrogram
        self._make_data(size)

Example 54

Project: hebel Source File: serial.py
Function: save
def save(filepath, obj, on_overwrite = 'ignore'):
    """
    Serialize `object` to a file denoted by `filepath`.

    Parameters
    ----------
    filepath : str
        A filename. If the suffix is `.joblib` and joblib can be
        imported, `joblib.dump` is used in place of the regular
        pickling mechanisms; this results in much faster saves by
        saving arrays as separate .npy files on disk. If the file
        suffix is `.npy` than `numpy.save` is attempted on `obj`.
        Otherwise, (c)pickle is used.

    obj : object
        A Python object to be serialized.

    on_overwrite: A string specifying what to do if the file already
                exists.
                ignore: just overwrite it
                backup: make a copy of the file (<filepath>.bak) and
                        delete it when done saving the new copy.
                        this allows recovery of the old version of
                        the file if saving the new one fails

    Raises
    ------
    RuntimeError
        Re-raised when `_save` fails for a reason unrelated to the
        recursion limit (previously such errors were silently swallowed).
    """


    filepath = preprocess(filepath)

    if os.path.exists(filepath):
        if on_overwrite == 'backup':
            backup = filepath + '.bak'
            shutil.move(filepath, backup)
            save(filepath, obj)
            # Best-effort cleanup of the backup copy; a failure to delete it
            # should not fail the save itself.
            try:
                os.remove(backup)
            except Exception as e:
                warnings.warn("Got an error while trying to remove "+backup+":"+str(e))
            return
        else:
            assert on_overwrite == 'ignore'


    try:
        _save(filepath, obj)
    except RuntimeError as e:
        # Sometimes for large theano graphs, pickle/cPickle exceed the
        # maximum recursion depth. The workaround (recommended in
        # http://stackoverflow.com/questions/2134706/hitting-maximum-recursion-depth-using-pythons-pickle-cpickle)
        # is to raise the recursion limit and retry. This does not scale,
        # but there is no better option short of a custom pickle.
        if str(e).find('recursion') != -1:
            warnings.warn('pylearn2.utils.save encountered the following '
                          'error: ' + str(e) +
                          '\nAttempting to resolve this error by calling ' +
                          'sys.setrecursionlimit and retrying')
            old_limit = sys.getrecursionlimit()
            try:
                sys.setrecursionlimit(50000)
                _save(filepath, obj)
            finally:
                # Always restore the caller's recursion limit.
                sys.setrecursionlimit(old_limit)
        else:
            # Bug fix: any other RuntimeError used to be silently swallowed,
            # making save() appear to succeed without writing the file.
            raise

Example 55

Project: pdfmasher Source File: input.py
Function: traverse
def traverse(path_to_html_file, max_levels=sys.maxsize, verbose=0, encoding=None):
    '''
    Recursively traverse all links in the HTML file.

    :param max_levels: Maximum levels of recursion. Must be non-negative. 0
                       implies that no links in the root HTML file are followed.
    :param encoding:   Specify character encoding of HTML files. If `None` it is
                       auto-detected.
    :return:           A pair of lists (breadth_first, depth_first). Each list contains
                       :class:`HTMLFile` objects.
    '''
    assert max_levels >= 0
    depth = 0
    # `all_files` accumulates the breadth-first ordering; `frontier` holds the
    # files discovered at the current depth.
    all_files = [HTMLFile(path_to_html_file, depth, encoding, verbose)]
    frontier = list(all_files)
    while depth < max_levels and len(frontier) > 0:
        depth += 1
        discovered = []
        for page in frontier:
            bad_links = []
            for link in page.links:
                if link.path is None or link.path in all_files:
                    continue
                try:
                    child = HTMLFile(link.path, depth, encoding, verbose, referrer=page)
                    if child.is_binary:
                        raise IgnoreFile('%s is a binary file' % child.path, -1)
                    discovered.append(child)
                    all_files.append(child)
                except IgnoreFile as err:
                    bad_links.append(link)
                    if not err.doesnt_exist or verbose > 1:
                        print(repr(err))
            # Drop links that could not be followed so they are not revisited.
            for link in bad_links:
                page.links.remove(link)

        frontier = list(discovered)
    saved_limit = sys.getrecursionlimit()
    # depth_first() recurses through the whole link graph; allow deep recursion
    # but restore the caller's limit afterwards.
    sys.setrecursionlimit(500000)
    try:
        return all_files, list(depth_first(all_files[0], all_files))
    finally:
        sys.setrecursionlimit(saved_limit)

Example 56

Project: ironpython3 Source File: test_parrot.py
Function: test_main
def test_main(type="short"):
    # Benchmark driver: imports the b0..b6 benchmark modules, runs each one N
    # times (N chosen by the `type` preset) and prints per-test timings.
    # Note: Python 2 code (print statements).
    oldRecursionDepth = sys.getrecursionlimit()
    try:
        # Benchmarks may recurse close to the default limit; give headroom.
        sys.setrecursionlimit(1001)
        t0 = clock()
        import b0
        import b1
        import b2
        import b3
        import b4
        import b5
        import b6
        print 'import time = %.2f' % (clock()-t0)

        tests = [b0,b1,b2,b3,b4,b5,b6]
        # Repetition count per preset; "short" and "full" run each test once.
        N = { "short" : 1, "full" : 1, "medium" : 2, "long" : 4 }[type]

        results = {}

        t0 = clock()
        for i in range(N):
            for test in tests:
                ts0 = clock()
                test.main()
                tm = (clock()-ts0)
                results.setdefault(test, []).append(tm)
                print '%.2f sec running %s' % ( tm, test.__name__)

        for test in tests:
            print '%s = %f -- %r' % (test.__name__, sum(results[test])/N, results[test])

        print 'all done in %.2f sec' % (clock()-t0)
    finally:
        # Restore the interpreter's recursion limit no matter what failed.
        sys.setrecursionlimit(oldRecursionDepth)

Example 57

Project: tryalgo Source File: biconnected_components.py
def cut_nodes_edges2(graph):
    """Bi-connected components, alternative recursive implementation

    :param graph: undirected graph. in listlist format. Cannot be in listdict format.
    :assumes: graph has about 10^4 vertices at most, otherwise memory limit is reached
    :returns: a tuple with the list of cut-nodes and the list of cut-edges
    :complexity: `O(|V|+|E|)` in average, `O(|V|+|E|^2)` in worst case due to use of dictionary
    """
    N = len(graph)
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(1000, N + 42))  # 5 is the true constant, but that makes the builds fail
    edges = set((i, j) for i in range(N) for j in graph[i] if i <= j)
    nodes = set()
    NOT = -2  # not yet visited; with -1 there is a nasty bug because of `marked[v] != prof - 1`
    FIN = -3  # already visited
    marked = [NOT] * N # when non-negative, this holds the node's depth in the DFS
    def DFS(n, prof=0):
        """Recursively walks the graph, updates the set of (cut-)edges and returns
        the earliest vertex (in traversal order) that this subtree can climb back to."""
        if marked[n] == FIN:  # only happens when there are several connected components
            return
        if marked[n] != NOT:
            return marked[n]
        marked[n] = prof
        m = float('inf')
        count = 0  # only useful for prof==0
        for v in graph[n]:
            if marked[v] != FIN and marked[v] != prof - 1:
                count += 1
                r = DFS(v, prof+1)
                if r <= prof:
                    edges.discard(tuple(sorted((n, v))))
                if prof and r >= prof:  # only when we are not at the root
                    nodes.add(n)
                m = min(m, r)
        if prof == 0 and count >= 2:  # the root is an articulation point iff it has two or more children
            nodes.add(n)
        marked[n] = FIN
        return m
    for r in range(N):
        DFS(r)  # one could count connected components here by adding 1 when the result differs from None
    setrecursionlimit(recursionlimit)
    return nodes, edges

Example 58

Project: calibre Source File: input.py
Function: traverse
def traverse(path_to_html_file, max_levels=sys.maxint, verbose=0, encoding=None):
    '''
    Recursively traverse all links in the HTML file.

    :param max_levels: Maximum levels of recursion. Must be non-negative. 0
                       implies that no links in the root HTML file are followed.
    :param encoding:   Specify character encoding of HTML files. If `None` it is
                       auto-detected.
    :return:           A pair of lists (breadth_first, depth_first). Each list contains
                       :class:`HTMLFile` objects.
    '''
    assert max_levels >= 0
    level = 0
    # `flat` accumulates the breadth-first ordering; `next_level` holds the
    # files discovered at the current depth.
    flat =  [HTMLFile(path_to_html_file, level, encoding, verbose)]
    next_level = list(flat)
    while level < max_levels and len(next_level) > 0:
        level += 1
        nl = []
        for hf in next_level:
            rejects = []
            for link in hf.links:
                # Skip anchors without a path and files already collected.
                if link.path is None or link.path in flat:
                    continue
                try:
                    nf = HTMLFile(link.path, level, encoding, verbose, referrer=hf)
                    if nf.is_binary:
                        raise IgnoreFile('%s is a binary file'%nf.path, -1)
                    nl.append(nf)
                    flat.append(nf)
                except IgnoreFile as err:
                    rejects.append(link)
                    if not err.doesnt_exist or verbose > 1:
                        print repr(err)
            # Drop links that could not be followed so they are not revisited.
            for link in rejects:
                hf.links.remove(link)

        next_level = list(nl)
    orec = sys.getrecursionlimit()
    # depth_first() recurses through the whole link graph; allow deep recursion
    # and restore the caller's limit afterwards.
    sys.setrecursionlimit(500000)
    try:
        return flat, list(depth_first(flat[0], flat))
    finally:
        sys.setrecursionlimit(orec)

Example 59

Project: pylearn2 Source File: serial.py
def save(filepath, obj, on_overwrite='ignore'):
    """
    Serialize `object` to a file denoted by `filepath`.

    Parameters
    ----------
    filepath : str
        A filename. If the suffix is `.joblib` and joblib can be
        imported, `joblib.dump` is used in place of the regular
        pickling mechanisms; this results in much faster saves by
        saving arrays as separate .npy files on disk. If the file
        suffix is `.npy` than `numpy.save` is attempted on `obj`.
        Otherwise, (c)pickle is used.

    obj : object
        A Python object to be serialized.

    on_overwrite : str, optional
        A string specifying what to do if the file already exists.
        Possible values include:

        - "ignore" : Just overwrite the existing file.
        - "backup" : Make a backup copy of the file (<filepath>.bak).
          Save the new copy. Then delete the backup copy. This allows
          recovery of the old version of the file if saving the new one
          fails.

    Raises
    ------
    RuntimeError
        Re-raised when `_save` fails for a reason unrelated to the
        recursion limit (previously such errors were silently swallowed).
    """
    filepath = preprocess(filepath)

    if os.path.exists(filepath):
        if on_overwrite == 'backup':
            backup = filepath + '.bak'
            shutil.move(filepath, backup)
            save(filepath, obj)
            # Best-effort cleanup of the backup copy; a failure to delete it
            # should not fail the save itself.
            try:
                os.remove(backup)
            except Exception as e:
                warnings.warn("Got an error while trying to remove " + backup
                              + ":" + str(e))
            return
        else:
            assert on_overwrite == 'ignore'

    try:
        _save(filepath, obj)
    except RuntimeError as e:
        # Sometimes for large theano graphs, pickle/cPickle exceed the
        # maximum recursion depth. The workaround (recommended in
        # http://stackoverflow.com/questions/2134706/hitting-maximum-recursion-depth-using-pythons-pickle-cpickle)
        # is to raise the recursion limit and retry. This does not scale,
        # but there is no better option short of a custom pickle.
        if str(e).find('recursion') != -1:
            logger.warning('pylearn2.utils.save encountered the following '
                           'error: ' + str(e) +
                           '\nAttempting to resolve this error by calling ' +
                           'sys.setrecursionlimit and retrying')
            old_limit = sys.getrecursionlimit()
            try:
                sys.setrecursionlimit(50000)
                _save(filepath, obj)
            finally:
                # Always restore the caller's recursion limit.
                sys.setrecursionlimit(old_limit)
        else:
            # Bug fix: any other RuntimeError used to be silently swallowed,
            # making save() appear to succeed without writing the file.
            raise

Example 60

Project: pymel Source File: arguments.py
Function: expand_args
def expandArgs(*args, **kwargs):
    """
    'Flattens' the arguments list: recursively replaces any iterable argument in *args by a tuple of its
    elements that will be inserted at its place in the returned arguments.

    By default will return elements depth first, from root to leaves.  Set postorder or breadth to control order.

    :Keywords:
        limit : int
            will specify the nested depth limit after which iterables are returned as they are

        type
            for type='list' will only expand lists, by default type='all' expands any iterable sequence

        postorder : bool
             will return elements depth first, from leaves to roots

        breadth : bool
            will return elements breadth first, roots, then first depth level, etc.

    For a nested list represent trees::

        a____b____c
        |    |____d
        e____f
        |____g

    preorder(default) :

        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1 )
        ('a', 'b', ['c', 'd'], 'e', 'f', 'g')
        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'] )
        ('a', 'b', 'c', 'd', 'e', 'f', 'g')

    postorder :

        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True, limit=1)
        ('b', ['c', 'd'], 'a', 'f', 'g', 'e')
        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True)
        ('c', 'd', 'b', 'a', 'f', 'g', 'e')

    breadth :

        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1, breadth=True)
        ('a', 'e', 'b', ['c', 'd'], 'f', 'g')
        >>> expandArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], breadth=True)
        ('a', 'e', 'b', 'f', 'g', 'c', 'd')


    Note that with default depth (unlimited) and order (preorder), if passed a pymel Tree
    result will be the equivalent of doing a preorder traversal : [k for k in iter(theTree)] """

    tpe = kwargs.get('type', 'all')
    # Default limit is the interpreter recursion limit, i.e. effectively unbounded.
    limit = kwargs.get('limit', sys.getrecursionlimit())
    postorder = kwargs.get('postorder', False)
    breadth = kwargs.get('breadth', False)
    if tpe == 'list' or tpe == list:
        def _expandArgsTest(arg):
            return type(arg) == list
    elif tpe == 'all':
        def _expandArgsTest(arg):
            return isIterable(arg)
    else:
        # Call-style raise: the legacy `raise ValueError, msg` form is a syntax
        # error on Python 3 and this spelling is equally valid on Python 2.
        raise ValueError("unknown expand type=%s" % str(tpe))

    if postorder:
        return postorderArgs(limit, _expandArgsTest, *args)
    elif breadth:
        return breadthArgs(limit, _expandArgsTest, *args)
    else:
        return preorderArgs(limit, _expandArgsTest, *args)

Example 61

Project: pymel Source File: arguments.py
Function: iterateargs
def iterateArgs(*args, **kwargs):
    """ Iterates through all arguments list: recursively replaces any iterable argument in *args by a tuple of its
    elements that will be inserted at its place in the returned arguments.

    By default will return elements depth first, from root to leaves.  Set postorder or breadth to control order.

    :Keywords:
        limit : int
            will specify the nested depth limit after which iterables are returned as they are

        type
            for type='list' will only expand lists, by default type='all' expands any iterable sequence

        postorder : bool
             will return elements depth first, from leaves to roots

        breadth : bool
            will return elements breadth first, roots, then first depth level, etc.

    For a nested list represent trees::

        a____b____c
        |    |____d
        e____f
        |____g

    preorder(default) :

        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1 ))
        ('a', 'b', ['c', 'd'], 'e', 'f', 'g')
        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'] ))
        ('a', 'b', 'c', 'd', 'e', 'f', 'g')

    postorder :

        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True, limit=1 ))
        ('b', ['c', 'd'], 'a', 'f', 'g', 'e')
        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], postorder=True))
        ('c', 'd', 'b', 'a', 'f', 'g', 'e')

    breadth :

        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], limit=1, breadth=True))
        ('a', 'e', 'b', ['c', 'd'], 'f', 'g')
        >>> tuple(k for k in iterateArgs( 'a', ['b', ['c', 'd']], 'e', ['f', 'g'], breadth=True))
        ('a', 'e', 'b', 'f', 'g', 'c', 'd')

    Note that with default depth (-1 for unlimited) and order (preorder), if passed a pymel Tree
    result will be the equivalent of using a preorder iterator : iter(theTree) """

    tpe = kwargs.get('type', 'all')
    # Default limit is the interpreter recursion limit, i.e. effectively unbounded.
    limit = kwargs.get('limit', sys.getrecursionlimit())
    postorder = kwargs.get('postorder', False)
    breadth = kwargs.get('breadth', False)
    if tpe == 'list' or tpe == list:
        def _iterateArgsTest(arg):
            return type(arg) == list
    elif tpe == 'all':
        def _iterateArgsTest(arg):
            return isIterable(arg)
    else:
        # Call-style raise: the legacy `raise ValueError, msg` form is a syntax
        # error on Python 3 and this spelling is equally valid on Python 2.
        raise ValueError("unknown expand type=%s" % str(tpe))

    if postorder:
        for arg in postorderIterArgs(limit, _iterateArgsTest, *args):
            yield arg
    elif breadth:
        for arg in breadthIterArgs(limit, _iterateArgsTest, *args):
            yield arg
    else:
        for arg in preorderIterArgs(limit, _iterateArgsTest, *args):
            yield arg

Example 62

Project: visual_turing_test-tutorial Source File: read_write.py
Function: pickle_model
def pickle_model(
        path, 
        model, 
        word2index_x,
        word2index_y,
        index2word_x,
        index2word_y):
    """Pickle `model` together with its vocabulary lookup tables into `path`.

    The recursion limit is temporarily raised because pickling deep model
    graphs can exceed the interpreter default.
    """
    import sys
    import cPickle as pickle
    modifier=10
    tmp = sys.getrecursionlimit()
    sys.setrecursionlimit(tmp*modifier)
    try:
        with open(path, 'wb') as f:
            p_dict = {'model':model,
                    'word2index_x':word2index_x,
                    'word2index_y':word2index_y,
                    'index2word_x':index2word_x,
                    'index2word_y':index2word_y}
            pickle.dump(p_dict, f, protocol=2)
    finally:
        # Bug fix: restore the caller's recursion limit even if pickling
        # raises (previously an exception left the raised limit in place).
        sys.setrecursionlimit(tmp)

Example 63

Project: rez Source File: accessibility.py
def accessibility(graph):
    """
    Accessibility matrix (transitive closure).

    @type  graph: graph, digraph, hypergraph
    @param graph: Graph.

    @rtype:  dictionary
    @return: Accessibility information for each node.
    """
    recursionlimit = getrecursionlimit()
    # Deep graphs can exceed the default limit during the recursive DFS.
    setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))
    try:
        accessibility = {}        # Accessibility matrix

        # For each node i, mark each node j if that exists a path from i to j.
        for each in graph:
            access = {}
            # Perform DFS to explore all reachable nodes
            _dfs(graph, access, 1, each)
            accessibility[each] = list(access.keys())
        return accessibility
    finally:
        # Bug fix: restore the caller's recursion limit even when _dfs raises.
        setrecursionlimit(recursionlimit)

Example 64

Project: rez Source File: accessibility.py
def mutual_accessibility(graph):
    """
    Mutual-accessibility matrix (strongly connected components).

    @type  graph: graph, digraph
    @param graph: Graph.

    @rtype:  dictionary
    @return: Mutual-accessibility information for each node.
    """
    recursionlimit = getrecursionlimit()
    # Deep graphs can exceed the default limit during the recursive visit.
    setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))
    try:
        mutual_access = {}
        stack = []
        low = {}

        def visit(node):
            # Tarjan-style traversal: `low` doubles as the visited set.
            if node in low:
                return

            num = len(low)
            low[node] = num
            stack_pos = len(stack)
            stack.append(node)

            for successor in graph.neighbors(node):
                visit(successor)
                low[node] = min(low[node], low[successor])

            # `node` is the root of a strongly connected component.
            if num == low[node]:
                component = stack[stack_pos:]
                del stack[stack_pos:]
                component.sort()
                for each in component:
                    mutual_access[each] = component

                # Mark component members so they never start a new component.
                for item in component:
                    low[item] = len(graph)

        for node in graph:
            visit(node)

        return mutual_access
    finally:
        # Bug fix: restore the caller's recursion limit even on error.
        setrecursionlimit(recursionlimit)

Example 65

Project: rez Source File: accessibility.py
def connected_components(graph):
    """
    Connected components.

    @type  graph: graph, hypergraph
    @param graph: Graph.

    @rtype:  dictionary
    @return: Pairing that associates each node to its connected component.
    """
    recursionlimit = getrecursionlimit()
    # Deep graphs can exceed the default limit during the recursive DFS.
    setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))
    try:
        visited = {}
        count = 1

        # For 'each' node not found to belong to a connected component, find its
        # connected component.
        for each in graph:
            if (each not in visited):
                _dfs(graph, visited, count, each)
                count = count + 1

        return visited
    finally:
        # Bug fix: restore the caller's recursion limit even when _dfs raises.
        setrecursionlimit(recursionlimit)

Example 66

Project: rez Source File: accessibility.py
def cut_edges(graph):
    """
    Return the cut-edges of the given graph.
    
    A cut edge, or bridge, is an edge of a graph whose removal increases the number of connected
    components in the graph.
    
    @type  graph: graph, hypergraph
    @param graph: Graph.
    
    @rtype:  list
    @return: List of cut-edges.
    """
    # Dispatch hypergraphs before touching the recursion limit. (Bug fix:
    # previously the limit was raised first and this early return leaked it.)
    if 'hypergraph' == graph.__class__.__name__:
        return _cut_hyperedges(graph)

    recursionlimit = getrecursionlimit()
    # Deep graphs can exceed the default limit during the recursive DFS.
    setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))
    try:
        pre = {}    # Pre-ordering
        low = {}    # Lowest pre[] reachable from this node going down the spanning tree + one backedge
        spanning_tree = {}
        reply = []
        pre[None] = 0

        for each in graph:
            if (each not in pre):
                spanning_tree[each] = None
                _cut_dfs(graph, spanning_tree, pre, low, reply, each)

        return reply
    finally:
        # Restore the caller's recursion limit on every exit path.
        setrecursionlimit(recursionlimit)

Example 67

Project: rez Source File: accessibility.py
def cut_nodes(graph):
    """
    Return the cut-nodes of the given graph.
    
    A cut node, or articulation point, is a node of a graph whose removal increases the number of
    connected components in the graph.
    
    @type  graph: graph, hypergraph
    @param graph: Graph.
        
    @rtype:  list
    @return: List of cut-nodes.
    """
    # Dispatch hypergraphs before touching the recursion limit. (Bug fix:
    # previously the limit was raised first and this early return leaked it.)
    if 'hypergraph' == graph.__class__.__name__:
        return _cut_hypernodes(graph)

    recursionlimit = getrecursionlimit()
    # Deep graphs can exceed the default limit during the recursive DFS.
    setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))
    try:
        pre = {}    # Pre-ordering
        low = {}    # Lowest pre[] reachable from this node going down the spanning tree + one backedge
        reply = {}
        spanning_tree = {}
        pre[None] = 0

        # Create spanning trees, calculate pre[], low[]
        for each in graph:
            if (each not in pre):
                spanning_tree[each] = None
                _cut_dfs(graph, spanning_tree, pre, low, [], each)

        # Find cuts
        for each in graph:
            # If node is not a root
            if (spanning_tree[each] is not None):
                for other in graph[each]:
                    # If there is no back-edge from descendent to a ancestral of each
                    if (low[other] >= pre[each] and spanning_tree[other] == each):
                        reply[each] = 1
            # If node is a root
            else:
                children = 0
                for other in graph:
                    if (spanning_tree[other] == each):
                        children = children + 1
                # root is cut-vertex iff it has two or more children
                if (children >= 2):
                    reply[each] = 1

        return list(reply.keys())
    finally:
        # Restore the caller's recursion limit on every exit path.
        setrecursionlimit(recursionlimit)

Example 68

Project: rez Source File: cycles.py
def find_cycle(graph):
    """
    Find a cycle in the given graph.
    
    This function will return a list of nodes which form a cycle in the graph or an empty list if
    no cycle exists.
    
    @type graph: graph, digraph
    @param graph: Graph.
    
    @rtype: list
    @return: List of nodes. 
    """
    
    if (isinstance(graph, graph_class)):
        directed = False
    elif (isinstance(graph, digraph_class)):
        directed = True
    else:
        raise InvalidGraphType

    def find_cycle_to_ancestor(node, ancestor):
        """
        Find a cycle containing both node and ancestor.
        """
        path = []
        while (node != ancestor):
            if (node is None):
                return []
            path.append(node)
            node = spanning_tree[node]
        path.append(node)
        path.reverse()
        return path
    
    def dfs(node):
        """
        Depth-first search subfunction.
        """
        visited[node] = 1
        # Explore recursively the connected component
        for each in graph[node]:
            if (cycle):
                return
            if (each not in visited):
                spanning_tree[each] = node
                dfs(each)
            else:
                # A back-edge closes a cycle (for undirected graphs the
                # spanning-tree parent edge is not a cycle).
                if (directed or spanning_tree[node] != each):
                    cycle.extend(find_cycle_to_ancestor(node, each))

    recursionlimit = getrecursionlimit()
    # Deep graphs can exceed the default limit during the recursive DFS.
    setrecursionlimit(max(len(graph.nodes())*2,recursionlimit))

    visited = {}              # List for marking visited and non-visited nodes
    spanning_tree = {}        # Spanning tree
    cycle = []

    try:
        # Algorithm outer-loop
        for each in graph:
            # Select a non-visited node
            if (each not in visited):
                spanning_tree[each] = None
                # Explore node's connected component
                dfs(each)
                if (cycle):
                    return cycle

        return []
    finally:
        # Bug fix: restore the caller's recursion limit even when dfs raises;
        # also collapses the two duplicated restore points.
        setrecursionlimit(recursionlimit)

Example 69

Project: rez Source File: searching.py
def depth_first_search(graph, root=None, filter=null()):
    """
    Depth-first search.

    @type  graph: graph, digraph
    @param graph: Graph.

    @type  root: node
    @param root: Optional root node (will explore only root's connected component)

    @type  filter: filter object
    @param filter: Optional filtering object; a node is entered only when
        filter(node, parent) is true.

    @rtype:  tuple
    @return: A tuple containing a dictionary and two lists:
        1. Generated spanning tree
        2. Graph's preordering
        3. Graph's postordering
    """

    def dfs(node):
        """
        Depth-first search subfunction.
        """
        visited[node] = 1
        pre.append(node)
        # Explore recursively the connected component
        for each in graph[node]:
            if (each not in visited and filter(each, node)):
                spanning_tree[each] = node
                dfs(each)
        post.append(node)

    visited = {}            # Nodes already entered by dfs() (node -> 1)
    spanning_tree = {}      # DFS parent of each node (roots map to None)
    pre = []                # Graph's preordering
    post = []               # Graph's postordering
    filter.configure(graph, spanning_tree)

    # Temporarily raise the recursion limit so the recursive dfs() can
    # handle graphs deeper than the interpreter default.  The try/finally
    # guarantees the limit is restored even when dfs()/filter raises
    # (the original leaked the raised limit on exception).
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))
    try:
        # DFS from one node only
        if (root is not None):
            if filter(root, None):
                spanning_tree[root] = None
                dfs(root)
            return spanning_tree, pre, post

        # Algorithm loop: start a DFS from every yet-unvisited node so
        # all connected components are covered.
        for each in graph:
            if (each not in visited and filter(each, None)):
                spanning_tree[each] = None
                dfs(each)
    finally:
        setrecursionlimit(recursionlimit)

    return (spanning_tree, pre, post)

Example 70

Project: sverchok Source File: csg_boolean.py
def Boolean(VA, PA, VB, PB, operation):
    """Apply a CSG boolean *operation* to two meshes.

    @param VA: vertices of mesh A.
    @param PA: polygons (faces, as vertex-index lists) of mesh A.
    @param VB: vertices of mesh B.
    @param PB: polygons (faces) of mesh B.
    @param operation: one of 'DIFF' (subtract), 'JOIN' (union) or
        'ITX' (intersect).
    @return: ([vertices], [faces]) of the resulting mesh, or
        (False, False) when any input list is empty.
    @raise ValueError: if *operation* is not one of the three keywords.
    """
    if not all([VA, PA, VB, PB]):
        return False, False

    a = CSG.Obj_from_pydata(VA, PA)
    b = CSG.Obj_from_pydata(VB, PB)

    # CSG recursion can be deep; raise the limit and guarantee it is
    # restored even when the operation raises (the original skipped the
    # restore on exception).  A raised RuntimeError now propagates with
    # its original traceback instead of being re-wrapped.
    recursionlimit = sys.getrecursionlimit()
    sys.setrecursionlimit(10000)
    try:
        if operation == 'DIFF':
            polygons = a.subtract(b).toPolygons()
        elif operation == 'JOIN':
            polygons = a.union(b).toPolygons()
        elif operation == 'ITX':
            polygons = a.intersect(b).toPolygons()
        else:
            # Previously an unknown operation crashed later with an
            # unbound-`polygons` NameError; fail explicitly instead.
            raise ValueError("unknown boolean operation: %r" % (operation,))
    finally:
        sys.setrecursionlimit(recursionlimit)

    # Deduplicate vertices with an O(1) lookup table keyed on the
    # coordinate tuple, instead of the original O(n^2) list scan.
    faces = []
    vertices = []
    index_of = {}
    for polygon in polygons:
        indices = []
        for v in polygon.vertices:
            pos = (v.pos.x, v.pos.y, v.pos.z)
            index = index_of.get(pos)
            if index is None:
                index = len(vertices)
                index_of[pos] = index
                vertices.append(list(pos))
            indices.append(index)
        faces.append(indices)

    return [vertices], [faces]

Example 71

Project: pyxb Source File: archive.py
Function: write_namespaces
    def writeNamespaces (self, output):
        """Store the namespaces into the archive.

        Sets the class-level C{__PicklingArchive} marker for the duration
        of the dump and clears it again on every exit path.

        @param output: An instance substitutable for a writable file, or the
        name of a file to write to.
        """
        import sys

        assert NamespaceArchive.__PicklingArchive is None
        NamespaceArchive.__PicklingArchive = self
        assert self.__moduleRecords is not None

        try:
            # Recalculate the record/object associations: we didn't assign
            # anonymous names to the indeterminate scope objects because they
            # weren't needed for bindings, but they are needed in the archive.
            for mr in self.__moduleRecords:
                mr.namespace()._associateOrigins(mr)

            # See http://bugs.python.org/issue3338
            recursion_limit = sys.getrecursionlimit()
            sys.setrecursionlimit(10 * recursion_limit)
            try:
                pickler = self.__createPickler(output)

                assert isinstance(self.__moduleRecords, set)
                pickler.dump(self.__moduleRecords)

                for mr in self.__moduleRecords:
                    pickler.dump(mr.namespace())
                    pickler.dump(mr.categoryObjects())
            finally:
                sys.setrecursionlimit(recursion_limit)
        finally:
            # Previously this reset ran after the try/finally, so a failed
            # dump left __PicklingArchive set and the assert at the top
            # would trip on the next call.  Always clear it.
            NamespaceArchive.__PicklingArchive = None
See More Examples - Go to Next Page
Page 1 Page 2 Selected