json.dumps

Here are examples of the Python API json.dumps, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

175 Examples
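Before the project examples, here is a minimal, self-contained sketch of the call patterns they rely on. The payload values are hypothetical and not taken from any of the projects below.

import json

payload = {'name': 'example', 'ports': [10000, 10100]}

# Serialize a dict to a compact JSON string (used below for HTTP POST bodies and queue messages)
data = json.dumps(payload)

# Pretty-print with an indent (used below when writing configuration values)
pretty = json.dumps(payload, indent=4)

# json.loads is the inverse operation
assert json.loads(data) == payload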

Example 51

Project: framework Source File: test_scheduledtask.py
    def test_scrubbing(self):
        """
        Validates the scrubbing workflow
        * Scenario 1: Validate disabled scrub task and single vDisk scrub logic
        * Scenario 2: 1 vPool, 10 vDisks, 1 scrub role
                      Scrubbing fails for 5 vDisks, check if scrubbing completed for all other vDisks
                      Run scrubbing a 2nd time and verify scrubbing now works for failed vDisks
        * Scenario 3: 1 vPool, 10 vDisks, 5 scrub roles
                      Check if vDisks are divided among all threads
        * Scenario 4: 3 vPools, 9 vDisks, 5 scrub roles
                      Validate 6 threads will be spawned and used out of a potential of 15 (5 scrub roles * 3 vPools)
                      We limit max amount of threads spawned per vPool to 2 in case 3 to 5 vPools are present
        """
        _ = self
        for i in xrange(1, 6):
            Configuration.set('/ovs/framework/hosts/{0}/ports'.format(i), {'storagedriver': [10000, 10100]})

        ##############
        # Scenario 1 #
        ##############
        structure = Helper.build_service_structure(
            {'vpools': [1],
             'vdisks': [(1, 1, 1, 1)],  # (<id>, <storagedriver_id>, <vpool_id>, <mds_service_id>)
             'mds_services': [(1, 1)],  # (<id>, <storagedriver_id>)
             'storagerouters': [1],
             'storagedrivers': [(1, 1, 1)]}  # (<id>, <vpool_id>, <storagerouter_id>)
        )
        vdisk = structure['vdisks'][1]
        vpool = structure['vpools'][1]
        storagerouter = structure['storagerouters'][1]
        System._machine_id = {storagerouter.ip: '1'}
        Configuration.set('/ovs/vpools/{0}/proxies/scrub/generic_scrub'.format(vpool.guid), json.dumps({}, indent=4), raw=True)
        LockedClient.scrub_controller = {'possible_threads': None,
                                         'volumes': {},
                                         'waiter': Waiter(1)}
        LockedClient.scrub_controller['volumes'][vdisk.volume_id] = {'success': False,
                                                                     'scrub_work': [0]}
        with self.assertRaises(Exception) as raise_info:
            VDiskController.scrub_single_vdisk(vdisk.guid, storagerouter.guid)
        self.assertIn(vdisk.name, raise_info.exception.message)
        LockedClient.scrub_controller['volumes'][vdisk.volume_id] = {'success': True,
                                                                     'scrub_work': [0]}
        VDiskController.scrub_single_vdisk(vdisk.guid, storagerouter.guid)
        with vdisk.storagedriver_client.make_locked_client(vdisk.volume_id) as locked_client:
            self.assertEqual(first=len(locked_client.get_scrubbing_workunits()),
                             second=0,
                             msg='Scrubbed vDisk {0} does not have the expected amount of scrubbing items: {1}'.format(vdisk.name, 0))

        ##############
        # Scenario 2 #
        ##############
        self.volatile.clean()
        self.persistent.clean()
        structure = Helper.build_service_structure(
            {'vpools': [1],
             'vdisks': [(1, 1, 1, 1), (2, 1, 1, 1), (3, 1, 1, 1), (4, 1, 1, 1), (5, 1, 1, 1),
                        (6, 1, 1, 1), (7, 1, 1, 1), (8, 1, 1, 1), (9, 1, 1, 1), (10, 1, 1, 1)],  # (<id>, <storagedriver_id>, <vpool_id>, <mds_service_id>)
             'mds_services': [(1, 1)],  # (<id>, <storagedriver_id>)
             'storagerouters': [1],
             'storagedrivers': [(1, 1, 1)]}  # (<id>, <vpool_id>, <storagerouter_id>)
        )
        vpool = structure['vpools'][1]
        vdisks = structure['vdisks']
        storagerouter = structure['storagerouters'][1]
        System._machine_id = {storagerouter.ip: '1'}
        Configuration.set('/ovs/vpools/{0}/proxies/scrub/generic_scrub'.format(vpool.guid), json.dumps({}, indent=4), raw=True)
        LockedClient.scrub_controller = {'possible_threads': ['scrub_{0}_{1}'.format(vpool.guid, storagerouter.guid)],
                                         'volumes': {},
                                         'waiter': Waiter(1)}
        failed_vdisks = []
        successful_vdisks = []
        for vdisk_id in sorted(vdisks):
            vdisk = vdisks[vdisk_id]
            success = vdisk_id % 2 == 0
            LockedClient.scrub_controller['volumes'][vdisk.volume_id] = {'success': success,
                                                                         'scrub_work': range(vdisk_id)}
            if success is True:
                successful_vdisks.append(vdisk)
            else:
                failed_vdisks.append(vdisk)

        # Execute scrubbing a 1st time
        with self.assertRaises(Exception) as raise_info:
            ScheduledTaskController.execute_scrub()
        for vdisk in failed_vdisks:
            self.assertIn(vdisk.name, raise_info.exception.message)

        # Validate expected successful vDisks
        for vdisk in successful_vdisks:
            with vdisk.storagedriver_client.make_locked_client(vdisk.volume_id) as locked_client:
                self.assertEqual(first=len(locked_client.get_scrubbing_workunits()),
                                 second=0,
                                 msg='Scrubbed vDisk {0} does still have scrubbing work left'.format(vdisk.name))
        # Validate expected failed vDisks
        for vdisk in failed_vdisks:
            with vdisk.storagedriver_client.make_locked_client(vdisk.volume_id) as locked_client:
                self.assertEqual(first=len(locked_client.get_scrubbing_workunits()),
                                 second=int(vdisk.name),
                                 msg='Scrubbed vDisk {0} does not have the expected amount of scrubbing items: {1}'.format(vdisk.name, int(vdisk.name)))

        # Execute scrubbing again
        for vdisk_id in sorted(vdisks):
            vdisk = vdisks[vdisk_id]
            LockedClient.scrub_controller['volumes'][vdisk.volume_id]['success'] = True
        ScheduledTaskController.execute_scrub()
        for vdisk in vdisks.values():
            with vdisk.storagedriver_client.make_locked_client(vdisk.volume_id) as locked_client:
                self.assertEqual(first=len(locked_client.get_scrubbing_workunits()),
                                 second=0,
                                 msg='Scrubbed vDisk {0} does still have scrubbing work left after scrubbing a 2nd time'.format(vdisk.name))

        ##############
        # Scenario 3 #
        ##############
        self.volatile.clean()
        self.persistent.clean()
        structure = Helper.build_service_structure(
            {'vpools': [1],
             'vdisks': [(1, 1, 1, 1), (2, 1, 1, 1), (3, 1, 1, 1), (4, 1, 1, 1), (5, 1, 1, 1),
                        (6, 1, 1, 1), (7, 1, 1, 1), (8, 1, 1, 1), (9, 1, 1, 1), (10, 1, 1, 1)],  # (<id>, <storagedriver_id>, <vpool_id>, <mds_service_id>)
             'mds_services': [(1, 1)],  # (<id>, <storagedriver_id>)
             'storagerouters': [1, 2, 3, 4, 5],
             'storagedrivers': [(1, 1, 1)]}  # (<id>, <vpool_id>, <storagerouter_id>)
        )
        vpool = structure['vpools'][1]
        vdisks = structure['vdisks']
        storagerouters = structure['storagerouters']
        System._machine_id = dict((sr.ip, sr.machine_id) for sr in storagerouters.values())
        Configuration.set('/ovs/vpools/{0}/proxies/scrub/generic_scrub'.format(vpool.guid), json.dumps({}, indent=4), raw=True)

        thread_names = ['scrub_{0}_{1}'.format(vpool.guid, storagerouter.guid) for storagerouter in storagerouters.values()]
        LockedClient.scrub_controller = {'possible_threads': thread_names,
                                         'volumes': {},
                                         'waiter': Waiter(len(thread_names))}
        LockedClient.thread_names = thread_names[:]
        for vdisk_id in sorted(vdisks):
            vdisk = vdisks[vdisk_id]
            LockedClient.scrub_controller['volumes'][vdisk.volume_id] = {'success': True,
                                                                         'scrub_work': range(vdisk_id)}
        ScheduledTaskController.execute_scrub()
        self.assertEqual(first=len(LockedClient.thread_names),
                         second=0,
                         msg='Not all threads have been used in the process')

        ##############
        # Scenario 4 #
        ##############
        self.volatile.clean()
        self.persistent.clean()
        structure = Helper.build_service_structure(
            {'vpools': [1, 2, 3],
             'vdisks': [(1, 1, 1, 1), (2, 1, 1, 1), (3, 1, 1, 1), (4, 2, 2, 2), (5, 2, 2, 2),
                        (6, 2, 2, 2), (7, 3, 3, 3), (8, 3, 3, 3), (9, 3, 3, 3)],  # (<id>, <storagedriver_id>, <vpool_id>, <mds_service_id>)
             'mds_services': [(1, 1), (2, 2), (3, 3)],  # (<id>, <storagedriver_id>)
             'storagerouters': [1, 2, 3, 4, 5],
             'storagedrivers': [(1, 1, 1), (2, 2, 1), (3, 3, 1)]}  # (<id>, <vpool_id>, <storagerouter_id>)
        )
        vpools = structure['vpools']
        vdisks = structure['vdisks']
        storagerouters = structure['storagerouters']

        thread_names = []
        for vpool in vpools.values():
            Configuration.set('/ovs/vpools/{0}/proxies/scrub/generic_scrub'.format(vpool.guid), json.dumps({}, indent=4), raw=True)
            for storagerouter in storagerouters.values():
                thread_names.append('scrub_{0}_{1}'.format(vpool.guid, storagerouter.guid))
        LockedClient.scrub_controller = {'possible_threads': thread_names,
                                         'volumes': {},
                                         'waiter': Waiter(len(thread_names) - 9)}
        LockedClient.thread_names = thread_names[:]
        for vdisk_id in sorted(vdisks):
            vdisk = vdisks[vdisk_id]
            LockedClient.scrub_controller['volumes'][vdisk.volume_id] = {'success': True,
                                                                         'scrub_work': range(vdisk_id)}
        ScheduledTaskController.execute_scrub()
        self.assertEqual(first=len(LockedClient.thread_names),
                         second=9,  # 5 srs * 3 vps = 15 threads, but only 2 will be spawned per vPool --> 15 - 6 = 9 left
                         msg='Not all threads have been used in the process')

        # 3 vPools will cause the scrubber to only launch 2 threads per vPool --> 1 possible thread should be unused per vPool
        for vpool in vpools.values():
            threads_left = [thread_name for thread_name in LockedClient.thread_names if vpool.guid in thread_name]
            self.assertEqual(first=len(threads_left),
                             second=3,
                             msg='Unexpected amount of threads left for vPool {0}'.format(vpool.name))

Example 52

Project: openstates Source File: bills.py
    def scrape(self, session, chambers):
        #get member id matching for vote parsing
        member_ids = self.get_member_ids()[session]
        per_page = 10 #seems like it gives me 10 no matter what.
        start_record = 0

        headers = {"Content-Type":"application/json"}
        url = "http://lims.dccouncil.us/_layouts/15/uploader/AdminProxy.aspx/GetPublicAdvancedSearch"
        bill_url = "http://lims.dccouncil.us/_layouts/15/uploader/AdminProxy.aspx/GetPublicData"
        params = {"request":{"sEcho":2,"iColumns":4,"sColumns":"","iDisplayStart":0,"iDisplayLength":per_page,"mDataProp_0":"ShortTitle","mDataProp_1":"Title","mDataProp_2":"LegislationCategories","mDataProp_3":"Modified","iSortCol_0":0,"sSortDir_0":"asc","iSortingCols":0,"bSortable_0":"true","bSortable_1":"true","bSortable_2":"true","bSortable_3":"true"},"criteria":{"Keyword":"","Category":"","SubCategoryId":"","RequestOf":"","CouncilPeriod":str(session),"Introducer":"","CoSponsor":"","CommitteeReferral":"","CommitteeReferralComments":"","StartDate":"","EndDate":"","QueryLimit":100,"FilterType":"","Phases":"","LegislationStatus":"0","IncludeDocuementSearch":"false"}}
        param_json = json.dumps(params)
        response = self.post(url,headers=headers,data=param_json)
        #the response is a terrible string-of-nested-json-strings. Yuck.
        response = self.decode_json(response.json()["d"])
        data = response["aaData"]
        
        global bill_versions

        while len(data) > 0:

            for bill in data:
                bill_versions = [] #sometimes they're in there more than once, so we'll keep track

                bill_id = bill["Title"]
                if bill_id.startswith("AG"):
                    #actually an agenda, skip
                    continue
                bill_params = {"legislationId":bill_id}
                bill_info = self.post(bill_url,headers=headers,data=json.dumps(bill_params))
                bill_info = self.decode_json(bill_info.json()["d"])["data"]
                bill_source_url = "http://lims.dccouncil.us/Legislation/"+bill_id


                legislation_info = bill_info["Legislation"][0]
                title = legislation_info["ShortTitle"]
                
                
                
                if bill_id.startswith("R") or bill_id.startswith("CER"):
                    bill_type = "resolution"
                else:
                    bill_type = "bill"
                
                #dc has no chambers. calling it all upper
                bill = Bill(session,"upper", bill_id, title, type=bill_type)

                #sponsors and cosponsors
                introducers = legislation_info["Introducer"]
                try:
                    #sometimes there are cosponsors, sometimes not.
                    cosponsors = legislation_info["CoSponsor"]
                except KeyError:
                    cosponsors = []
                for i in introducers:
                    sponsor_name = i["Name"]
                    #they messed up Phil Mendelson's name
                    if sponsor_name == "Phil Pmendelson":
                        sponsor_name = "Phil Mendelson"
                    bill.add_sponsor(name=sponsor_name,type="primary")
                for s in cosponsors:
                    sponsor_name = s["Name"]
                    if sponsor_name == "Phil Pmendelson":
                        sponsor_name = "Phil Mendelson"
                    bill.add_sponsor(name=sponsor_name,type="cosponsor")


                #if it's become law, add the law number as an alternate title
                if "LawNumber" in legislation_info:
                    law_num = legislation_info["LawNumber"]
                    if law_num:
                        bill.add_title(law_num)

                #also sometimes it's got an act number
                if "ActNumber" in legislation_info:
                    act_num = legislation_info["ActNumber"]
                    if act_num:
                        bill.add_title(act_num)

                #sometimes AdditionalInformation has a previous bill name
                if "AdditionalInformation" in legislation_info:
                    add_info = legislation_info["AdditionalInformation"]
                    if "previously" in add_info.lower():
                        prev_title = add_info.lower().replace("previously","").strip().replace(" ","")
                        bill.add_title(prev_title.upper())
                    elif add_info:
                        bill["additional_information"] = add_info

                if "WithDrawnDate" in legislation_info:
                    withdrawn_date = self.date_format(legislation_info["WithDrawnDate"])
                    withdrawn_by = legislation_info["WithdrawnBy"][0]["Name"].strip()
                    if withdrawn_by == "the Mayor":

                        bill.add_action("executive",
                                    "withdrawn",
                                    withdrawn_date,
                                    "bill:withdrawn")

                    elif "committee" in withdrawn_by.lower():
                        bill.add_action("upper",
                                    "withdrawn",
                                    withdrawn_date,
                                    "bill:withdrawn",
                                    committees=withdrawn_by)
                    else:
                        bill.add_action("upper",
                                    "withdrawn",
                                    withdrawn_date,
                                    "bill:withdrawn",
                                    legislators=withdrawn_by)


                #deal with actions involving the mayor
                mayor = bill_info["MayorReview"]
                if mayor != []:
                    mayor = mayor[0]

                    #in dc, mayor == governor because openstates schema
                    if "TransmittedDate" in mayor:
                        transmitted_date = self.date_format(mayor["TransmittedDate"])

                        bill.add_action("executive",
                                    "transmitted to mayor",
                                    transmitted_date,
                                    type = "governor:received")

                    if 'SignedDate' in mayor:
                        signed_date = self.date_format(mayor["SignedDate"])

                        bill.add_action("executive",
                                        "signed",
                                        signed_date,
                                        type="governor:signed")


                    elif 'ReturnedDate' in mayor: #if returned but not signed, it was vetoed
                        veto_date = self.date_format(mayor["ReturnedDate"])

                        bill.add_action("executive",
                                        "vetoed",
                                        veto_date,
                                        type="governor:vetoed")

                        if 'EnactedDate' in mayor: #if it was returned and enacted but not signed, there was a veto override
                            override_date = self.date_format(mayor["EnactedDate"])

                            bill.add_action("upper",
                                        "veto override",
                                        override_date,
                                        type="bill:veto_override:passed")

                    if 'AttachmentPath' in mayor:
                        #documents relating to the mayor's review
                        self.add_documents(mayor["AttachmentPath"],bill)

                congress = bill_info["CongressReview"]
                if len(congress) > 0:
                    congress = congress[0]
                    if "TransmittedDate" in congress:
                        transmitted_date = self.date_format(congress["TransmittedDate"])

                        bill.add_action("other",
                                    "Transmitted to Congress for review",
                                    transmitted_date)




                #deal with committee actions
                if "DateRead" in legislation_info:
                    date = legislation_info["DateRead"]
                elif "IntroductionDate" in legislation_info:
                    date = legislation_info["IntroductionDate"]
                else:
                    self.logger.warning("Crap, we can't find anything that looks like an action date. Skipping")
                    continue
                date = self.date_format(date)
                if "CommitteeReferral" in legislation_info:
                    committees = []
                    for committee in legislation_info["CommitteeReferral"]:
                        if committee["Name"].lower() == "retained by the council":
                            committees = []
                            break
                        else:
                            committees.append(committee["Name"])
                    if committees != []:
                        bill.add_action("upper",
                                    "referred to committee",
                                    date,
                                    committees=committees,
                                    type="committee:referred")

                if "CommitteeReferralComments" in legislation_info:
                    committees = []
                    for committee in legislation_info["CommitteeReferralComments"]:
                        committees.append(committee["Name"])
                    bill.add_action("upper",
                                    "comments from committee",
                                    date,
                                    committees=committees,
                                    type="other")

                #deal with random docs floating around
                docs = bill_info["OtherDocuments"]
                for d in docs:
                    if "AttachmentPath" in d:
                        self.add_documents(d["AttachmentPath"],bill)
                    else:
                        self.logger.warning("Document path missing from 'Other Documents'")

                if "MemoLink" in legislation_info:
                    self.add_documents(legislation_info["MemoLink"],bill)

                if "AttachmentPath" in legislation_info:
                    self.add_documents(legislation_info["AttachmentPath"],bill)


                #full council votes
                votes = bill_info["VotingSummary"]
                for vote in votes:
                    self.process_vote(vote, bill, member_ids)
     

                #deal with committee votes
                if "CommitteeMarkup" in bill_info:
                    committee_info = bill_info["CommitteeMarkup"]
                    if len(committee_info) > 0:
                        for committee_action in committee_info:
                            self.process_committee_vote(committee_action,bill)
                        if "AttachmentPath" in committee_info:
                            self.add_documents(vote["AttachmentPath"],bill,is_version)

                bill.add_source(bill_source_url)
                self.save_bill(bill)
            
            #get next page
            start_record += per_page
            params["request"]["iDisplayStart"] = start_record
            param_json = json.dumps(params)
            response = self.post(url,headers=headers,data=param_json)
            response = self.decode_json(response.json()["d"])
            data = response["aaData"]

Example 53

Project: poppy Source File: certificates.py
    def create_sni_certificate(self, cert_obj, enqueue, https_upgrade):
        try:
            found, found_cert = (
                self._check_domain_already_exists_on_sni_certs(
                    cert_obj.domain_name
                )
            )
            if found is True:
                return self.responder.ssl_certificate_provisioned(None, {
                    'status': 'failed',
                    'sni_cert': None,
                    'created_at': str(datetime.datetime.now()),
                    'action': (
                        'Domain {0} already exists '
                        'on sni cert {1}.'.format(
                            cert_obj.domain_name, found_cert
                        )
                    )
                })
            if enqueue:
                self.mod_san_queue.enqueue_mod_san_request(
                    json.dumps(cert_obj.to_dict()))
                extras = {
                    'status': 'create_in_progress',
                    'sni_cert': None,
                    # Add logging so it is easier for testing
                    'created_at': str(datetime.datetime.now()),
                    'action': (
                        'SNI cert request for {0} has been '
                        'enqueued.'.format(cert_obj.domain_name)
                    )
                }
                if https_upgrade is True:
                    extras['https upgrade notes'] = (
                        "This domain was upgraded from HTTP to HTTPS SNI."
                        "Take note of the domain name. Where applicable, "
                        "delete the old HTTP policy after the upgrade is "
                        "complete or the old policy is no longer in use."
                    )
                return self.responder.ssl_certificate_provisioned(
                    None,
                    extras
                )
            cert_hostname_limit = (
                self.cert_info_storage.get_san_cert_hostname_limit()
            )
            for cert_name in self.sni_cert_cnames:
                enabled = (
                    self.cert_info_storage.get_enabled_status(
                        cert_name, info_type='sni'
                    )
                )
                if not enabled:
                    LOG.info("SNI cert {0} is disabled.".format(
                        cert_name))
                    continue
                cert_hostname_limit = (
                    cert_hostname_limit or
                    self.driver.san_cert_hostname_limit
                )

                host_names_count = utils.get_ssl_number_of_hosts_alternate(
                    cert_name
                )
                if host_names_count >= cert_hostname_limit:
                    LOG.info(
                        "SNI cert {0} has {1} hosts, "
                        "limit is {2}.".format(
                            cert_name,
                            host_names_count,
                            cert_hostname_limit))
                    continue

                try:
                    enrollment_id = (
                        self.cert_info_storage.get_cert_enrollment_id(
                            cert_name))
                    # GET the enrollment by ID
                    headers = {
                        'Accept': ('application/vnd.akamai.cps.enrollment.v1+'
                                   'json')
                    }
                    resp = self.cps_api_client.get(
                        self.cps_api_base_url.format(
                            enrollmentId=enrollment_id),
                        headers=headers
                    )
                    if resp.status_code not in [200, 202]:
                        raise RuntimeError(
                            'CPS Request failed. Unable to GET enrollment '
                            'with id {0} Exception: {1}'.format(
                                enrollment_id, resp.text))
                    resp_json = json.loads(resp.text)
                    # check enrollment does not have any pending changes
                    if len(resp_json['pendingChanges']) > 0:
                        LOG.info("{0} has pending changes, skipping...".format(
                            cert_name))
                        continue

                    # adding sans should get them cloned into sni host names
                    resp_json['csr']['sans'] = resp_json['csr']['sans'].append(
                        cert_obj.domain_name
                    )

                    # PUT the enrollment including the modifications
                    headers = {
                        'Content-Type': (
                            'application/vnd.akamai.cps.enrollment.v1+json'),
                        'Accept': (
                            'application/vnd.akamai.cps.enrollment-status.v1+'
                            'json')
                    }
                    resp = self.cps_api_client.put(
                        self.cps_api_base_url.format(
                            enrollmentId=enrollment_id),
                        data=json.dumps(resp_json),
                        headers=headers
                    )
                    if resp.status_code not in [200, 202]:
                        raise RuntimeError(
                            'CPS Request failed. Unable to modify enrollment '
                            'with id {0} Exception: {1}'.format(
                                enrollment_id, resp.text))

                    # resp code 200 means PUT didn't create a change
                    # resp code 202 means PUT created a change
                    if resp.status_code == 202:
                        # save the change id for future reference
                        change_url = json.loads(resp.text)['changes'][0]
                        cert_copy = copy.deepcopy(cert_obj.to_dict())
                        (
                            cert_copy['cert_details']
                            [self.driver.provider_name]
                        ) = {
                            'extra_info': {
                                'change_url': change_url,
                                'sni_cert': cert_name
                            }
                        }
                        self.san_mapping_queue.enqueue_san_mapping(
                            json.dumps(cert_copy)
                        )
                        return self.responder.ssl_certificate_provisioned(
                            cert_name, {
                                'status': 'create_in_progress',
                                'sni_cert': cert_name,
                                'change_url': change_url,
                                'created_at': str(datetime.datetime.now()),
                                'action': 'Waiting for customer domain '
                                          'validation for {0}'.format(
                                    cert_obj.domain_name)
                            })
                except Exception as exc:
                    LOG.exception(
                        "Unable to provision certificate {0}, "
                        "Error: {1}".format(cert_obj.domain_name, exc))
                    return self.responder.ssl_certificate_provisioned(None, {
                        'status': 'failed',
                        'sni_cert': None,
                        'created_at': str(datetime.datetime.now()),
                        'action': 'Waiting for action... CPS API provision '
                                  'DV SNI cert failed for {0} failed.'.format(
                            cert_obj.domain_name)
                    })
            else:
                self.mod_san_queue.enqueue_mod_san_request(
                    json.dumps(cert_obj.to_dict()))
                return self.responder.ssl_certificate_provisioned(None, {
                    'status': 'create_in_progress',
                    'sni_cert': None,
                    # Add logging so it is easier for testing
                    'created_at': str(datetime.datetime.now()),
                    'action': 'No available sni cert for {0} right now,'
                              ' or no sni cert info available. Support:'
                              'Please write down the domain and keep an'
                              ' eye on next available freed-up SNI certs.'
                              ' More provisioning might be needed'.format(
                        cert_obj.domain_name)
                })
        except Exception as e:
            LOG.exception(
                "Error {0} during SNI certificate creation for {1} "
                "sending the request sent back to the queue.".format(
                    e, cert_obj.domain_name
                )
            )
            try:
                self.mod_san_queue.enqueue_mod_san_request(
                    json.dumps(cert_obj.to_dict()))
                return self.responder.ssl_certificate_provisioned(None, {
                    'status': 'create_in_progress',
                    'sni_cert': None,
                    # Add logging so it is easier for testing
                    'created_at': str(datetime.datetime.now()),
                    'action': (
                        'SNI cert request for {0} has been '
                        'enqueued.'.format(cert_obj.domain_name)
                    )
                })
            except Exception as exc:
                LOG.exception("Unable to enqueue {0}, Error: {1}".format(
                    cert_obj.domain_name,
                    exc
                ))
                return self.responder.ssl_certificate_provisioned(None, {
                    'status': 'failed',
                    'sni_cert': None,
                    'created_at': str(datetime.datetime.now()),
                    'action': 'Waiting for action... Provision '
                              'sni cert failed for {0} failed.'.format(
                        cert_obj.domain_name)
                })

Example 54

Project: poppy Source File: certificates.py
    def create_certificate(self, cert_obj, enqueue=True, https_upgrade=False):
        if cert_obj.cert_type == 'san':
            try:
                found, found_cert = (
                    self._check_domain_already_exists_on_san_certs(
                        cert_obj.domain_name
                    )
                )
                if found is True:
                    return self.responder.ssl_certificate_provisioned(None, {
                        'status': 'failed',
                        'san cert': None,
                        'created_at': str(datetime.datetime.now()),
                        'action': (
                            'Domain {0} already exists '
                            'on san cert {1}.'.format(
                                cert_obj.domain_name, found_cert
                            )
                        )
                    })

                if enqueue:
                    self.mod_san_queue.enqueue_mod_san_request(
                        json.dumps(cert_obj.to_dict()))
                    extras = {
                        'status': 'create_in_progress',
                        'san cert': None,
                        # Add logging so it is easier for testing
                        'created_at': str(datetime.datetime.now()),
                        'action': (
                            'San cert request for {0} has been '
                            'enqueued.'.format(cert_obj.domain_name)
                        )
                    }
                    if https_upgrade is True:
                        extras['https upgrade notes'] = (
                            "This domain was upgraded from HTTP to HTTPS SAN."
                            "Take note of the domain name. Where applicable, "
                            "delete the old HTTP policy after the upgrade is "
                            "complete or the old policy is no longer in use."
                        )
                    return self.responder.ssl_certificate_provisioned(
                        None,
                        extras
                    )

                san_cert_hostname_limit = (
                    self.cert_info_storage.get_san_cert_hostname_limit()
                )

                for san_cert_name in self.san_cert_cnames:
                    enabled = (
                        self.cert_info_storage.get_enabled_status(
                            san_cert_name
                        )
                    )
                    if not enabled:
                        LOG.info("SAN cert {0} is disabled.".format(
                            san_cert_name))
                        continue

                    # if the limit provided as an arg to this function is None
                    # default san_cert_hostname_limit to the value provided in
                    # the config file.
                    san_cert_hostname_limit = (
                        san_cert_hostname_limit or
                        self.driver.san_cert_hostname_limit
                    )

                    # Check san_cert to enforce number of hosts hasn't
                    # reached the limit. If the current san_cert is at max
                    # capacity continue to the next san_cert
                    san_hosts = utils.get_ssl_number_of_hosts(
                        '.'.join(
                            [
                                san_cert_name,
                                self.driver.akamai_https_access_url_suffix
                            ]
                        )
                    )
                    if san_hosts >= san_cert_hostname_limit:
                        LOG.info(
                            "SAN cert {0} has {1} hosts, "
                            "limit is {2}.".format(
                                san_cert_name,
                                san_hosts,
                                san_cert_hostname_limit))
                        continue

                    last_sps_id = (
                        self.cert_info_storage.get_cert_last_spsid(
                            san_cert_name
                        )
                    )
                    if last_sps_id not in [None, ""]:
                        LOG.info('Latest spsId for {0} is: {1}'.format(
                            san_cert_name,
                            last_sps_id)
                        )
                        resp = self.sps_api_client.get(
                            self.sps_api_base_url.format(spsId=last_sps_id),
                        )
                        if resp.status_code != 200:
                            raise RuntimeError(
                                'SPS API Request Failed. '
                                'Exception: {0}'.format(resp.text)
                            )
                        sps_request_info = json.loads(resp.text)[
                            'requestList'][0]
                        status = sps_request_info['status']
                        work_flow_progress = (
                            sps_request_info['workflowProgress']
                        )
                        if status == 'edge host already created or pending':
                            if work_flow_progress is not None and \
                                    'error' in work_flow_progress.lower():
                                LOG.info("SPS Pending with Error: {0}".format(
                                    work_flow_progress))
                                continue
                            else:
                                pass
                        elif status == 'CPS cancelled':
                            pass
                        elif status != 'SPS Request Complete':
                            LOG.info("SPS Not completed for {0}...".format(
                                     san_cert_name))
                            continue
                    # issue modify san_cert sps request
                    cert_info = self.cert_info_storage.get_cert_info(
                        san_cert_name)
                    cert_info['add.sans'] = cert_obj.domain_name
                    string_post_data = '&'.join(
                        ['%s=%s' % (k, v) for (k, v) in cert_info.items()])
                    LOG.info(
                        'Post modSan request with request data: {0}'.format(
                            string_post_data
                        )
                    )
                    resp = self.sps_api_client.post(
                        self.sps_api_base_url.format(spsId=""),
                        data=string_post_data.encode('utf-8')
                    )
                    if resp.status_code != 202:
                        raise RuntimeError(
                            'SPS Request failed. '
                            'Exception: {0}'.format(resp.text)
                        )
                    else:
                        resp_dict = json.loads(resp.text)
                        LOG.info(
                            'modSan request submitted. Response: {0}'.format(
                                resp_dict
                            )
                        )
                        this_sps_id = resp_dict['spsId']
                        # get last item in results array and use its jobID
                        results = resp_dict['Results']['data']
                        this_job_id = results[0]['results']['jobID']
                        self.cert_info_storage.save_cert_last_ids(
                            san_cert_name,
                            this_sps_id,
                            this_job_id
                        )
                        cert_copy = copy.deepcopy(cert_obj.to_dict())
                        (
                            cert_copy['cert_details']
                            [self.driver.provider_name]
                        ) = {
                            'extra_info': {
                                'akamai_spsId': this_sps_id,
                                'san cert': san_cert_name
                            }
                        }

                        self.san_mapping_queue.enqueue_san_mapping(
                            json.dumps(cert_copy)
                        )
                        return self.responder.ssl_certificate_provisioned(
                            san_cert_name, {
                                'status': 'create_in_progress',
                                'san cert': san_cert_name,
                                'akamai_spsId': this_sps_id,
                                'created_at': str(datetime.datetime.now()),
                                'action': 'Waiting for customer domain '
                                          'validation for {0}'.format(
                                    cert_obj.domain_name)
                            })
                else:
                    self.mod_san_queue.enqueue_mod_san_request(
                        json.dumps(cert_obj.to_dict()))
                    return self.responder.ssl_certificate_provisioned(None, {
                        'status': 'create_in_progress',
                        'san cert': None,
                        # Add logging so it is easier for testing
                        'created_at': str(datetime.datetime.now()),
                        'action': 'No available san cert for {0} right now,'
                                  ' or no san cert info available. Support:'
                                  'Please write down the domain and keep an'
                                  ' eye on next available freed-up SAN certs.'
                                  ' More provisioning might be needed'.format(
                            cert_obj.domain_name)
                    })
            except Exception as e:
                LOG.exception(
                    "Error {0} during certificate creation for {1} "
                    "sending the request sent back to the queue.".format(
                        e, cert_obj.domain_name
                    )
                )
                try:
                    self.mod_san_queue.enqueue_mod_san_request(
                        json.dumps(cert_obj.to_dict()))
                    return self.responder.ssl_certificate_provisioned(None, {
                        'status': 'create_in_progress',
                        'san cert': None,
                        # Add logging so it is easier for testing
                        'created_at': str(datetime.datetime.now()),
                        'action': (
                            'San cert request for {0} has been '
                            'enqueued.'.format(cert_obj.domain_name)
                        )
                    })
                except Exception as exc:
                    LOG.exception("Unable to enqueue {0}, Error: {1}".format(
                        cert_obj.domain_name,
                        exc
                    ))
                    return self.responder.ssl_certificate_provisioned(None, {
                        'status': 'failed',
                        'san cert': None,
                        'created_at': str(datetime.datetime.now()),
                        'action': 'Waiting for action... Provision '
                                  'san cert failed for {0} failed.'.format(
                            cert_obj.domain_name)
                    })
        elif cert_obj.cert_type == 'sni':
            # create a DV SAN SNI certificate using Akamai CPS API
            return self.create_sni_certificate(
                cert_obj, enqueue, https_upgrade)
        else:
            return self.responder.ssl_certificate_provisioned(None, {
                'status': 'failed',
                'reason': "Cert type : {0} hasn't been implemented".format(
                    cert_obj.cert_type
                )
            })

Example 55

Project: p2pool Source File: web.py
def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event(), static_dir=None):
    node = wb.node
    start_time = time.time()
    
    web_root = resource.Resource()
    
    def get_users():
        height, last = node.tracker.get_height_and_last(node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
        return res
    
    def get_current_scaled_txouts(scale, trunc=0):
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
        return json.dumps(dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
        ))
    
    def get_global_stats():
        # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        diff = bitcoin_data.target_to_difficulty(wb.current_work.value['bits'].target)

        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
            network_block_difficulty=diff,
            network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD),
        )
    
    def get_local_stats():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        
        my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()

        miner_last_difficulties = {}
        for addr in wb.last_work_shares.value:
            miner_last_difficulties[addr] = bitcoin_data.target_to_difficulty(wb.last_work_shares.value[addr].target)
        
        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            miner_last_difficulties=miner_last_difficulties,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-8,
            warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value),
            donation_proportion=wb.donation_percentage/100,
            version=p2pool.__version__,
            protocol_version=p2p.Protocol.VERSION,
            fee=wb.worker_fee,
        )
    
    class WebInterface(deferred_resource.DeferredResource):
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args
        
        def getChild(self, child, request):
            return WebInterface(self.func, self.mime_type, self.args + (child,))
        
        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
    
    def decent_height():
        return min(node.tracker.get_height(node.best_share_var.value), 720)
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
        p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
        dict([(a, (yield b)) for a, b in
            [(
                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
                ))()
            ) for peer in list(node.p2p_node.peers.itervalues())]
        ])
    ))))
    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
    web_root.putChild('payout_addrs', WebInterface(lambda: list(('%s' % bitcoin_data.pubkey_hash_to_address(add, node.net.PARENT)) for add in wb.pubkeys.keys)))
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
        ts=s.timestamp,
        hash='%064x' % s.header_hash,
        number=p2pool_data.parse_bip0034(s.share_data['coinbase'])[0],
        share='%064x' % s.hash,
    ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
    
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
    
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
    def update_stat_log():
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        
        my_current_payout=0.0
        for add in wb.pubkeys.keys:
            my_current_payout+=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(add), 0)*1e-8
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=my_current_payout,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-8,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    x = deferral.RobustLoopingCall(update_stat_log)
    x.start(5*60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))
    
    def get_share(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            far_parent='%064x' % share.share_info['far_share_hash'],
            children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
                absheight=share.absheight,
                abswork=share.abswork,
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
                    last_txout_nonce='%016x' % share.contents['last_txout_nonce'],
                ),
                other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)],
            ),
        )

    def get_share_address(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        return bitcoin_data.script2_to_address(share.new_script, node.net.PARENT)

    new_root.putChild('payout_address', WebInterface(lambda share_hash_str: get_share_address(share_hash_str)))
    new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
    new_root.putChild('my_share_hashes', WebInterface(lambda: ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes]))
    def get_share_data(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return ''
        share = node.tracker.items[int(share_hash_str, 16)]
        return p2pool_data.share_type.pack(share.as_share())
    new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
    new_root.putChild('currency_info', WebInterface(lambda: dict(
        symbol=node.net.PARENT.SYMBOL,
        block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
        address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
        tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
    )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
    
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_share_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False,
            multivalues=True, multivalue_undefined_means_0=True,
            default_func=graph.make_multivalue_migrator(dict(good='local_share_hash_rate', dead='local_dead_share_hash_rate', orphan='local_orphan_share_hash_rate'),
                post_func=lambda bins: [dict((k, (v[0] - (sum(bin.get(rem_k, (0, 0))[0] for rem_k in ['dead', 'orphan']) if k == 'good' else 0), v[1])) for k, v in bin.iteritems()) for bin in bins])),
        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'current_payout': graph.DataStreamDescription(dataview_descriptions),
        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
        'peers': graph.DataStreamDescription(dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator(dict(incoming='incoming_peers', outgoing='outgoing_peers'))),
        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
        'memory_usage': graph.DataStreamDescription(dataview_descriptions),
    }, hd_obj)
    x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)
    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @wb.share_received.watch
    def _(work, dead, share_hash):
        t = time.time()
        if not dead:
            hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=work))
        else:
            hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=work))
        def later():
            res = node.tracker.is_child_of(share_hash, node.best_share_var.value)
            if res is None: res = False # share isn't connected to sharechain? assume orphaned
            if res and dead: # share was DOA, but is now in sharechain
                # move from dead to good
                hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=-work, good=work))
            elif not res and not dead: # share wasn't DOA, and isn't in sharechain
                # move from good to orphan
                hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=-work, orphan=work))
        reactor.callLater(200, later)
    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
    def add_point():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
        t = time.time()
        
        pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)
        
        current_txouts = node.get_current_txouts()
        my_current_payouts = 0.0
        for add in wb.pubkeys.keys:
            my_current_payouts += current_txouts.get(bitcoin_data.pubkey_hash_to_script2(add), 0)*1e-8
        hd.datastreams['current_payout'].add_datum(t, my_current_payouts)
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
        
        hd.datastreams['peers'].add_datum(t, dict(
            incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
            outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
        ))
        
        vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()
    x = deferral.RobustLoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)
    @node.bitcoind_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    if static_dir is None:
        static_dir = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'web-static')
    web_root.putChild('static', static.File(static_dir))
    
    return web_root
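
The web interface above persists its rolling statistics and graph state by calling json.dumps on plain lists and dicts, writing the result to disk, and rehydrating with json.loads on startup. A minimal, self-contained sketch of that pattern (the file name and record fields here are illustrative, not the project's actual layout):

import json
import os
import time

STATS_PATH = 'stats.json'  # illustrative path

def load_stat_log(path=STATS_PATH):
    # Tolerate a missing or corrupt file, as the example does with its try/except
    if not os.path.exists(path):
        return []
    try:
        with open(path, 'r') as f:
            return json.loads(f.read())
    except ValueError:
        return []

def append_stat(stat_log, path=STATS_PATH):
    # Drop entries older than 24 hours, append a new sample, rewrite the file
    while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
        stat_log.pop(0)
    stat_log.append({'time': time.time(), 'pool_hash_rate': 0.0})
    with open(path, 'w') as f:
        f.write(json.dumps(stat_log))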

Example 56

Project: certitude Source File: web.py
@app.route('/api/scan/', methods=['POST'])
def api_json():
    if 'logged_in' in session:

        def getCible(param):
            param_list = param.get('ip_list', None)
            param_ip = param.get('ip', None)
            param_hostname = param.get('hostname', None)

            list = []

            if param_list is not None and param_list!='':
                liste = param_list.replace('\r\n','\n')
                ips = liste.split('\n')
                list+=[(e, 'ipl') for e in ips]


            if param_ip is not None and param_ip!='':
                list.append((param_ip, 'ipn'))
                

            if param_hostname is not None and param_hostname!='' :
                list.append((param_hostname, 'host'))

            return list

        loggingserver.debug('Scan request incoming ')
        param = request.form

        # param = urlparse.parse_qs(args[1])
        # Target IP(s)
        ip_list = getCible(param)
        if len(ip_list) > 0:

            # Priority IOC
            try:
                priority_ioc = int(param.get('priority_ioc'))
            except:
                priority_ioc = 10
            if not priority_ioc > 0:
                priority_ioc = 10

            # Priority HASH
            try:
                priority_hash = int(param.get('priority_hash'))
            except:
                priority_hash = 10
            if not priority_hash > 0:
                priority_hash = 10

            # Retries count (IOC)
            essais_ioc = param.get('retries_ioc')
            if essais_ioc is not None:
                try:
                    assert 0 < int(essais_ioc) <= 10
                    retries_left_ioc = int(essais_ioc)
                except:
                    retries_left_ioc = 10
            else:
                retries_left_ioc = 10

            # Retries count (hash)
            essais_hash = param.get('retries_hash')
            if essais_hash is not None:
                try:
                    assert 0 < int(essais_hash) <= 10
                    retries_left_hash = int(essais_hash)
                except:
                    retries_left_hash = 10
            else:
                retries_left_hash = 10

            subnet = 'n/a'
            subnetp = param.get('subnet', None)
            if subnetp  is not None:
                subnet = subnetp

            batch = ''
            batchp = param.get('batch', None)
            if batchp is not None:
                batch = batchp

            reponse = {}
            reponse['code'] = 200
            reponse['ips'] = {}

            # Add to the queue...
            nb_ip, nb_ip_ok = 0, 0
            for ip, iptype in ip_list:
                actualise = False

                # try:
                    # ip_int = int(ip)
                # except ValueError,e:
                    # try:
                        # ipn = IPNetwork(ip)
                        # ip_int = int(ipn[0])
                    # except Exception, e:
                        # if iptype=='ip':
                            # reponse['ips'][str(ip)] = 'invalid IP address'
                            # continue
                        # ip_int=0
                
                try:
                
                    if iptype[:2]=='ip':                   
                        ipn = netaddr.IPNetwork(ip)
                    else:
                        ipn = [ip]
                        
                    ipSubnet = str(ip) if iptype=='ipl' else subnet

                    for ipa in ipn:
                        nb_ip+=1
                    
                        if param.get('force', None) is None:
                            limite_avant_nouvel_essai = datetime.datetime.now() - datetime.timedelta(0, SECONDES_POUR_RESCAN)
                            if dbsession.query(Result).filter(Result.ip == str(ipa), Result.finished >= limite_avant_nouvel_essai).count() > 0:
                                reponse['ips'][str(ipa)] = 'already scanned a few moments ago...'
                                continue
                            elif dbsession.query(Task).filter(Task.ip == str(ipa), Task.batch_id == batch, Task.date_soumis >= limite_avant_nouvel_essai).count() > 0:
                                reponse['ips'][str(ipa)] = 'already requested a few moments ago'
                                continue
                        
                        nb_ip_ok+=1
                        tache = Task(
                            ip = str(ipa),
                            priority_ioc  =  priority_ioc,
                            priority_hash = priority_hash,
                            reserved_ioc = False,
                            reserved_hash = False,
                            iocscanned = False,
                            hashscanned = False,
                            ip_demandeur = request.remote_addr,
                            retries_left_ioc = retries_left_ioc,
                            retries_left_hash = retries_left_hash,
                            commentaire = ipSubnet,
                            batch_id = batch
                        )
                        dbsession.add(tache)
                        
                    if batch and len(batch) > 0 and not actualise:
                        reponse['ips'][str(ip)] = 'added to batch ' + batch
                    elif batch and len(batch) > 0 and actualise:
                        reponse['ips'][str(ip)] = 'added to batch ' + batch + ' for retry'
                    else:
                        reponse['ips'][str(ip)] = 'added to queue'
                        
                    reponse['ips'][str(ip)] +=  ' (%d tries for iocscan, %d tries for hashscan)' % (retries_left_ioc, retries_left_hash)
                        
                except netaddr.core.AddrFormatError:
                    reponse['ips'][str(ip)] = ' not added to batch ' + batch + ': bad formatting'

            
            reponse['message'] = 'Requested scan of %d IP addresses, %d were OK' % (nb_ip, nb_ip_ok)
            dbsession.commit()
            return Response(
                status=200,
                response=json.dumps(
                    reponse,
                    indent=4
                ),
                content_type='application/json'
            )

        else:
            return APIscan()

    else: # Not logged in
        return redirect(app.jinja_env.globals['url_for']('login'))
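
The endpoint above assembles a plain dict (reponse) and hands json.dumps(reponse, indent=4) to a Flask Response with content_type='application/json'. A minimal sketch of just the serialization step, with illustrative field values:

import json

def make_scan_body(code, ips, message):
    # Mirrors the shape of the 'reponse' dict built above; the values are made up
    reponse = {'code': code, 'ips': ips, 'message': message}
    return json.dumps(reponse, indent=4)

print(make_scan_body(200,
                     {'10.0.0.1': 'added to queue (10 tries for iocscan, 10 tries for hashscan)'},
                     'Requested scan of 1 IP addresses, 1 were OK'))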

Example 57

Project: autonetkit Source File: ank_json.py
def jsonify_anm_with_graphics(anm, nidb=None):
    """ Returns a dictionary of json-ified overlay graphs, with graphics data appended to each overlay"""
    from collections import defaultdict
    import math
    anm_json = {}
    test_anm_data = {}
    graphics_graph = anm["graphics"]._graph.copy()
    phy_graph = anm["phy"]._graph  # to access ASNs

    """simple layout of deps - more advanced layout could
    export to dot and import to omnigraffle, etc
    """
    g_deps = anm['_dependencies']
    nm_graph = g_deps._graph
    # build tree
    layers = defaultdict(list)
    nodes_by_layer = {}
    if len(nm_graph) > 0:
        topo_sort = nx.topological_sort(nm_graph)
        # trim out any nodes with no successors

        tree_root = topo_sort[0]
        # Note: topo_sort ensures that once reach node, would have reached its predecessors
        # start at first element after root
        for node in topo_sort:
            preds = nm_graph.predecessors(node)
            if len(preds):
                pred_level = max(nm_graph.node[p].get('level') for p in preds)
            else:
                # a root node
                pred_level = -1  # this node becomes level 0
            level = pred_level + 1
            nm_graph.node[node]['level'] = level
            layers[level].append(node)

            data = nm_graph.node[node]
            data['y'] = 100 * data['level']
            data['device_type'] = "ank_internal"

        MIDPOINT = 50  # assign either side of
        for layer, nodes in layers.items():
            # TODO: since sort is stable, first sort by parent x (avoids
            # zig-zags)
            nodes = sorted(nodes, reverse=True,
                           key=lambda x: nm_graph.degree(x))
            for index, node in enumerate(nodes):
                # TODO: work out why weird offset due to the math.pow *
                #node_x = MIDPOINT  + 125*index * math.pow(-1, index)
                node_x = MIDPOINT + 125 * index
                nm_graph.node[node]['x'] = node_x
                nodes_by_layer[node] = layer

    import random
    attribute_cache = defaultdict(dict)
    # the attributes to copy
    # TODO: check behaviour for None if explicitly set
    # TODO: need to check if attribute is set in overlay..... using API
    copy_attrs = ["x", "y", "asn", "label", "device_type", "device_subtype"]
    for node, in_data in phy_graph.nodes(data=True):
        out_data = {key: in_data.get(key) for key in copy_attrs
                    if key in in_data}
        attribute_cache[node].update(out_data)

    # Update for graphics (over-rides phy)
    for node, in_data in graphics_graph.nodes(data=True):
        out_data = {key: in_data.get(key) for key in copy_attrs
                    if key in in_data}
        attribute_cache[node].update(out_data)

        # append label from function
        for node in anm['phy']:
            attribute_cache[node.id]['label'] = str(node)

    overlay_ids = sorted(anm.overlays(),
                         key=lambda x: nodes_by_layer.get(x, 0))

    for overlay_id in overlay_ids:
        try:
            #make a shallow copy
            # input_graph = anm[overlay_id]._graph
            # nm_graph = shallow_copy_nx_graph(input_graph)
            nm_graph = anm[overlay_id]._graph.copy()
        except Exception, e:
            log.warning("Unable to copy overlay %s: %s", overlay_id, e)
            continue

        if overlay_id == "_dependencies":
            # convert to undirected for visual clarity
            nm_graph = nx.Graph(nm_graph)

        for node in nm_graph:
            node_data = dict(attribute_cache.get(node, {}))
            # update with node data from this overlay
            # TODO: check is not None won't clobber specifically set in
            # overlay...
            graph_node_data = nm_graph.node[node]
            overlay_node_data = {key: graph_node_data.get(key)
                                 for key in graph_node_data}
            node_data.update(overlay_node_data)

            # check for any non-set properties
            if node_data.get("x") is None:
                new_x = random.randint(0, 800)
                node_data['x'] = new_x
                # store for other graphs to use
                log.debug("Allocated random x %s to node %s in overlay %s" %
                          (new_x, node, overlay_id))
                attribute_cache[node]['x'] = new_x
            else:
                # cache for next time, such as vswitch in l2 for l2_bc
                attribute_cache[node]['x'] = node_data['x']
            if node_data.get("y") is None:
                new_y = random.randint(0, 800)
                node_data['y'] = new_y
                # store for other graphs to use
                attribute_cache[node]['y'] = new_y
                log.debug("Allocated random y %s to node %s in overlay %s" %
                          (new_y, node, overlay_id))
            else:
                attribute_cache[node]['y'] = node_data['y']

            # TODO: may want to re-introduce graphics to store cross-layer data for virtual nodes
            # and cache device type and device subtype
            # TODO: catch for each, if node not in cache
            try:
                attribute_cache[node]['device_type'] = node_data['device_type']
            except KeyError:
                pass  # not set
            try:
                attribute_cache[node][
                    'device_subtype'] = node_data['device_subtype']
            except KeyError:
                pass  # not set

            if node_data.get("label") == node:
                # try from cache
                node_data['label'] = attribute_cache.get(node, {}).get("label")
            if node_data.get("label") is None:
                node_data['label'] = str(node)  # don't need to cache

            # store on graph
            nm_graph.node[node] = node_data

            try:
                del nm_graph.node[node]['id']
            except KeyError:
                pass

            if nidb:
                nidb_graph = nidb.raw_graph()
                if node in nidb:
                    DmNode_data = nidb_graph.node[node]
                    try:
                        # TODO: check why not all nodes have _ports initialised
                        overlay_interfaces = nm_graph.node[node]["_ports"]
                    except KeyError:
                        continue  # skip copying interface data for this node

                    for interface_id in overlay_interfaces.keys():
                        # TODO: use raw_interfaces here
                        try:
                            nidb_interface_id = DmNode_data[
                                '_ports'][interface_id]['id']
                        except KeyError:
                            # TODO: check why arrive here - something not
                            # initialised?
                            continue
                        nm_graph.node[node]['_ports'][
                            interface_id]['id'] = nidb_interface_id
                        id_brief = shortened_interface(nidb_interface_id)
                        nm_graph.node[node]['_ports'][
                            interface_id]['id_brief'] = id_brief

        anm_json[overlay_id] = ank_json_dumps(nm_graph)
        test_anm_data[overlay_id] = nm_graph

    if nidb:
        test_anm_data['nidb'] = prepare_nidb(nidb)

    result = json.dumps(
        test_anm_data, cls=AnkEncoder, indent=4, sort_keys=True)
    return result
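
The final step above serializes a dict of NetworkX graphs with json.dumps(..., cls=AnkEncoder, indent=4, sort_keys=True), relying on a custom encoder for objects the default encoder rejects. A minimal sketch of the cls= hook, using a stand-in encoder rather than autonetkit's AnkEncoder:

import json

class FallbackEncoder(json.JSONEncoder):
    # Illustrative stand-in for AnkEncoder: stringify anything json can't serialize
    def default(self, obj):
        return str(obj)

data = {'overlay': {'node': object(), 'asn': 65000}}
print(json.dumps(data, cls=FallbackEncoder, indent=4, sort_keys=True))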

Example 58

Project: elijah-openstack Source File: cloudlet_client.py
Function: main
def main(argv=None):
    CMD_CREATE_BASE = "create-base"
    CMD_EXPORT_BASE = "export-base"
    CMD_IMPORT_BASE = "import-base"
    CMD_CREATE_OVERLAY = "create-overlay"
    CMD_DOWNLOAD = "download"
    CMD_SYNTHESIS = "synthesis"
    CMD_HANDOFF = "handoff"
    CMD_HANDOFF_RECV = "handoff-recv"
    CMD_EXT_LIST = "ext-list"
    commands = {
        CMD_CREATE_BASE: "create base vm from the running instance",
        CMD_CREATE_OVERLAY: "create VM overlay from the customizaed VM",
        CMD_DOWNLOAD: "Download VM overlay",
        CMD_SYNTHESIS: "VM Synthesis (Need downloadable URLs for VM overlay)",
        CMD_HANDOFF: "Perform VM handoff to destination URL",
        CMD_HANDOFF_RECV: "Send handoff recv message to the dest OpenStack",
        CMD_EXT_LIST: "List available extensions",
        CMD_EXPORT_BASE: "Export Base VM",
        CMD_IMPORT_BASE: "Import Base VM",
    }

    settings, args = process_command_line(sys.argv[1:], commands)
    token, endpoint, glance_endpoint = \
        get_token(settings.server_address, settings.user_name,
                  settings.password, settings.tenant_name)
    sys.stdout.write("Success to log in to %s for tenant %s..\n" % \
        (settings.server_address, settings.tenant_name))

    if len(args) < 1:
        sys.stderr.write("Need command")
        sys.exit(1)
    if args[0] == CMD_CREATE_BASE:
        if len(args) != 3:
            msg = "Error: creating Base VM needs [VM UUID] and [new name]\n"
            msg += " 1) VM UUID: UUID of a running instance that you want to use for base VM\n"
            msg += " 2) new name: name for base VM\n"
            sys.stderr.write(msg)
            sys.exit(1)
        instance_uuid = args[1]
        snapshot_name = args[2]
        request_cloudlet_base(settings.server_address, token,
                              urlparse(endpoint), instance_uuid,
                              snapshot_name)
    elif args[0] == CMD_CREATE_OVERLAY:
        if len(args) != 3:
            msg = "Error: creating VM overlay needs [VM UUID] and [new name]\n"
            msg += " 1) VM UUID: UUID of a running instance that you want to create VM overlay\n"
            msg += " 2) new name: name for VM overlay\n"
            sys.stderr.write(msg)
            sys.exit(1)
        instance_uuid = args[1]
        snapshot_name = args[2]
        ret = request_create_overlay(settings.server_address,
                                     token,
                                     urlparse(endpoint),
                                     instance_uuid,
                                     snapshot_name)
        pprint(ret)
    elif args[0] == CMD_DOWNLOAD:
        if len(args) != 2:
            msg = "Error: downlading VM overlay needs [Image UUID]\n"
            msg += " 1) Image UUID: UUID of a VM overlay\n"
            sys.stderr.write(msg)
            sys.exit(1)
        image_name = args[1]
        output_name = image_name + ".zip"
        sys.stdout.write("Download %s to %s...\n" % (image_name, output_name))
        overlay_download(settings.server_address, token,
                         urlparse(glance_endpoint),
                         image_name, output_name)
    elif args[0] == CMD_EXPORT_BASE:
        if len(args) != 2:
            msg = "Error: Exporting Base VM needs [Image UUID]\n"
            msg += " 1) Image UUID: UUID of a Base VM (base disk)\n"
            sys.stderr.write(msg)
            sys.exit(1)
        basedisk_uuid = args[1]
        output_path = os.path.join(os.curdir, "base-%s.zip" % basedisk_uuid)
        sys.stdout.write("Export %s to %s...\n" % (basedisk_uuid, output_path))
        if os.path.exists(output_path):
            is_overwrite = raw_input(
                "%s exists. Overwirte it? (y/N) " %
                output_path)
            if is_overwrite != 'y':
                sys.exit(1)
        request_export_basevm(settings.server_address, token,
                              urlparse(endpoint), basedisk_uuid, output_path)
    elif args[0] == CMD_IMPORT_BASE:
        if len(args) != 3:
            msg = "Error: Importing Base VM needs [Path to Base VM file] [Name for Base VM]\n"
            msg += " 1) Path to Base VM file: Absolute path to base VM package\n"
            msg += " 2) Name for Base VM: new name for Base VM\n"
            sys.stderr.write(msg)
            sys.exit(1)
        import_filepath = args[1]
        basevm_name = args[2]
        if not os.access(import_filepath, os.R_OK):
            sys.stderr.write("Cannot access the file at %s\n" % import_filepath)
            sys.exit(1)
        try:
            request_import_basevm(settings.server_address, token,
                                  urlparse(endpoint), urlparse(glance_endpoint),
                                  import_filepath, basevm_name)
            sys.stdout.write("SUCCESS\n")
        except CloudletClientError as e:
            sys.stderr.write("Error: %s\n" % str(e))
    elif args[0] == CMD_SYNTHESIS:
        if len(args) != 3:
            msg = "Error: synthesis cmd needs [overlay url] and [name of VM]\n"
            sys.stderr.write(msg)
            sys.exit(1)
        overlay_url = str(args[1])
        new_instance_name = str(args[2])
        try:
            ret = request_synthesis(settings.server_address, token,
                                    urlparse(endpoint), key_name=None,
                                    server_name=new_instance_name,
                                    overlay_url=overlay_url)
            pprint(ret)
        except CloudletClientError as e:
            sys.stderr.write("Error: %s\n" % str(e))
    elif args[0] == CMD_HANDOFF:
        if len(args) != 3:
            msg = "Error: VM handoff needs [Instance UUID] []\n"
            msg += " 1) Instance UUID: Absolute path to base VM package\n"
            msg += " 2) Destination Credential : File path of a credential file for destination OpenStacknew\n"
            sys.stderr.write(msg)
            sys.exit(1)
        instance_uuid = str(args[1])
        handoff_dest_credential_file = str(args[2])

        try:
            # get token for the handoff destination
            dest_cred = _parse_credential_file(handoff_dest_credential_file)
            dest_account, dest_passwd, dest_tenant, dest_addr = dest_cred
            dest_token, dest_endpoint, dest_glance_endpoint = \
                get_token(dest_addr, dest_account, dest_passwd,
                            dest_tenant)
            handoff_url = dest_endpoint

            request_handoff(settings.server_address,
                            token, urlparse(endpoint),
                            instance_uuid,
                            handoff_url,
                            dest_token)
        except CloudletClientError as e:
            sys.stderr.write(str(e))
            sys.exit(1)
    elif args[0] == CMD_HANDOFF_RECV:
        if not len(args) == 3:
            msg = "Need overlay_url and name of the instance"
            raise CloudletClientError(msg)

        overlay_url = str(args[1])
        new_instance_name = str(args[2])
        try:
            _request_handoff_recv(settings.server_address, token,
                                  urlparse(endpoint),
                                  server_name=new_instance_name,
                                  overlay_url=overlay_url)
        except CloudletClientError as e:
            sys.stderr.write("Error: %s\n" % str(e))
    elif args[0] == CMD_EXT_LIST:
        filter_name = None
        if len(args) == 2:
            filter_name = args[1]
        ext_info = get_extension(settings.server_address,
                                    token,
                                    urlparse(endpoint),
                                    filter_name)
        sys.stdout.write(json.dumps(ext_info, indent=2) + "\n")
    else:
        sys.stderr.write("No such command")
        sys.exit(1)
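
For the ext-list command above, the client simply pretty-prints the API result with json.dumps(ext_info, indent=2) before writing it to stdout. A minimal sketch with an illustrative payload:

import json
import sys

ext_info = [{'name': 'os-cloudlet', 'description': 'Cloudlet synthesis extension'}]  # illustrative
sys.stdout.write(json.dumps(ext_info, indent=2) + "\n")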

Example 59

Project: jirafs Source File: fetch.py
    def main(self, folder, **kwargs):
        folder.clear_cache()

        file_meta = folder.get_remote_file_metadata(shadow=True)
        original_hash = folder.run_git_command('rev-parse', 'jira')

        for filename in folder.get_remotely_changed():
            for attachment in folder.issue.fields.attachment:
                if attachment.filename == filename:
                    folder.log(
                        'Download file "%s"',
                        (attachment.filename, ),
                    )
                    content = six.BytesIO(attachment.get())
                    filename, content = folder.execute_plugin_method_series(
                        'alter_file_download',
                        args=((filename, content, ),),
                        single_response=True,
                    )
                    save_path = folder.get_shadow_path(filename)
                    with open(save_path, 'wb') as save_file:
                        content.seek(0)
                        save_file.write(content.read())
                        file_meta[filename] = attachment.created

        folder.set_remote_file_metadata(file_meta, shadow=True)

        field_map = self.get_field_map(folder)
        detail_path = folder.get_shadow_path(constants.TICKET_DETAILS)
        with io.open(detail_path, 'w', encoding='utf-8') as dets:
            for field in sorted(folder.issue.raw['fields'].keys()):
                value = folder.issue.raw['fields'][field]
                if isinstance(value, six.string_types):
                    value = value.replace('\r\n', '\n').strip()
                elif value is None:
                    value = ''
                elif field in constants.NO_DETAIL_FIELDS:
                    continue

                if not isinstance(value, six.string_types):
                    value = json.dumps(
                        value,
                        sort_keys=True,
                        indent=4,
                        ensure_ascii=False
                    )

                if field in constants.FILE_FIELDS:
                    # Write specific fields to their own files without
                    # significant alteration

                    file_field_path = folder.get_shadow_path(
                        constants.TICKET_FILE_FIELD_TEMPLATE
                    ).format(field_name=field)
                    with io.open(
                        file_field_path,
                        'w',
                        encoding='utf-8'
                    ) as file_field_file:
                        file_field_file.write(six.text_type(value))
                        file_field_file.write(
                            six.text_type('\n')
                        )  # For unix' sake
                else:
                    # Normal fields, though, just go into the standard
                    # fields file.
                    if value is None:
                        continue
                    elif field in constants.NO_DETAIL_FIELDS:
                        continue

                    human_readable = field_map.get(field)
                    dets.write(
                        six.text_type('* %s (%s):\n') % (
                            human_readable,
                            field
                        )
                    )
                    for line in value.replace('\r\n', '\n').split('\n'):
                        dets.write(six.text_type('    %s\n' % line))

        links_path = folder.get_shadow_path(constants.TICKET_LINKS)
        with io.open(links_path, 'w', encoding='utf-8') as links_handle:
            # Write issue links
            for link in folder.issue.fields.issuelinks:
                category = 'outward'
                if 'inwardIssue' in link.raw:
                    category = 'inward'

                links_handle.write(
                    six.u("* {status}: {key}\n").format(
                        status=getattr(link.type, category).title(),
                        key=getattr(link, '%sIssue' % category).key
                    )
                )

            # Write remote links
            for link in folder.jira.remote_links(folder.issue):
                if link.object.title:
                    links_handle.write(
                        six.u("* {title}: {url}\n").format(
                            title=link.object.title,
                            url=link.object.url
                        )
                    )
                else:
                    links_handle.write(
                        six.u("* {url}\n").format(
                            title=link.object.title,
                            url=link.object.url
                        )
                    )

        comments_filename = folder.get_shadow_path(constants.TICKET_COMMENTS)
        with io.open(comments_filename, 'w', encoding='utf-8') as comm:
            for comment in folder.issue.fields.comment.comments:
                comm.write(
                    six.text_type('* At %s, %s wrote:\n\n') % (
                        comment.created,
                        comment.author
                    )
                )
                final_lines = []
                lines = comment.body.replace('\r\n', '\n').split('\n')
                for line in lines:
                    if not line:
                        final_lines.append(six.text_type(''))
                    else:
                        final_lines.extend(
                            textwrap.wrap(
                                line,
                                width=70,
                                expand_tabs=False,
                                replace_whitespace=False,
                                break_long_words=False,
                            )
                        )
                for line in final_lines:
                    comm.write(six.text_type('    %s\n') % line)
                comm.write(six.text_type('\n'))

        folder.store_cached_issue()

        # Clone subtasks
        subtasks = folder.issue.fields.subtasks
        if len(subtasks) > 0:
            commands = utils.get_installed_commands()
            jira = utils.lazy_get_jira()
            with open(
                folder.get_metadata_path('subtasks'),
                'w'
            ) as out:
                for issue in subtasks:
                    out.write(
                        '%s\n' % issue.key
                    )
                    issue_path = folder.get_path(issue.key)
                    if not os.path.exists(issue_path):
                        command_name = 'clone'
                        args = [
                            issue.permalink(),
                            issue_path
                        ]
                        path = folder.path
                        commands[command_name].execute_command(
                            args,
                            jira=jira,
                            path=path,
                            command_name=command_name
                        )
        folder.build_ignore_files()

        folder.run_git_command('add', '-A', shadow=True)
        folder.run_git_command(
            'commit', '-m', 'Fetched remote changes',
            failure_ok=True, shadow=True
        )

        self.apply_macros(folder)

        folder.run_git_command('push', 'origin', 'jira', shadow=True)
        final_hash = folder.run_git_command('rev-parse', 'jira')
        if original_hash != final_hash:
            folder.log(
                "Updated 'jira' to %s" % final_hash
            )

        return utils.PostStatusResponse(
            original_hash == final_hash,
            final_hash
        )
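
When a JIRA field value is not a string, the fetch command above renders it with json.dumps(value, sort_keys=True, indent=4, ensure_ascii=False) so lists and nested objects stay readable in the details file. A minimal sketch with an illustrative field value:

import json

# ensure_ascii=False keeps any non-ASCII characters readable instead of
# escaping them to \uXXXX sequences; sort_keys makes the output stable.
value = {'labels': ['backend', 'urgent'], 'votes': 3}
print(json.dumps(value, sort_keys=True, indent=4, ensure_ascii=False))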

Example 60

Project: pypot Source File: httpserver.py
Function: init
    def __init__(self, robot, host='0.0.0.0', port='8080', cross_domain_origin='*', quiet=True):
        AbstractServer.__init__(self, robot, host, port)
        self.quiet = quiet
        self.app = bottle.Bottle()

        jd = lambda s: json.dumps(s, cls=MyJSONEncoder)
        self.app.install(bottle.JSONPlugin(json_dumps=jd))

        if(cross_domain_origin):
            self.app.install(EnableCors(cross_domain_origin))

        rr = self.restfull_robot

        @self.app.route("/", method=['OPTIONS'])
        @self.app.route("/<p:path>", method=['OPTIONS'])
        def options(p=""):
            return ""

        # Motors route
        @self.app.get('/')
        @self.app.get('/robot.json')
        def robot():
            out = {
                'motors': [],
                'primitives': []
            }
            for m in rr.get_motors_list('motors'):
                motor = {}
                for r in rr.get_motor_registers_list(m):
                    try:
                        motor[r] = rr.get_motor_register_value(m, r)
                    except AttributeError:
                        pass
                out['motors'].append(motor)

            running_primitives = rr.get_running_primitives_list()
            for prim in rr.get_primitives_list():
                primitive = {'primitive': prim,
                             'running': prim in running_primitives,
                             'properties': [],
                             'methods': rr.get_primitive_methods_list(prim)
                             }
                for prop in rr.get_primitive_properties_list(prim):
                    primitive['properties'].append({'property': prop, 'value': rr.get_primitive_property(prim, prop)})
                out['primitives'].append(primitive)

            return out

        @self.app.get('/motor/list.json')
        @self.app.get('/motor/<alias>/list.json')
        def get_motor_list(alias='motors'):
            return {
                alias: rr.get_motors_list(alias)
            }

        @self.app.get('/sensor/list.json')
        def get_sensor_list():
            return {
                'sensors': rr.get_sensors_list()
            }

        @self.app.get('/motor/alias/list.json')
        def get_motor_alias():
            return {
                'alias': rr.get_motors_alias()
            }

        @self.app.get('/motor/<motor_name>/register/list.json')
        @self.app.get('/sensor/<motor_name>/register/list.json')
        def get_motor_registers(motor_name):
            return {
                'registers': rr.get_motor_registers_list(motor_name)
            }

        @self.app.get('/motor/<motor_name>/register/<register_name>')
        @self.app.get('/sensor/<motor_name>/register/<register_name>')
        def get_register_value(motor_name, register_name):
            return {
                register_name: rr.get_motor_register_value(motor_name, register_name)
            }

        @self.app.post('/motor/<motor_name>/register/<register_name>/value.json')
        @self.app.post('/sensor/<motor_name>/register/<register_name>/value.json')
        def set_register_value(motor_name, register_name):
            rr.set_motor_register_value(motor_name, register_name,
                                        bottle.request.json)
            return {}

        # Sensors route

        # Primitives route
        @self.app.get('/primitive/list.json')
        def get_primitives_list():
            return {
                'primitives': rr.get_primitives_list()
            }

        @self.app.get('/primitive/running/list.json')
        def get_running_primitives_list():
            return {
                'running_primitives': rr.get_running_primitives_list()
            }

        @self.app.get('/primitive/<prim>/start.json')
        def start_primitive(prim):
            rr.start_primitive(prim)

        @self.app.get('/primitive/<prim>/stop.json')
        def stop_primitive(prim):
            rr.stop_primitive(prim)

        @self.app.get('/primitive/<prim>/pause.json')
        def pause_primitive(prim):
            rr.pause_primitive(prim)

        @self.app.get('/primitive/<prim>/resume.json')
        def resume_primitive(prim):
            rr.resume_primitive(prim)

        @self.app.get('/primitive/<prim>/property/list.json')
        def get_primitive_properties_list(prim):
            return {
                'property': rr.get_primitive_properties_list(prim)
            }

        @self.app.get('/primitive/<prim>/property/<prop>')
        def get_primitive_property(prim, prop):
            res = rr.get_primitive_property(prim, prop)
            return {
                '{}.{}'.format(prim, prop): res
            }

        @self.app.post('/primitive/<prim>/property/<prop>/value.json')
        def set_primitive_property(prim, prop):
            rr.set_primitive_property(prim, prop,
                                      bottle.request.json)

        @self.app.get('/primitive/<prim>/method/list.json')
        def get_primitive_methods_list(prim):
            return {
                'methods': rr.get_primitive_methods_list(prim)
            }

        @self.app.post('/primitive/<prim>/method/<meth>/args.json')
        def call_primitive_method(prim, meth):
            res = rr.call_primitive_method(prim, meth,
                                           bottle.request.json)
            return {
                '{}:{}'.format(prim, meth): res
            }

        @self.app.get('/motors/register/<register_name>')
        def get_motors_register_value(register_name):
            motors_list = rr.get_motors_list('motors')
            registers_motors = {}

            for motor_name in motors_list:
                registers_motors[motor_name] = {
                    register_name: rr.get_motor_register_value(motor_name, register_name)
                }

            return registers_motors
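
The server above plugs a custom encoder into bottle by wrapping json.dumps in a small callable (jd = lambda s: json.dumps(s, cls=MyJSONEncoder)) and passing it to bottle.JSONPlugin(json_dumps=jd). A minimal sketch of that binding, with an illustrative stand-in for MyJSONEncoder:

import json
from functools import partial

class RobotEncoder(json.JSONEncoder):
    # Illustrative stand-in: stringify register values json can't serialize natively
    def default(self, obj):
        return str(obj)

jd = partial(json.dumps, cls=RobotEncoder)
print(jd({'register': 'present_position', 'value': 12.5}))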

Example 61

Project: CumulusCI Source File: run_apex_tests.py
def run_tests():
    username = os.environ.get('SF_USERNAME')
    password = os.environ.get('SF_PASSWORD')
    serverurl = os.environ.get('SF_SERVERURL')
    test_name_match = os.environ.get('APEX_TEST_NAME_MATCH', '%_TEST')
    test_name_exclude = os.environ.get('APEX_TEST_NAME_EXCLUDE', '')
    namespace = os.environ.get('NAMESPACE', None)
    poll_interval = int(os.environ.get('POLL_INTERVAL', 10))
    debug = os.environ.get('DEBUG_TESTS',False) in ['true','True']
    debug_logdir = os.environ.get('DEBUG_LOGDIR')
    json_output = os.environ.get('TEST_JSON_OUTPUT', None)
    junit_output = os.environ.get('TEST_JUNIT_OUTPUT', None)
    
    if namespace:
        namespace = "'{0}'".format(namespace,)
    else:
        namespace = 'null'
    
    sandbox = False
    if serverurl.find('test.salesforce.com') != -1:
        sandbox = True
    
    sf = Salesforce(username=username, password=password, security_token='', sandbox=sandbox, version='32.0')
    
    # Change base_url to use the tooling api
    sf.base_url = sf.base_url + 'tooling/'
    
    # Split test_name_match by commas to allow multiple class name matching options
    where_name = []
    for pattern in test_name_match.split(','):
        if pattern:
            where_name.append("Name LIKE '{0}'".format(pattern))

    # Add any excludes to the where clause
    where_exclude = []
    for pattern in test_name_exclude.split(','):
        if pattern:
            where_exclude.append("(NOT Name LIKE '{0}')".format(pattern,))
   
    # Get all test classes for namespace
    query = "SELECT Id, Name FROM ApexClass WHERE NamespacePrefix = {0}".format(namespace,)
    if where_name:
        query += " AND ({0})".format(' OR '.join(where_name),)
    if where_exclude:
        query += " AND {0}".format(' AND '.join(where_exclude),)

    print "Running Query: {0}".format(query,)
    sys.stdout.flush()

    res = sf.query_all(query)

    print "Found {0} classes".format(res['totalSize'],)
    sys.stdout.flush()

    if not res['totalSize']:
        return {'Pass': 0, 'Fail': 0, 'CompileFail': 0, 'Skip': 0}
    
    classes_by_id = {}
    classes_by_name = {}
    trace_id = None
    results_by_class_name = {}
    classes_by_log_id = {}
    logs_by_class_id = {}
    
    for cls in res['records']:
        classes_by_id[cls['Id']] = cls['Name']
        classes_by_name[cls['Name']] = cls['Id']
        results_by_class_name[cls['Name']] = {}

    # If debug is turned on, setup debug traces for all test classes
    if debug:
        print 'Setting up trace flag to capture debug logs'

        # Get the User's id to set a TraceFlag
        res_user = sf.query("Select Id from User where Username = '{0}'".format(username,))
        user_id = res_user['records'][0]['Id']
        
        # Set up a simple-salesforce sobject for TraceFlag using the tooling api
        TraceFlag = sf.TraceFlag
        TraceFlag.base_url = (u'https://{instance}/services/data/v{sf_version}/tooling/sobjects/{object_name}/'
                     .format(instance=sf.sf_instance,
                             object_name='TraceFlag',
                             sf_version=sf.sf_version))

        # First, delete any old trace flags still lying around
        tf_res = sf.query('Select Id from TraceFlag')
        if tf_res['totalSize']:
            for tf in tf_res['records']:
                TraceFlag.delete(tf['Id'])
    
        expiration = datetime.datetime.now() + datetime.timedelta(seconds=60*60*12)
        res = TraceFlag.create({
            'ApexCode': 'Info',
            'ApexProfiling': 'Debug',
            'Callout': 'Info',
            'Database': 'Info',
            'ExpirationDate': expiration.isoformat(),
            #'ScopeId': user_id,
            'System': 'Info',
            'TracedEntityId': user_id,
            'Validation': 'Info',
            'Visualforce': 'Info',
            'Workflow': 'Info',
        })
        trace_id = res['id']

        print 'Created TraceFlag for user'
    
    # Run all the tests
    print "Queuing tests for execution..."
    sys.stdout.flush()
    job_id = sf.restful('runTestsAsynchronous', params={'classids': ','.join(classes_by_id.keys())})
    
    # Loop waiting for the tests to complete
    while True:
        res = sf.query_all("SELECT Id, Status, ApexClassId FROM ApexTestQueueItem WHERE ParentJobId = '{0}'".format(job_id,))
        counts = {
            'Queued': 0,
            'Processing': 0,
            'Aborted': 0,
            'Completed': 0,
            'Failed': 0,
            'Preparing': 0,
            'Holding': 0,
        }
        for item in res['records']:
            counts[item['Status']] += 1
    
        # If all tests have run, break from the loop
        if not counts['Queued'] and not counts['Processing']:
            print ''
            print '-------------------------------------------------------------------------------'
            print 'Test Results'
            print '-------------------------------------------------------------------------------'
            sys.stdout.flush()
            break
        
        print 'Completed: %(Completed)s  Processing: %(Processing)s  Queued: %(Queued)s' % counts
        sys.stdout.flush()
        sleep(poll_interval)
    
    # Get the test results by method
    res = sf.query_all("SELECT StackTrace,Message, ApexLogId, AsyncApexJobId,MethodName, Outcome, ApexClassId, TestTimestamp FROM ApexTestResult WHERE AsyncApexJobId = '{0}'".format(job_id,))
    
    counts = {
        'Pass': 0,
        'Fail': 0,
        'CompileFail': 0,
        'Skip': 0,
    }
    for result in res['records']:
        class_name = classes_by_id[result['ApexClassId']]
        results_by_class_name[class_name][result['MethodName']] = result
        counts[result['Outcome']] += 1
        if debug and result['ApexLogId']:
            classes_by_log_id[result['ApexLogId']] = result['ApexClassId']
    
    # Fetch debug logs if debug is enabled
    if debug:
        log_ids = "('{0}')".format("','".join([str(id) for id in classes_by_log_id.keys()]),)
        res = sf.query_all("SELECT Id, Application, DurationMilliseconds, Location, LogLength, LogUserId, Operation, Request, StartTime, Status from ApexLog where Id in {0}".format(log_ids,))
        for log in res['records']:
            class_id = classes_by_log_id[log['Id']]
            class_name = classes_by_id[class_id]
            logs_by_class_id[class_id] = log
            # Fetch the debug log file
            body_url = '{0}sobjects/ApexLog/{1}/Body'.format(sf.base_url, log['Id'])
            resp = sf.request.get(body_url, headers=sf.headers)
            log_file = class_name + '.log'
            if debug_logdir:
                log_file = debug_logdir + os.sep + log_file
            f = open(log_file, 'w')
            f.write(resp.content)
            f.close()

            # Parse stats from the log file
            f = open(log_file, 'r')
            method_stats = parse_log(class_name, f)
            
            # Add method stats to results_by_class_name
            for method, info in method_stats.items():
                results_by_class_name[class_name][method].update(info)

        # Delete the trace flag
        TraceFlag.delete(trace_id)

    # Build an OrderedDict of results
    test_results = []

    class_names = results_by_class_name.keys()
    class_names.sort()
    for class_name in class_names:
        class_id = classes_by_name[class_name]
        duration = None
        if debug and class_id in logs_by_class_id:
            duration = int(logs_by_class_id[class_id]['DurationMilliseconds']) * .001
            print 'Class: {0} ({1}s)'.format(class_name, duration)
        else:
            print 'Class: {0}'.format(class_name,)
        sys.stdout.flush()

        method_names = results_by_class_name[class_name].keys()
        method_names.sort()
        for method_name in method_names:
            result = results_by_class_name[class_name][method_name]

            test_results.append({
                'Children': result.get('children', None),
                'ClassName': decode_to_unicode(class_name),
                'Method': decode_to_unicode(result['MethodName']),
                'Message': decode_to_unicode(result['Message']),
                'Outcome': decode_to_unicode(result['Outcome']),
                'StackTrace': decode_to_unicode(result['StackTrace']),
                'Stats': result.get('stats', None),
                'TestTimestamp': result.get('TestTimestamp', None),
            })
            
            # Output result for method
            if debug and json_output and result.get('stats') and 'duration' in result['stats']:
                # If debug is enabled and we're generating the json output, include duration with the test
                print u'   {0}: {1} ({2}s)'.format(
                    result['Outcome'], 
                    result['MethodName'], 
                    result['stats']['duration']
                )
            else:
                print u'   {Outcome}: {MethodName}'.format(**result)

            if debug and not json_output:
                print u'     DEBUG LOG INFO:'
                stats = result.get('stats',None)
                if not stats:
                    print u'       No stats found, likely because of debug log size limit'
                else:
                    stat_keys = stats.keys()
                    stat_keys.sort()
                    for stat in stat_keys:
                        try:
                            value = stats[stat]
                            output = u'       {0} / {1}'.format(value['used'], value['allowed'])
                            print output.ljust(26) + stat
                        except:
                            output = u'       {0}'.format(stats[stat],)
                            print output.ljust(26) + stat
    
            # Print message and stack trace if failed
            if result['Outcome'] in ['Fail','CompileFail']:
                print u'   Message: {Message}'.format(**result)
                print u'   StackTrace: {StackTrace}'.format(**result)
            sys.stdout.flush()
    
    print u'-------------------------------------------------------------------------------'
    print u'Passed: %(Pass)s  Fail: %(Fail)s  Compile Fail: %(CompileFail)s  Skipped: %(Skip)s' % counts
    print u'-------------------------------------------------------------------------------'
    sys.stdout.flush()
    
    if counts['Fail'] or counts['CompileFail']:
        print u''
        print u'Failing Tests'
        print u'-------------'
        print u''
        sys.stdout.flush()

        counter = 0
        for result in test_results:
            if result['Outcome'] not in ['Fail','CompileFail']:
                continue
            counter += 1
            print u'{0}: {1}.{2} - {3}'.format(counter, result['ClassName'], result['Method'], result['Outcome'])
            print u'  Message: {0}'.format(result['Message'],)
            print u'  StackTrace: {0}'.format(result['StackTrace'],)
            sys.stdout.flush()

    if json_output:
        f = codecs.open(json_output, encoding='utf-8', mode='w')
        f.write(json.dumps(test_results))
        f.close()

    if junit_output:
        f = codecs.open(junit_output, encoding='utf-8', mode='w')
        f.write('<testsuite tests="{0}">\n'.format(len(test_results)),)
        for result in test_results:
            testcase = '  <testcase classname="{0}" name="{1}"'.format(result['ClassName'], result['Method'])
            if 'Stats' in result and result['Stats'] and 'duration' in result['Stats']:
                testcase = '{0} time="{1}"'.format(testcase, result['Stats']['duration'])
            if result['Outcome'] in ['Fail','CompileFail']:
                testcase = '{0}>\n'.format(testcase,)
                testcase = '{0}    <failure type="{1}">{2}</failure>\n'.format(
                    testcase, 
                    cgi.escape(result['StackTrace']), 
                    cgi.escape(result['Message']),
                )
                testcase = '{0}  </testcase>\n'.format(testcase,)
            else:
                testcase = '{0} />\n'.format(testcase,)
            f.write(testcase)

        f.write('</testsuite>')
        f.close()
        

    return counts
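
The JSON output written at the end of this example is just a list of plain dicts serialized with json.dumps and pushed through a UTF-8 writer. A minimal, self-contained sketch of that pattern, using a made-up result entry rather than real Apex test data:

import codecs
import json

# Hypothetical result dict shaped like the entries appended to test_results above
test_results = [
    {'ClassName': 'AccountTest', 'Method': 'test_insert', 'Outcome': 'Pass',
     'Message': None, 'StackTrace': None},
]

# ensure_ascii=False keeps non-ASCII characters readable and indent=4 makes the
# file easier to diff; the example above simply relies on the json.dumps defaults.
with codecs.open('test_results.json', encoding='utf-8', mode='w') as f:
    f.write(json.dumps(test_results, ensure_ascii=False, indent=4))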

Example 62

Project: beeconnect Source File: PrintingLoader.py
    def __init__(self, interfaceJson, dispWidth, dispHeight):
        r"""
        __init__ method
        
        Initialization method. Loads configurations from the json file
        """
        
        self.displayWidth = dispWidth
        self.displayHeight = dispHeight
        
        ff = FileFinder.FileFinder()
        
        self.interfaceJson = interfaceJson
        
        self.buttonsJson = []
        
        self.lblsJson = []
        self.lblFontColor = []
        self.lblXPos = []
        self.lblYPos = []
        self.lblText = []
        self.lblFont = []
        self.lblIndexes = []
        
        self.images = []
        self.imagesJson = []
        self.imagePath = []
        self.imageX = []
        self.imageY = []
        
        self.lblsJson.append(json.loads(json.dumps(self.interfaceJson['PrintingTopLabel'])))
        self.lblIndexes.append(len(self.lblsJson[0]))
        self.lblsJson.append(json.loads(json.dumps(self.interfaceJson['PausedTopLabel'])))
        self.lblIndexes.append(len(self.lblsJson[1]))
        self.lblsJson.append(json.loads(json.dumps(self.interfaceJson['ShutdownTopLabel'])))
        self.lblIndexes.append(len(self.lblsJson[2]))
        self.lblsJson.append(json.loads(json.dumps(self.interfaceJson['FilamentTopLabel'])))
        self.lblIndexes.append(len(self.lblsJson[3]))
        self.lblsJson.append(json.loads(json.dumps(self.interfaceJson['PickerTopLabel'])))
        self.lblIndexes.append(len(self.lblsJson[4]))
        self.lblsJson.append(json.loads(json.dumps(self.interfaceJson['FinishTopLabel'])))
        self.lblIndexes.append(len(self.lblsJson[5]))
        
        self.buttonsJson.append(json.loads(json.dumps(self.interfaceJson['PrintingButtons'])))
        self.buttonsJson.append(json.loads(json.dumps(self.interfaceJson['PausedButtons'])))
        self.buttonsJson.append(json.loads(json.dumps(self.interfaceJson['ShutdownButtons'])))
        self.buttonsJson.append(json.loads(json.dumps(self.interfaceJson['FilamentButtons'])))
        self.buttonsJson.append(json.loads(json.dumps(self.interfaceJson['PickerButtons'])))
        self.buttonsJson.append(json.loads(json.dumps(self.interfaceJson['FinishButtons'])))
        
        self.imagesJson.append(json.loads(json.dumps(self.interfaceJson['PrintingImage'])))
        self.imagesJson.append(json.loads(json.dumps(self.interfaceJson['PausedImage'])))
        self.imagesJson.append(json.loads(json.dumps(self.interfaceJson['ShutdownImage'])))
        self.imagesJson.append('')                                                              #Filament Change IMG
        self.imagesJson.append('')                                                              #Picker IMG
        self.imagesJson.append(json.loads(json.dumps(self.interfaceJson['FinishImage'])))
        
        """
        Time Label Configuration
        """
        timeLblJson = json.loads(json.dumps(self.interfaceJson['TimeLabel']))
        
        self.timeLblXPos = int(float(timeLblJson['X'])*self.displayWidth)
        self.timeLblYPos = int(float(timeLblJson['Y'])*self.displayHeight)
        self.timeLblText = timeLblJson['Text']
        
        self.timeLblFont = self.GetFont(timeLblJson['FontType'],int(float(timeLblJson['FontSize'])*self.displayHeight))
        
        timeFontColor = timeLblJson['FontColor']
        splitColor = timeFontColor.split(",")
        self.timeLblFontColor = pygame.Color(int(splitColor[0]),int(splitColor[1]),int(splitColor[2]))
        
        """
        Color Label Configuration
        """
        colorLblJson = json.loads(json.dumps(self.interfaceJson['ColorLabel']))
        
        self.colorLblXPos = int(float(colorLblJson['X'])*self.displayWidth)
        self.colorLblYPos = int(float(colorLblJson['Y'])*self.displayHeight)
        self.colorLblText = colorLblJson['Text']
        
        self.colorLblFont = self.GetFont(colorLblJson['FontType'],int(float(colorLblJson['FontSize'])*self.displayHeight))
        
        colorFontColor = colorLblJson['FontColor']
        splitColor = colorFontColor.split(",")
        self.colorLblFontColor = pygame.Color(int(splitColor[0]),int(splitColor[1]),int(splitColor[2]))
        
        """
        Load Labels Configuration
        """
        for lbls in self.lblsJson:
            lblJson = json.loads(json.dumps(lbls))
            for lbl in lblJson:
                lblFontType = lbl['FontType']
                lblFontSize = int(float(lbl['FontSize'])*self.displayHeight)
                lblFColor = lbl['FontColor']
                self.lblXPos.append(int(float(lbl['X'])*self.displayWidth))
                self.lblYPos.append(int(float(lbl['Y'])*self.displayHeight))
                self.lblText.append(lbl['Text'])
                self.lblFont.append(self.GetFont(lblFontType,lblFontSize))
                
                splitColor = lblFColor.split(",")
                fontColor = pygame.Color(int(splitColor[0]),int(splitColor[1]),int(splitColor[2]))
                self.lblFontColor.append(fontColor)
        """
        Load Buttons Configuration
        """
        self.interfaceButtons = []
        for btns in self.buttonsJson:
            filButtons = []
            for btn in btns:
                btnX = int(float(btn['X'])*self.displayWidth)
                btnY = int(float(btn['Y'])*self.displayHeight)
                btnWidth = int(float(btn['Width'])*self.displayHeight)
                btnHeight = int(float(btn['Height'])*self.displayHeight)
                btnType = btn['ButtonType']
                
            
                if btnType == "Text":
                    btnTitle = btn['Title']
                    bgColor = btn['bgColor'].split(",")
                    fColor = btn['FontColor'].split(",")
                    fType = btn['FontType']
                    fSize = int(float(btn['FontSize'])*self.displayHeight)
                    btnName = btn['ButtonName']
                
                    jogBtn = BeePanel_Button.Button(btnX,btnY,btnWidth,btnHeight,btnTitle,
                                                int(bgColor[0]),int(bgColor[1]),int(bgColor[2]),
                                                int(fColor[0]),int(fColor[1]),int(fColor[2]),
                                                fType,fSize,None,None,None,btnName)
                                                
                    newBtn = jogBtn.GetTextButton()
                    newBtn._propSetName(btnTitle)
                    filButtons.append(newBtn)
                elif btnType == "Img":
                    btnTitle = btn['Title']
                    normalPath = btn['NormalPath']
                    downPath = btn['DownPath']
                    highlightedPath = btn['HighlightedPath']
                    btnName = btn['ButtonName']
                
                    jogBtn = BeePanel_Button.Button(btnX,btnY,btnWidth,btnHeight,None,
                                                None,None,None,None,None,None,
                                                None,None,
                                                normalPath,downPath,highlightedPath,
                                                btnName)
                    newBtn = jogBtn.GetImageButton()
                    newBtn._propSetName(btnTitle)
                    filButtons.append(newBtn)
        
            self.interfaceButtons.append(filButtons)
            
        """
        Load Image Configuration
        """
        for img in self.imagesJson:
            if(img != ''):
                imgJson = json.loads(json.dumps(img))
                for img in imgJson:
                    self.imagePath.append(ff.GetAbsPath(img['ImgPath']))
                    self.imageX.append(int(float(img['X'])*self.displayHeight))
                    self.imageY.append(int(float(img['Y'])*self.displayHeight))
            else:
                self.imagePath.append('')
                self.imageX.append('')
                self.imageY.append('')
                    
        
        """
        Load Progress Bar Configuration
        """
        pBarJson = json.loads(json.dumps(self.interfaceJson['ProgressBar']))
        pBarX = int(float(pBarJson['X'])*self.displayWidth)
        pBarY = int(float(pBarJson['Y'])*self.displayHeight)
        pBarWidth = int(float(pBarJson['Width'])*self.displayWidth)
        pBarHeight = int(float(pBarJson['Height'])*self.displayHeight)
        pBarThickness = int(pBarJson['Thickness'])
        pBarLineColorRGB = pBarJson['LineColor']
        pBarFillColorRGB = pBarJson['bgColor']
        
        splitColor = pBarLineColorRGB.split(",")
        pBarLineColor = pygame.Color(int(splitColor[0]),int(splitColor[1]),int(splitColor[2]))
        
        splitColor = pBarFillColorRGB.split(",")
        pBarFillColor = pygame.Color(int(splitColor[0]),int(splitColor[1]),int(splitColor[2]))
        
        self.progressBar = ProgressBar.ProgressBar(pBarX,pBarY,pBarWidth,pBarHeight,pBarLineColor,pBarFillColor,pBarThickness)
        
        """
        Color Picker Configuration
        """
        colorPickerJson = json.loads(json.dumps(self.interfaceJson['ColorPicker']))
        self.pickerX = int(float(colorPickerJson['X'])*self.displayWidth)
        self.pickerY = int(float(colorPickerJson['Y'])*self.displayHeight)
        self.pickerWidth = int(float(colorPickerJson['Width'])*self.displayWidth)
        self.pickerHeight = int(float(colorPickerJson['Height'])*self.displayHeight)
        self.pickerFontSize = int(float(colorPickerJson['FontSize'])*self.displayHeight)
        pickerFontColorRGB = colorPickerJson['FontColor']
        fontType = colorPickerJson['FontType']
            
        font = self.GetFont(fontType,self.pickerFontSize)
        
        self.pickerFont = font
        
        splitColor = pickerFontColorRGB.split(",")
        self.pickerFontColor = pygame.Color(int(splitColor[0]),int(splitColor[1]),int(splitColor[2]))
                
        return
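
The repeated json.loads(json.dumps(...)) calls in this loader are a round-trip idiom: serializing and immediately re-parsing a JSON-serializable value yields an independent copy, so later mutations do not touch the original configuration. A minimal sketch of the idiom with an invented config dict (copy.deepcopy would work equally well here):

import json

interface_json = {'TimeLabel': {'X': '0.1', 'Y': '0.05', 'Text': 'Time: ',
                                'FontColor': '255,255,255'}}

# The dumps/loads round trip produces a deep copy of the nested dict
time_lbl = json.loads(json.dumps(interface_json['TimeLabel']))
time_lbl['Text'] = 'Elapsed: '

print(interface_json['TimeLabel']['Text'])  # still 'Time: '
print(time_lbl['Text'])                     # 'Elapsed: '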

Example 63

Project: graphite-api Source File: app.py
@app.route('/render', methods=methods)
def render():
    # Start with some defaults
    errors = {}
    graph_options = {
        'width': 600,
        'height': 300,
    }
    request_options = {}

    # Fill in the request_options
    graph_type = RequestParams.get('graphType', 'line')

    # Resolve the graph class for the requested graphType
    try:
        graph_class = GraphTypes[graph_type]
        request_options['graphType'] = graph_type
        request_options['graphClass'] = graph_class
    except KeyError:
        errors['graphType'] = (
            "Invalid graphType '{0}', must be one of '{1}'.".format(
                graph_type, "', '".join(sorted(GraphTypes.keys()))))
    request_options['pieMode'] = RequestParams.get('pieMode', 'average')
    targets = RequestParams.getlist('target')
    if not len(targets):
        errors['target'] = 'This parameter is required.'
    request_options['targets'] = targets

    if 'rawData' in RequestParams:
        request_options['format'] = 'raw'
    if 'format' in RequestParams:
        request_options['format'] = RequestParams['format']
        if 'jsonp' in RequestParams:
            request_options['jsonp'] = RequestParams['jsonp']
    if 'maxDataPoints' in RequestParams:
        try:
            request_options['maxDataPoints'] = int(
                float(RequestParams['maxDataPoints']))
        except ValueError:
            errors['maxDataPoints'] = 'Must be an integer.'
    if 'noNullPoints' in RequestParams:
        request_options['noNullPoints'] = True

    if errors:
        return jsonify({'errors': errors}, status=400)

    # Fill in the graph_options
    for opt in graph_class.customizable:
        if opt in RequestParams:
            value = RequestParams[opt]
            try:
                intvalue = int(value)
                if str(intvalue) == str(value):
                    value = intvalue
            except ValueError:
                try:
                    value = float(value)
                except ValueError:
                    if value.lower() in ('true', 'false'):
                        value = value.lower() == 'true'
                    elif value.lower() == 'default' or not value:
                        continue
            graph_options[opt] = value

    tzinfo = pytz.timezone(app.config['TIME_ZONE'])
    tz = RequestParams.get('tz')
    if tz:
        try:
            tzinfo = pytz.timezone(tz)
        except pytz.UnknownTimeZoneError:
            errors['tz'] = "Unknown timezone: '{0}'.".format(tz)
    request_options['tzinfo'] = tzinfo

    # Get the time interval for time-oriented graph types
    until_time = parseATTime(RequestParams.get('until', 'now'), tzinfo)
    from_time = parseATTime(RequestParams.get('from', '-1d'), tzinfo)

    start_time = min(from_time, until_time)
    end_time = max(from_time, until_time)
    if start_time == end_time:
        errors['from'] = errors['until'] = 'Invalid empty time range'

    request_options['startTime'] = start_time
    request_options['endTime'] = end_time

    template = dict()
    for key in RequestParams.keys():
        if key.startswith('template['):
            template[key[9:-1]] = RequestParams.get(key)
    request_options['template'] = template

    use_cache = app.cache is not None and 'noCache' not in RequestParams
    cache_timeout = RequestParams.get('cacheTimeout')
    if cache_timeout is not None:
        cache_timeout = int(cache_timeout)

    if errors:
        return jsonify({'errors': errors}, status=400)

    # Done with options.

    if use_cache:
        request_key = hash_request()
        response = app.cache.get(request_key)
        if response is not None:
            return response

    headers = {
        'Last-Modified': http_date(time.time()),
        'Expires': http_date(time.time() + (cache_timeout or 60)),
        'Cache-Control': 'max-age={0}'.format(cache_timeout or 60)
    } if use_cache else {
        'Pragma': 'no-cache',
        'Cache-Control': 'no-cache',
    }

    context = {
        'startTime': request_options['startTime'],
        'endTime': request_options['endTime'],
        'tzinfo': request_options['tzinfo'],
        'template': request_options['template'],
        'data': [],
    }

    # Gather all data to take advantage of backends with fetch_multi
    paths = []
    for target in request_options['targets']:
        if request_options['graphType'] == 'pie':
            if ':' in target:
                continue
        if target.strip():
            paths += pathsFromTarget(context, target)
    data_store = fetchData(context, paths)

    if request_options['graphType'] == 'pie':
        for target in request_options['targets']:
            if ':' in target:
                name, value = target.split(':', 1)
                try:
                    value = float(value)
                except ValueError:
                    errors['target'] = "Invalid target: '{0}'.".format(target)
                context['data'].append((name, value))
            else:
                series_list = evaluateTarget(context, target, data_store)

                for series in series_list:
                    func = app.functions[request_options['pieMode']]
                    context['data'].append((series.name,
                                            func(context, series) or 0))

        if errors:
            return jsonify({'errors': errors}, status=400)

    else:  # graphType == 'line'
        for target in request_options['targets']:
            if not target.strip():
                continue
            series_list = evaluateTarget(context, target, data_store)
            context['data'].extend(series_list)

        request_options['format'] = request_options.get('format')

        if request_options['format'] == 'csv':
            response = BytesIO() if six.PY2 else StringIO()
            writer = csv.writer(response, dialect='excel')
            for series in context['data']:
                for index, value in enumerate(series):
                    ts = datetime.fromtimestamp(
                        series.start + index * series.step,
                        request_options['tzinfo']
                    )
                    writer.writerow((series.name,
                                     ts.strftime("%Y-%m-%d %H:%M:%S"), value))
            response.seek(0)
            headers['Content-Type'] = 'text/csv'
            response = (response.read(), 200, headers)
            if use_cache:
                app.cache.add(request_key, response, cache_timeout)
            return response

        if request_options['format'] == 'json':
            series_data = []
            if 'maxDataPoints' in request_options and any(context['data']):
                start_time = min([s.start for s in context['data']])
                end_time = max([s.end for s in context['data']])
                for series in context['data']:
                    series_data.append(prune_datapoints(
                        series, request_options['maxDataPoints'],
                        start_time, end_time))
            elif 'noNullPoints' in request_options and any(context['data']):
                for series in context['data']:
                    values = []
                    for (index, v) in enumerate(series):
                        if v is not None:
                            timestamp = series.start + (index * series.step)
                            values.append((v, timestamp))
                    if len(values) > 0:
                        series_data.append({'target': series.name,
                                            'datapoints': values})
            else:
                for series in context['data']:
                    timestamps = range(series.start, series.end + series.step,
                                       series.step)
                    datapoints = zip(series, timestamps)
                    series_data.append({'target': series.name,
                                        'datapoints': datapoints})

            response = jsonify(series_data, headers=headers)
            if use_cache:
                app.cache.add(request_key, response, cache_timeout)
            return response

        if request_options['format'] == 'dygraph':
            series_data = {}
            labels = ['Time']
            if any(context['data']):
                datapoints = [[ts * 1000]
                              for ts in range(context['data'][0].start,
                                              context['data'][0].end,
                                              context['data'][0].step)]
                for series in context['data']:
                    labels.append(series.name)
                    for i, point in enumerate(series):
                        datapoints[i].append(point)
                series_data = {'labels': labels, 'data': datapoints}

            return jsonify(series_data, headers=headers)

        if request_options['format'] == 'rickshaw':
            series_data = []
            for series in context['data']:
                timestamps = range(series.start, series.end, series.step)
                datapoints = [{'x': x, 'y': y}
                              for x, y in zip(timestamps, series)]
                series_data.append(dict(target=series.name,
                                   datapoints=datapoints))
            return jsonify(series_data, headers=headers)

        if request_options['format'] == 'raw':
            response = StringIO()
            for series in context['data']:
                response.write(u"%s,%d,%d,%d|" % (
                    series.name, series.start, series.end, series.step))
                response.write(u','.join(map(repr, series)))
                response.write(u'\n')
            response.seek(0)
            headers['Content-Type'] = 'text/plain'
            response = (response.read(), 200, headers)
            if use_cache:
                app.cache.add(request_key, response, cache_timeout)
            return response

        if request_options['format'] == 'svg':
            graph_options['outputFormat'] = 'svg'
        elif request_options['format'] == 'pdf':
            graph_options['outputFormat'] = 'pdf'

    graph_options['data'] = context['data']
    image = doImageRender(request_options['graphClass'], graph_options)

    use_svg = graph_options.get('outputFormat') == 'svg'

    if use_svg and 'jsonp' in request_options:
        headers['Content-Type'] = 'text/javascript'
        response = ('{0}({1})'.format(request_options['jsonp'],
                                      json.dumps(image.decode('utf-8'))),
                    200, headers)
    else:
        if use_svg:
            ctype = 'image/svg+xml'
        elif graph_options.get('outputFormat') == 'pdf':
            ctype = 'application/x-pdf'
        else:
            ctype = 'image/png'
        headers['Content-Type'] = ctype
        response = image, 200, headers

    if use_cache:
        app.cache.add(request_key, response, cache_timeout)
    return response
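
In the SVG/JSONP branch above, json.dumps is applied to a string rather than a structure: it adds the surrounding quotes and escape sequences so the SVG markup becomes a valid JavaScript string literal inside the callback call. A small sketch of that wrapping, with a placeholder callback name and markup:

import json

callback = 'handleSvg'   # stand-in for the jsonp request parameter
svg_markup = '<svg xmlns="http://www.w3.org/2000/svg"></svg>'

# json.dumps on a str returns a quoted, escaped literal, so embedded quotes in
# the markup cannot break out of the callback argument.
body = '{0}({1})'.format(callback, json.dumps(svg_markup))
print(body)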

Example 64

Project: baruwa2 Source File: reports.py
    @ActionProtector(CanAccessReport())
    def display(self, reportid, format=None):
        "Display a report"
        try:
            c.report_title = REPORTS[reportid]['title']
        except KeyError:
            abort(404)

        filters = session.get('filter_by', [])
        if reportid in ['1', '2', '3', '4', '5', '6', '7', '8', '10']:
            rquery = ReportQuery(c.user, reportid, filters)
            query = rquery()
            cachekey = u'reportquery-%s-%s' % (c.user.username, reportid)
            query = query.options(FromCache('sql_cache_short', cachekey))
            data = query[:10]
            if format == 'png':
                return self._generate_png(data, reportid)
            if format == 'csv':
                info = REPORTDL_MSG % dict(r=c.report_title, f='csv')
                audit_log(c.user.username,
                        1, unicode(info), request.host,
                        request.remote_addr, arrow.utcnow().datetime)
                return self._generate_csv(data, reportid)
            jsondata = [dict(tooltip=getattr(item, 'address'),
                        y=getattr(item, REPORTS[reportid]['sort']),
                        stroke='black', color=PIE_COLORS[index],
                        size=getattr(item, 'size'))
                        for index, item in enumerate(data)]
            template = '/reports/piereport.html'
        if reportid == '9':
            query = sa_scores(Session, c.user)
            if filters:
                dynq = DynaQuery(Message, query, filters)
                query = dynq.generate()
            cachekey = u'sascores-%s' % c.user.username
            query = query.options(FromCache('sql_cache_short', cachekey))
            data = query.all()
            if format == 'json':
                scores = []
                counts = []
                for row in data:
                    scores.append(dict(value=int(row.score),
                                text=str(row.score)))
                    counts.append(dict(y=int(row.count), tooltip=(_('Score ') +
                                str(row.score) + ': ' + str(row.count))))
                jsondata = dict(scores=scores, count=counts)
            elif format == 'png':
                return self._generate_png(data, reportid)
            else:
                jsondata = {}
                jsondata['labels'] = [{'value': index + 1,
                                    'text': str(item.score)}
                                    for index, item in enumerate(data)]
                jsondata['scores'] = [item.count for item in data]
                template = '/reports/barreport.html'
        if reportid == '10':
            if format == 'json':
                data = [[item.address.strip(),
                        get_hostname(item.address.strip()),
                        country_flag(item.address.strip()),
                        item.count, item.size]
                        for item in data]
            template = '/reports/relays.html'
        if reportid == '11':
            query = message_totals(Session, c.user)
            if filters:
                dynq = DynaQuery(Message, query, filters)
                query = dynq.generate()
            cachekey = u'msgtotals-%s' % c.user.username
            query = query.options(FromCache('sql_cache_short', cachekey))
            data = query.all()
            if format == 'png':
                return self._generate_png(data, reportid)
            elif format == 'json':
                dates = []
                mail_total = []
                spam_total = []
                size_total = []
                virus_total = []
                for row in data:
                    dates.append(str(row.ldate))
                    mail_total.append(int(row.mail_total))
                    spam_total.append(int(row.spam_total))
                    virus_total.append(int(row.virus_total))
                    size_total.append(int(row.total_size))
                jsondata = dict(dates=[dict(value=index + 1, text=date)
                                for index, date in enumerate(dates)],
                            mail=[dict(y=total,
                                tooltip=(_('Mail totals on ') +
                                dates[index] + ': ' + str(total)))
                                for index, total in enumerate(mail_total)],
                            spam=[dict(y=total,
                                tooltip=(_('Spam totals on ') +
                                dates[index] + ': ' + str(total)))
                                for index, total in enumerate(spam_total)],
                            virii=[dict(y=total,
                                tooltip=(_('Virus totals on ') +
                                dates[index] + ': ' + str(total)))
                                for index, total in enumerate(virus_total)],
                            volume=size_total, mail_total=sum(mail_total),
                            spam_total=sum(spam_total),
                            virus_total=sum(virus_total),
                            volume_total=sum(size_total))
                try:
                    vpct = "%.1f" % ((1.0 * sum(virus_total) /
                                    sum(mail_total)) * 100)
                    spct = "%.1f" % ((1.0 * sum(spam_total) /
                                    sum(mail_total)) * 100)
                except ZeroDivisionError:
                    vpct = "0.0"
                    spct = "0.0"
                jsondata['vpct'] = vpct
                jsondata['spct'] = spct
                data = [dict(date=str(row.ldate),
                        mail_total=row.mail_total,
                        spam_total=row.spam_total,
                        virus_total=row.virus_total,
                        size_total=format_byte_size(row.total_size),
                        virus_percent="%.1f" % ((1.0 * int(row.virus_total) /
                        int(row.mail_total)) * 100),
                        spam_percent="%.1f" % ((1.0 * int(row.spam_total) /
                        int(row.mail_total)) * 100)) for row in data]
            elif format == 'csv':
                info = REPORTDL_MSG % dict(r=c.report_title, f='csv')
                audit_log(c.user.username,
                        1, unicode(info), request.host,
                        request.remote_addr, arrow.utcnow().datetime)
                return self._generate_csv(data, reportid)
            else:
                jsondata = dict(mail=[],
                                spam=[],
                                virus=[],
                                volume=[],
                                labels=[])
                for index, item in enumerate(data):
                    jsondata['spam'].append(item.spam_total)
                    jsondata['mail'].append(item.mail_total)
                    jsondata['virus'].append(item.virus_total)
                    jsondata['volume'].append(item.total_size)
                    jsondata['labels'].append(dict(text=str(item.ldate),
                                            value=index))
                template = '/reports/listing.html'
        if format == 'json':
            response.headers['Content-Type'] = JSON_HEADER
            return json.dumps(dict(items=list(data), pie_data=jsondata))
        if format == 'pdf' and reportid != '9':
            info = REPORTDL_MSG % dict(r=c.report_title, f='pdf')
            audit_log(c.user.username,
                    1, unicode(info), request.host,
                    request.remote_addr, arrow.utcnow().datetime)
            return self._generate_pdf(data, reportid)
        c.reportid = reportid
        c.chart_data = json.dumps(jsondata)
        c.top_items = data
        c.active_filters = filters
        c.saved_filters = []
        c.FILTER_BY = FILTER_BY
        c.FILTER_ITEMS = FILTER_ITEMS
        c.form = FilterForm(request.POST, csrf_context=session)
        info = REPORTVIEW_MSG % dict(r=c.report_title)
        audit_log(c.user.username,
                1, unicode(info), request.host,
                request.remote_addr, arrow.utcnow().datetime)
        return self.render(template)
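
Both json.dumps calls near the end of this controller serialize chart data assembled as plain lists and dicts: one becomes the body of a JSON response, the other is embedded into the rendered template as c.chart_data. A hedged sketch of the same shape, with fabricated rows standing in for the query results:

import json

rows = [{'address': '192.0.2.1', 'count': 42, 'size': 1024}]
pie_data = [{'tooltip': row['address'], 'y': row['count']} for row in rows]

# JSON response body: the raw rows plus the chart-friendly series
response_body = json.dumps(dict(items=rows, pie_data=pie_data))

# The same series serialized again for embedding into an HTML template
chart_data = json.dumps(pie_data)
print(response_body)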

Example 65

Project: cognitive Source File: operations.py
Function: set_operation
    def set_operation(self, operation, data):
        if operation == 'math_formula':
            try:
                print data["op_type"], data["op_constant"], data["component_type"], data["component_id"]
                op = DataOperationType(
                    function_type='Update',
                    function_arg=data["component_type"],
                    function_subtype=data["op_type"],
                    function_arg_id=data["component_id"],
                    function_subtype_arg=data["op_constant"])
                op.save()
            except KeyError:
                op = DataOperationType(function_type='Update')
                op.save()

        elif operation == 'normalization':
            try:
                print data["component_type"], data["op_type"], data["component_id"]
                op = DataOperationType(
                    function_type='Update',
                    function_arg=data["component_type"],
                    function_arg_id=data["component_id"],
                    function_subtype='Normalize',
                    function_subtype_arg=data["op_type"])
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Update',
                    function_subtype='Normalize')
                op.save()

        elif operation == 'projection':
            try:
                print data["component_id"]
                op = DataOperationType(
                    function_type='Filter',
                    function_arg='Table',
                    function_arg_id=data["component_id"],
                    function_subtype='Project')
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Filter',
                    function_arg='Table')
                op.save()

        elif operation == 'duplication_removal':
            try:
                print data["component_id"]
                op = DataOperationType(
                    function_type='Filter',
                    function_arg='Table',
                    function_arg_id=data["component_id"],
                    function_subtype='RemoveDup')
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Filter',
                    function_arg='Table',
                    function_subtype='RemoveDup')
                op.save()

        elif operation == 'remove_missing':
            try:
                print data["op_action"]
                op = DataOperationType(
                    function_type='Filter',
                    function_arg='Table',
                    function_subtype='RemoveMissing',
                    function_subtype_arg=data["op_action"])
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Filter',
                    function_arg='Table',
                    function_subtype='RemoveMissing')
                op.save()

        elif operation == 'metadata':
            try:
                print data["column_type"]
                op = DataOperationType(
                    function_type='Update',
                    function_arg='Table',
                    function_subtype='Metadata',
                    function_subtype_arg=data["column_type"])
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Update',
                    function_arg='Table',
                    function_subtype='Metadata')
                op.save()

        elif operation == 'row':
            try:
                print data["row_values"]
                op = DataOperationType(
                    function_type='Create',
                    function_arg='Row',
                    function_subtype='Row',
                    function_subtype_arg=data["row_values"])
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Create',
                    function_arg='Row',
                    function_subtype='Row')
                op.save()

        elif operation == 'input':
            try:
                if data["input_file_type"] == "csv":
                    print data["input_file"], data["input_file_type"]
                    filename = "/tmp/" + str(data["experiment"]) + "_" + data["input_file"]
                    print "Filename ", filename
                    f = open(filename, 'w')
                    f.write(data["data_values"])
                    f.close()
                    op = DataOperationType(
                        function_type='Create', function_arg='Table',
                        function_subtype='Input', function_subtype_arg=filename)
                    op.save()

                elif data["input_file_type"] == "http":
                    filename = "/tmp/" + str(data["experiment"]) + "_" + data["input_file"].split('/')[-1]
                    print "Filename ", filename
                    response = urllib2.urlopen(data["input_file"])
                    csv_data = read_csv(response)
                    csv_data.to_csv(filename, index=False)
                    op = DataOperationType(
                        function_type='Create', function_arg='Table',
                        function_subtype='Input', function_subtype_arg=filename)
                    op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Create',
                    function_arg='Table',
                    function_subtype='Input')
                op.save()

        elif operation == "machine_learning":
            try:
                print data["model_type"], data["train_data_percentage"], data["target_column"]
                arg = {
                    'train_data_percentage': data["train_data_percentage"],
                    'target_column': data["target_column"]}
                op = DataOperationType(
                    function_type='Create',
                    function_arg='Model',
                    function_arg_id=data["model_type"],
                    function_subtype='Train-Test',
                    function_subtype_arg=json.dumps(arg))
                op.save()
            except KeyError:
                op = DataOperationType(
                    function_type='Create',
                    function_arg='Model',
                    function_subtype='Train-Test')
                op.save()
        return op
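
The machine_learning branch packs several request fields into a single text column by serializing a small dict with json.dumps; whatever later executes the operation presumably reverses it with json.loads. A minimal sketch of that pack/unpack round trip, with the field names taken from the example and the values invented:

import json

data = {'train_data_percentage': 80, 'target_column': 'label'}

# Pack the structured arguments into one string-valued model field
function_subtype_arg = json.dumps({
    'train_data_percentage': data['train_data_percentage'],
    'target_column': data['target_column'],
})

# ...and unpack them again wherever the operation is executed
args = json.loads(function_subtype_arg)
print(args['target_column'])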

Example 66

Project: lfd Source File: planning.py
def joint_fit_tps_follow_finger_pts_trajs(robot, manip_name, flr2finger_link_names, flr2finger_rel_pts, flr2old_finger_pts_trajs, old_traj, 
                                         f, closing_pts=None,
                                         no_collision_cost_first=False, use_collision_cost=True, start_fixed=False, joint_vel_limits=None,
                                          alpha=settings.ALPHA, beta_pos=settings.BETA_POS, gamma=settings.GAMMA):
    orig_dof_inds = robot.GetActiveDOFIndices()
    orig_dof_vals = robot.GetDOFValues()
    
    n_steps = old_traj.shape[0]
    dof_inds = sim_util.dof_inds_from_name(robot, manip_name)
    assert old_traj.shape[1] == len(dof_inds)
    for flr2old_finger_pts_traj in flr2old_finger_pts_trajs:
        for old_finger_pts_traj in flr2old_finger_pts_traj.values():
            assert len(old_finger_pts_traj)== n_steps
    assert len(flr2finger_link_names) == len(flr2old_finger_pts_trajs)
    
    # expand these
    (n,d) = f.x_na.shape
    bend_coefs = np.ones(d) * f.bend_coef if np.isscalar(f.bend_coef) else f.bend_coef
    rot_coefs = np.ones(d) * f.rot_coef if np.isscalar(f.rot_coef) else f.rot_coef
    if f.wt_n is None:
        wt_n = np.ones(n)
    else:
        wt_n = f.wt_n
    if wt_n.ndim == 1:
        wt_n = wt_n[:,None]
    if wt_n.shape[1] == 1:
        wt_n = np.tile(wt_n, (1,d))
    
    if no_collision_cost_first:
        init_traj, _, (N, init_z) , _, _ = joint_fit_tps_follow_finger_pts_trajs(robot, manip_name, flr2finger_link_names, flr2finger_rel_pts, flr2old_finger_pts_trajs, old_traj, 
                                                                                 f, closing_pts=closing_pts, 
                                                                                 no_collision_cost_first=False, use_collision_cost=False, start_fixed=start_fixed, joint_vel_limits=joint_vel_limits, 
                                                                                 alpha=alpha, beta_pos=beta_pos, gamma=gamma)
    else:
        init_traj = old_traj.copy()
        N = f.N
        init_z = f.z
    
    if start_fixed:
        init_traj = np.r_[robot.GetDOFValues(dof_inds)[None,:], init_traj[1:]]
        sim_util.unwrap_in_place(init_traj, dof_inds)
        init_traj += robot.GetDOFValues(dof_inds) - init_traj[0,:]

    request = {
        "basic_info" : {
            "n_steps" : n_steps,
            "m_ext" : n, 
            "n_ext" : d,
            "manip" : manip_name,
            "start_fixed" : start_fixed
        },
        "costs" : [
        {
            "type" : "joint_vel",
            "params": {"coeffs" : [gamma/(n_steps-1)]}
        },
        {
            "type" : "tps",
            "name" : "tps",
            "params" : {"x_na" : [row.tolist() for row in f.x_na],
                        "y_ng" : [row.tolist() for row in f.y_ng],
                        "bend_coefs" : bend_coefs.tolist(),
                        "rot_coefs" : rot_coefs.tolist(),
                        "wt_n" : [row.tolist() for row in wt_n],
                        "N" : [row.tolist() for row in N],
                        "alpha" : alpha,
            }
        }
        ],
        "constraints" : [
        ],
        "init_info" : {
            "type":"given_traj",
            "data":[x.tolist() for x in init_traj],
            "data_ext":[row.tolist() for row in init_z]
        }
    }
    
    if use_collision_cost:
        request["costs"].append(
            {
                "type" : "collision",
                "params" : {
                  "continuous" : True,
                  "coeffs" : [1000],  # penalty coefficients. list of length one is automatically expanded to a list of length n_timesteps
                  "dist_pen" : [0.025]  # robot-obstacle distance that penalty kicks in. expands to length n_timesteps
                }
            })
    
    if joint_vel_limits is not None:
        request["constraints"].append(
             {
                "type" : "joint_vel_limits",
                "params": {"vals" : joint_vel_limits,
                           "first_step" : 0,
                           "last_step" : n_steps-1
                           }
              })

    if closing_pts is not None:
        request["costs"].append(
            {
                "type":"tps_jac_orth",
                "params":  {
                            "tps_cost_name":"tps",
                            "pts":closing_pts.tolist(),
                            "coeffs":[10.0]*len(closing_pts),
                            }
            })
    
    for (flr2finger_link_name, flr2old_finger_pts_traj) in zip(flr2finger_link_names, flr2old_finger_pts_trajs):
        for finger_lr, finger_link_name in flr2finger_link_name.items():
            finger_rel_pts = flr2finger_rel_pts[finger_lr]
            old_finger_pts_traj = flr2old_finger_pts_traj[finger_lr]
            for (i_step, old_finger_pts) in enumerate(old_finger_pts_traj):
                if start_fixed and i_step == 0:
                    continue
                request["costs"].append(
                    {"type":"tps_rel_pts",
                     "params":{
                        "tps_cost_name":"tps",
                        "src_xyzs":old_finger_pts.tolist(),
                        "rel_xyzs":finger_rel_pts.tolist(),
                        "link":finger_link_name,
                        "timestep":i_step,
                        "pos_coeffs":[np.sqrt(beta_pos/n_steps)]*4,
                     }
                    })

    s = json.dumps(request)
    with openravepy.RobotStateSaver(robot):
        with util.suppress_stdout():
            prob = trajoptpy.ConstructProblem(s, robot.GetEnv()) # create object that stores optimization problem
            result = trajoptpy.OptimizeProblem(prob) # do optimization
    
    traj = result.GetTraj()
    f.z = result.GetExt()
    theta = N.dot(f.z)
    f.trans_g = theta[0,:]
    f.lin_ag = theta[1:d+1,:]
    f.w_ng = theta[d+1:]
    
    tps_rel_pts_costs = np.sum([cost_val for (cost_type, cost_val) in result.GetCosts() if cost_type == "tps_rel_pts"])
    tps_rel_pts_err = []
    with openravepy.RobotStateSaver(robot):
        for (flr2finger_link_name, flr2old_finger_pts_traj) in zip(flr2finger_link_names, flr2old_finger_pts_trajs):
            for finger_lr, finger_link_name in flr2finger_link_name.items():
                finger_link = robot.GetLink(finger_link_name)
                finger_rel_pts = flr2finger_rel_pts[finger_lr]
                old_finger_pts_traj = flr2old_finger_pts_traj[finger_lr]
                for (i_step, old_finger_pts) in enumerate(old_finger_pts_traj):
                    if start_fixed and i_step == 0:
                        continue
                    robot.SetDOFValues(traj[i_step], dof_inds)
                    new_hmat = finger_link.GetTransform()
                    tps_rel_pts_err.append(f.transform_points(old_finger_pts) - (new_hmat[:3,3][None,:] + finger_rel_pts.dot(new_hmat[:3,:3].T)))
    tps_rel_pts_err = np.concatenate(tps_rel_pts_err, axis=0)
    tps_rel_pts_costs2 = (beta_pos/n_steps) * np.square(tps_rel_pts_err).sum() # TODO don't square n_steps

    tps_cost = np.sum([cost_val for (cost_type, cost_val) in result.GetCosts() if cost_type == "tps"])
    tps_cost2 = alpha * f.get_objective().sum()
    matching_err = f.transform_points(f.x_na) - f.y_ng
    
    joint_vel_cost = np.sum([cost_val for (cost_type, cost_val) in result.GetCosts() if cost_type == "joint_vel"])
    joint_vel_err = np.diff(traj, axis=0)
    joint_vel_cost2 = (gamma/(n_steps-1)) * np.square(joint_vel_err).sum()
    sim_util.unwrap_in_place(traj, dof_inds)
    joint_vel_err = np.diff(traj, axis=0)

    collision_costs = [cost_val for (cost_type, cost_val) in result.GetCosts() if "collision" in cost_type]
    if len(collision_costs) > 0:
        collision_err = np.asarray(collision_costs)
        collision_costs = np.sum(collision_costs)

    tps_jac_orth_cost = [cost_val for (cost_type, cost_val) in result.GetCosts() if "tps_jac_orth" in cost_type]
    if len(tps_jac_orth_cost) > 0:
        tps_jac_orth_cost = np.sum(tps_jac_orth_cost)
        f_jacs = f.compute_jacobian(closing_pts)
        tps_jac_orth_err = []
        for jac in f_jacs:
            tps_jac_orth_err.extend((jac.dot(jac.T) - np.eye(3)).flatten())
        tps_jac_orth_err = np.asarray(tps_jac_orth_err)
        tps_jac_orth_cost2 = np.square( 10.0 * tps_jac_orth_err ).sum()

    obj_value = np.sum([cost_val for (cost_type, cost_val) in result.GetCosts()])
    
    print "{:>15} | {:>10} | {:>10}".format("", "trajopt", "computed")
    print "{:>15} | {:>10}".format("COSTS", "-"*23)
    print "{:>15} | {:>10,.4} | {:>10,.4}".format("joint_vel", joint_vel_cost, joint_vel_cost2)
    print "{:>15} | {:>10,.4} | {:>10,.4}".format("tps", tps_cost, tps_cost2)
    if np.isscalar(collision_costs):
        print "{:>15} | {:>10,.4} | {:>10}".format("collision(s)", collision_costs, "-")
    print "{:>15} | {:>10,.4} | {:>10,.4}".format("tps_rel_pts(s)", tps_rel_pts_costs, tps_rel_pts_costs2)
    if np.isscalar(tps_jac_orth_cost):
        print "{:>15} | {:>10,.4} | {:>10,.4}".format("tps_jac_orth", tps_jac_orth_cost, tps_jac_orth_cost2)
    print "{:>15} | {:>10,.4} | {:>10}".format("total_obj", obj_value, "-")
    print ""

    print "{:>15} | {:>10} | {:>10}".format("", "abs min", "abs max")
    print "{:>15} | {:>10}".format("ERRORS", "-"*23)
    print "{:>15} | {:>10,.4} | {:>10,.4}".format("joint_vel (deg)", np.rad2deg(np.abs(joint_vel_err).min()), np.rad2deg(np.abs(joint_vel_err).max()))
    print "{:>15} | {:>10,.4} | {:>10,.4}".format("tps (matching)", np.abs(matching_err).min(), np.abs(matching_err).max())
    if np.isscalar(collision_costs):
        print "{:>15} | {:>10,.4} | {:>10,.4}".format("collision(s)", np.abs(-collision_err).min(), np.abs(-collision_err).max())
    print "{:>15} | {:>10,.4} | {:>10,.4}".format("tps_rel_pts(s)", np.abs(tps_rel_pts_err).min(), np.abs(tps_rel_pts_err).max())
    if np.isscalar(tps_jac_orth_cost):
        print "{:>15} | {:>10,.4} | {:>10,.4}".format("tps_jac_orth", np.abs(tps_jac_orth_err).min(), np.abs(tps_jac_orth_err).max())
    print ""

    # make sure this function doesn't change state of the robot
    assert not np.any(orig_dof_inds - robot.GetActiveDOFIndices())
    assert not np.any(orig_dof_vals - robot.GetDOFValues())
    
    return traj, obj_value, tps_rel_pts_costs, tps_cost
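
The optimization request above is built as nested dicts and lists, with every numpy array converted through .tolist() before json.dumps is called, because ndarrays are not JSON-serializable. A cut-down sketch of that construction (only a couple of the entries, with placeholder values, not the full trajopt problem):

import json
import numpy as np

init_traj = np.zeros((5, 7))   # placeholder trajectory: 5 steps, 7 DOF

request = {
    "basic_info": {"n_steps": init_traj.shape[0], "start_fixed": False},
    "costs": [
        {"type": "joint_vel", "params": {"coeffs": [0.25]}},
    ],
    "constraints": [],
    # rows are converted to plain lists so the structure can be serialized
    "init_info": {"type": "given_traj",
                  "data": [row.tolist() for row in init_traj]},
}

s = json.dumps(request)   # this JSON string is what the solver is handed
print(s[:60])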

Example 67

Project: coala-bears Source File: JSHintBear.py
Function: generate_config
    @staticmethod
    @deprecate_settings(es_version='use_es6_syntax',
                        javascript_strictness=(
                            "allow_global_strict",
                            lambda x: "global" if x else True),
                        cyclomatic_complexity='maxcomplexity',
                        allow_unused_variables=('prohibit_unused', negate),
                        max_parameters='maxparams',
                        allow_missing_semicolon='allow_missing_semicol',
                        allow_this_statements='allow_this_stmt',
                        allow_with_statements='allow_with_stmt',
                        allow_bitwise_operators=('prohibit_bitwise', negate),
                        max_statements='maxstatements',
                        max_depth='maxdepth',
                        allow_comma_operator=('prohibit_comma', negate),
                        allow_non_breaking_whitespace=(
                            'prohibit_non_breaking_whitespace', negate),
                        allow_prototype_overwrite=(
                            'prohibit_prototype_overwrite', negate),
                        allow_type_coercion=('prohibit_type_coercion', negate),
                        allow_future_identifiers=('future_hostile', negate),
                        allow_typeof=('prohibit_typeof', negate),
                        allow_var_statement=(
                            'prohibit_variable_statements', negate),
                        allow_grouping_operator=('prohibit_groups', negate),
                        allow_variable_shadowing='shadow',
                        use_mozilla_extension='using_mozilla',
                        allow_constructor_functions=('prohibit_new', negate),
                        allow_argument_caller_and_callee=(
                            'prohibit_arg', negate),
                        allow_iterator_property=('iterator', negate),
                        allow_filter_in_forin='force_filter_forin')
    def generate_config(filename, file,
                        allow_bitwise_operators: bool=False,
                        allow_prototype_overwrite: bool=False,
                        force_braces: bool=True,
                        allow_type_coercion: bool=False,
                        allow_future_identifiers: bool=True,
                        allow_typeof: bool=True,
                        allow_filter_in_forin: bool=True,
                        allow_funcscope: bool=False,
                        allow_iterator_property: bool=True,
                        allow_argument_caller_and_callee: bool=False,
                        allow_comma_operator: bool=True,
                        allow_non_breaking_whitespace: bool=False,
                        allow_constructor_functions: bool=True,
                        allow_grouping_operator: bool=True,
                        allow_var_statement: bool=True,
                        allow_missing_semicolon: bool=False,
                        allow_debugger: bool=False,
                        allow_assignment_comparisions: bool=False,
                        allow_eval: bool=False,
                        allow_increment: bool=False,
                        allow_proto: bool=False,
                        allow_scripturls: bool=False,
                        allow_singleton: bool=False,
                        allow_this_statements: bool=False,
                        allow_with_statements: bool=False,
                        use_mozilla_extension: bool=False,
                        javascript_strictness: bool_or_str=True,
                        allow_noyield: bool=False,
                        allow_eqnull: bool=False,
                        allow_last_semicolon: bool=False,
                        allow_func_in_loop: bool=False,
                        allow_expr_in_assignments: bool=False,
                        use_es3_array: bool=False,
                        environment_mootools: bool=False,
                        environment_couch: bool=False,
                        environment_jasmine: bool=False,
                        environment_jquery: bool=False,
                        environment_node: bool=False,
                        environment_qunit: bool=False,
                        environment_rhino: bool=False,
                        environment_shelljs: bool=False,
                        environment_prototypejs: bool=False,
                        environment_yui: bool=False,
                        environment_mocha: bool=True,
                        environment_module: bool=False,
                        environment_wsh: bool=False,
                        environment_worker: bool=False,
                        environment_nonstandard: bool=False,
                        environment_browser: bool=True,
                        environment_browserify: bool=False,
                        environment_devel: bool=True,
                        environment_dojo: bool=False,
                        environment_typed: bool=False,
                        environment_phantom: bool=False,
                        max_statements: bool_or_int=False,
                        max_depth: bool_or_int=False,
                        max_parameters: bool_or_int=False,
                        cyclomatic_complexity: bool_or_int=False,
                        allow_variable_shadowing: bool_or_str=False,
                        allow_unused_variables: bool_or_str=False,
                        allow_latedef: bool_or_str=False,
                        es_version: bool_or_int=5,
                        jshint_config: str=""):
        """
        :param allow_bitwise_operators:
            Allows the use of bitwise operators.
        :param allow_prototype_overwrite:
            This option allows overwriting prototypes of native objects such
            as ``Array``.
        :param force_braces:
            This option requires you to always put curly braces around blocks
            in loops and conditionals.
        :param allow_type_coercion:
            This option allows the use of ``==`` and ``!=``.
        :param allow_future_identifiers:
            This option allows the use of identifiers which are defined in
            future versions of JavaScript.
        :param allow_typeof:
            This option enables warnings about invalid ``typeof`` operator
            values.
        :param allow_filter_in_forin:
            This option requires all ``for in`` loops to filter an object's
            items.
        :param allow_iterator_property:
            This option suppresses warnings about the ``__iterator__``
            property.
        :param allow_funcscope:
            This option suppresses warnings about declaring variables inside of
            control structures while accessing them later from outside.
        :param allow_argument_caller_and_callee:
            This option allows the use of ``arguments.caller`` and
            ``arguments.callee``.
        :param allow_comma_operator:
            This option allows the use of the comma operator.
        :param allow_non_breaking_whitespace:
            Allows "non-breaking whitespace characters".
        :param allow_constructor_functions:
            Allows the use of constructor functions.
        :param allow_grouping_operator:
            This option allows the use of the grouping operator when it is
            not strictly required.
        :param allow_var_statement:
            Allows the use of the ``var`` statement while declaring a variable.
            When set to ``False``, ``let`` or ``const`` must be used instead.
        :param allow_missing_semicolon:
            This option suppresses warnings about missing semicolons.
        :param allow_debugger:
            This option suppresses warnings about the ``debugger`` statements.
        :param allow_assignment_comparisions:
            This option suppresses warnings about the use of assignments in
            cases where comparisons are expected.
        :param allow_eval:
            This option suppresses warnings about the use of the ``eval``
            function.
        :param allow_increment:
            This option suppresses warnings about the use of unary increment
            and decrement operators.
        :param allow_proto:
            This option suppresses warnings about the ``__proto__`` property.
        :param allow_scripturls:
            This option suppresses warnings about the use of script-targeted
            URLs.
        :param allow_singleton:
            This option suppresses warnings about constructions like
            ``new function () { ... }`` and ``new Object;`` sometimes used to
            produce singletons.
        :param allow_this_statements:
            This option suppresses warnings about possible strict violations
            when the code is running in strict mode and ``this`` is used in a
            non-constructor function.
        :param allow_with_statements:
            This option suppresses warnings about the use of the ``with``
            statement.
        :param use_mozilla_extension:
            This option tells JSHint that your code uses Mozilla JavaScript
            extensions.
        :param javascript_strictness:
            Determines what sort of strictness to use in the JavaScript code.
            The possible options are:

            - "global" - there must be a ``"use strict";`` at global level
            - "implied" - lint the code as if there is a ``"use strict";``
            - "False" - disable warnings about strict mode
            - "True" - there must be a ``"use strict";`` at function level
        :param allow_noyield:
            This option suppresses warnings about generator functions with no
            ``yield`` statement in them.
        :param allow_eqnull:
            This option suppresses warnings about ``== null`` comparisons.
        :param allow_last_semicolon:
            This option suppresses warnings about missing semicolons for the
            last statement.
        :param allow_func_in_loop:
            This option suppresses warnings about functions inside of loops.
        :param allow_expr_in_assignments:
            This option suppresses warnings about the use of expressions where
            normally assignments or function calls are expected.
        :param use_es3_array:
            This option tells JSHintBear that ES3 array elision elements, or
            empty elements, are used.
        :param environment_mootools:
            This option defines globals exposed by the Mootools.
        :param environment_couch:
            This option defines globals exposed by CouchDB.
        :param environment_jasmine:
            This option defines globals exposed by Jasmine.
        :param environment_jquery:
            This option defines globals exposed by jQuery.
        :param environment_node:
            This option defines globals exposed by Node.
        :param environment_qunit:
            This option defines globals exposed by Qunit.
        :param environment_rhino:
            This option defines globals exposed when the code is running inside
            rhino runtime environment.
        :param environment_shelljs:
            This option defines globals exposed by the ShellJS.
        :param environment_prototypejs:
            This option defines globals exposed by the Prototype.
        :param environment_yui:
            This option defines globals exposed by the YUI JavaScript
            Framework.
        :param environment_mocha:
            This option defines globals exposed by the "BDD" and "TDD" UIs of
            the Mocha unit testing framework.
        :param environment_module:
            This option informs JSHintBear that the input code describes an
            ECMAScript 6 module.
        :param environment_wsh:
            This option defines globals available when the code is running as a
            script for the Windows Script Host.
        :param environment_worker:
            This option defines globals available when the code is running
            inside of a Web Worker.
        :param environment_nonstandard:
            This option defines non-standard but widely adopted globals such
            as ``escape`` and ``unescape``.
        :param environment_browser:
            This option defines globals exposed by modern browsers.
        :param environment_browserify:
            This option defines globals available when using the Browserify.
        :param environment_devel:
            This option defines globals that are usually used for debugging:
            ``console``, ``alert``, etc.
        :param environment_dojo:
            This option defines globals exposed by the Dojo Toolkit.
        :param environment_typed:
            This option defines globals for typed array constructors.
        :param environment_phantom:
            This option defines globals available when your code is running
            inside of the PhantomJS runtime environment.
        :param max_statements:
            Maximum number of statements allowed per function.
        :param max_depth:
            This option lets you control how deeply nested your blocks are
            allowed to be.
        :param max_parameters:
            Maximum number of parameters allowed per function.
        :param cyclomatic_complexity:
            Maximum cyclomatic complexity in the code.
        :param allow_variable_shadowing:
            This option suppresses warnings about variable shadowing, i.e.
            declaring a variable that has already been declared somewhere in
            the outer scope.

            - "inner" - check for variables defined in the same scope only
            - "outer" - check for variables defined in outer scopes as well
            - False - same as inner
            - True  - allow variable shadowing
        :param allow_unused_variables:
            Allows variables to be defined but never used. This can be set
            to "vars" to only check for variables, not function parameters,
            or "strict" to check all variables and parameters.
        :param allow_latedef:
            This option allows the use of a variable before it was defined.
            Setting this option to "nofunc" will allow function declarations to
            be ignored.
        :param es_version:
            This option is used to specify the ECMAScript version to which
            the code must adhere.
        """
        # Assume that when es_version is bool, it is intended for the
        # deprecated use_es6_version
        if es_version is True:
            es_version = 6
        elif es_version is False:
            es_version = 5
        if not jshint_config:
            options = {"bitwise": not allow_bitwise_operators,
                       "freeze": not allow_prototype_overwrite,
                       "curly": force_braces,
                       "eqeqeq": not allow_type_coercion,
                       "futurehostile": not allow_future_identifiers,
                       "notypeof": not allow_typeof,
                       "forin": allow_filter_in_forin,
                       "funcscope": allow_funcscope,
                       "iterator": not allow_iterator_property,
                       "noarg": not allow_argument_caller_and_callee,
                       "nocomma": not allow_comma_operator,
                       "nonbsp": not allow_non_breaking_whitespace,
                       "nonew": not allow_constructor_functions,
                       "undef": True,
                       "singleGroups": not allow_grouping_operator,
                       "varstmt": not allow_var_statement,
                       "asi": allow_missing_semicolon,
                       "debug": allow_debugger,
                       "boss": allow_assignment_comparisions,
                       "evil": allow_eval,
                       "strict": javascript_strictness,
                       "plusplus": allow_increment,
                       "proto": allow_proto,
                       "scripturl": allow_scripturls,
                       "supernew": allow_singleton,
                       "validthis": allow_this_statements,
                       "withstmt": allow_with_statements,
                       "moz": use_mozilla_extension,
                       "noyield": allow_noyield,
                       "eqnull": allow_eqnull,
                       "lastsemic": allow_last_semicolon,
                       "loopfunc": allow_func_in_loop,
                       "expr": allow_expr_in_assignments,
                       "elision": use_es3_array,
                       "mootools": environment_mootools,
                       "couch": environment_couch,
                       "jasmine": environment_jasmine,
                       "jquery": environment_jquery,
                       "node": environment_node,
                       "qunit": environment_qunit,
                       "rhino": environment_rhino,
                       "shelljs": environment_shelljs,
                       "prototypejs": environment_prototypejs,
                       "yui": environment_yui,
                       "mocha": environment_mocha,
                       "module": environment_module,
                       "wsh": environment_wsh,
                       "worker": environment_worker,
                       "nonstandard": environment_nonstandard,
                       "browser": environment_browser,
                       "browserify": environment_browserify,
                       "devel": environment_devel,
                       "dojo": environment_dojo,
                       "typed": environment_typed,
                       "phantom": environment_phantom,
                       "maxerr": 99999,
                       "maxcomplexity": cyclomatic_complexity,
                       "maxdepth": max_depth,
                       "maxparams": max_parameters,
                       "maxstatements": max_statements,
                       "shadow": allow_variable_shadowing,
                       "unused": not allow_unused_variables,
                       "latedef": allow_latedef,
                       "esversion": es_version}

            return json.dumps(options)
        else:
            return None
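
For reference, the configuration returned above is just the options dictionary rendered as text by json.dumps. A minimal sketch of that final serialization step, using hypothetical option values rather than the bear's defaults:

import json

# Hypothetical JSHint-style options; Python booleans become JSON true/false.
options = {"bitwise": True, "curly": True, "esversion": 6, "maxerr": 99999}
# indent/sort_keys are added here only to make the output readable.
print(json.dumps(options, indent=2, sort_keys=True))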

Example 68

Project: battleschool Source File: main.py
def main(args, battleschool_dir=None):
    if not battleschool_dir:
        battleschool_dir = "%s/.battleschool" % os.environ['HOME']

    # TODO: make battle OO or more modular
    #-----------------------------------------------------------
    # make ansible defaults, battleschool defaults
    AC.DEFAULT_HOST_LIST = C.DEFAULT_HOST_LIST
    AC.DEFAULT_SUDO_FLAGS = C.DEFAULT_SUDO_FLAGS

    #-----------------------------------------------------------
    # create parser for CLI options
    usage = "%prog"
    parser = utils.base_parser(
        constants=AC,
        usage=usage,
        connect_opts=True,
        runas_opts=True,
        subset_opts=True,
        check_opts=True,
        diff_opts=True,
        output_opts=True
    )
    parser.version = "%s %s" % ("battleschool", __version__)
    # parser.add_option('--tags', dest='tags', default='all',
    #                   help="only run plays and tasks tagged with these values")
    parser.add_option('--syntax-check', dest='syntax', action='store_true',
                      help="do a playbook syntax check on the playbook, do not execute the playbook")
    parser.add_option('--list-tasks', dest='listtasks', action='store_true',
                      help="do list all tasks that would be executed")
    parser.add_option('--step', dest='step', action='store_true',
                      help="one-step-at-a-time: confirm each task before running")
    parser.add_option('--config-dir', dest='config_dir', default=None,
                      help="config directory for battleschool (default=%s)" % battleschool_dir)
    parser.add_option('--config-file', dest='config_file', default=None,
                      help="config file for battleschool (default=%s/%s)" % (battleschool_dir, "config.yml"))
    parser.add_option('-X', '--update-sources', dest='update_sources', default=False, action='store_true',
                      help="update playbooks from sources(git, url, etc...)")
    parser.add_option('--acquire-only', dest='acquire_only', default=False, action='store_true',
                      help="configure mac_pkg module to only aquire package (ie download only)")
    parser.add_option('--use-default-callbacks', dest='use_default_callbacks',
                      default=False, action='store_true',
                      help="use default ansible callbacks (to exec vars_prompt, etc.)")

    options, args = parser.parse_args(args)
    # options.connection = 'local'

    playbooks_to_run = []  #[C.DEFAULT_PLAYBOOK]

    #-----------------------------------------------------------
    # setup inventory
    inventory = ansible.inventory.Inventory(options.inventory)
    inventory.subset(options.subset)
    if len(inventory.list_hosts()) == 0:
        raise errors.AnsibleError("provided hosts list is empty")

    #-----------------------------------------------------------
    # setup default options
    sshpass = None
    sudopass = None
    vault_pass = None
    options.remote_user = AC.DEFAULT_REMOTE_USER
    if not options.listhosts and not options.syntax and not options.listtasks:
        options.ask_pass = AC.DEFAULT_ASK_PASS
        options.ask_sudo_pass = options.ask_sudo_pass or AC.DEFAULT_ASK_SUDO_PASS
        options.become_method = options.become_method or AC.DEFAULT_BECOME_METHOD
        passwds = utils.ask_passwords(ask_pass=options.ask_pass, become_ask_pass=options.ask_sudo_pass, ask_vault_pass=options.ask_vault_pass, become_method=options.become_method)
        sshpass = passwds[0]
        sudopass = passwds[1]
        vault_pass = passwds[2]
        # if options.sudo_user or options.ask_sudo_pass:
        #     options.sudo = True
        options.sudo_user = AC.DEFAULT_SUDO_USER

    extra_vars = utils.parse_extra_vars(options.extra_vars, vault_pass)
    only_tags = None  # options.tags.split(",")

    #-----------------------------------------------------------
    # setup config_dir and battleschool_dir
    if options.config_dir:
        battleschool_dir = options.config_dir
    else:
        options.config_dir = battleschool_dir

    #-----------------------------------------------------------
    # setup module_path
    if options.module_path is None:
        options.module_path = AC.DEFAULT_MODULE_PATH

    if options.module_path is None:
        options.module_path = C.DEFAULT_MODULE_PATH

    if C.DEFAULT_MODULE_PATH not in options.module_path:
        options.module_path = "%s:%s" % (C.DEFAULT_MODULE_PATH, options.module_path)

    #-----------------------------------------------------------
    # parse config data
    config_path = load_config_path(options, inventory, sshpass, sudopass)
    if os.path.exists(config_path) and os.path.isfile(config_path):
        config_data = utils.parse_yaml_from_file(config_path)
    else:
        config_data = {}

    #-----------------------------------------------------------
    # set config_dir
    if "cache_dir" in config_data:
        options.cache_dir = os.path.expanduser(config_data["cache_dir"])
    elif _platform == "darwin":  # OS X
        options.cache_dir = os.path.expanduser("~/Library/Caches/battleschool")
    else:
        options.cache_dir = "%s/cache" % battleschool_dir

    os.environ["BATTLESCHOOL_CACHE_DIR"] = options.cache_dir

    #-----------------------------------------------------------
    # setup extra_vars for later use
    if extra_vars is None:
        extra_vars = dict()

    extra_vars['battleschool_config_dir'] = battleschool_dir
    extra_vars['battleschool_cache_dir'] = options.cache_dir
    extra_vars['mac_pkg_acquire_only'] = options.acquire_only

    #-----------------------------------------------------------
    # set mac_version for extra_vars
    if _platform == "darwin":
        mac_version = platform.mac_ver()[0].split(".")
        extra_vars['mac_version'] = mac_version
        extra_vars['mac_major_minor_version'] = "%s.%s" % (mac_version[0], mac_version[1])

    #-----------------------------------------------------------
    # serialize extra_vars since there is no way to pass data
    # to a module without modifying every playbook
    tempdir = tempfile.gettempdir()
    extra_vars_path = os.path.join(tempdir, "battleschool_extra_vars.json")
    with open(extra_vars_path, 'w') as f:
        f.write(json.dumps(extra_vars))

    #-----------------------------------------------------------
    # setup and run source handlers
    handlers = getSourceHandlers()

    if 'sources' in config_data and config_data['sources']:
        sources = config_data['sources']
        display(banner("Updating sources"))
        for handler in handlers:
            source = handler(options, sources)
            playbooks = source.run(inventory, sshpass, sudopass)
            for playbook in playbooks:
                playbooks_to_run.append(playbook)
    else:
        display(banner("No sources to update"))

    #-----------------------------------------------------------
    # validate playbooks
    for playbook in playbooks_to_run:
        if not os.path.exists(playbook):
            raise errors.AnsibleError("the playbook: %s could not be found" % playbook)
        if not os.path.isfile(playbook):
            raise errors.AnsibleError("the playbook: %s does not appear to be a file" % playbook)

    become = True
    #-----------------------------------------------------------
    # run all playbooks specified from config
    for playbook in playbooks_to_run:
        stats = callbacks.AggregateStats()

        # let inventory know which playbooks we are using so it can know the basedirs
        inventory.set_playbook_basedir(os.path.dirname(playbook))

        if options.use_default_callbacks:
            runner_cb = callbacks.PlaybookRunnerCallbacks(stats, verbose=utils.VERBOSITY)
            playbook_cb = callbacks.PlaybookCallbacks(verbose=utils.VERBOSITY)
        else:
            runner_cb = BattleschoolRunnerCallbacks()
            playbook_cb = BattleschoolCallbacks()

        if options.step:
            playbook_cb.step = options.step

        pb = ansible.playbook.PlayBook(
            playbook=playbook,
            module_path=options.module_path,
            inventory=inventory,
            forks=options.forks,
            remote_user=options.remote_user,
            remote_pass=sshpass,
            callbacks=playbook_cb,
            runner_callbacks=runner_cb,
            stats=stats,
            timeout=options.timeout,
            transport=options.connection,
            become=become,
            become_method="sudo",
            become_user=options.sudo_user,
            become_pass=sudopass,
            extra_vars=extra_vars,
            private_key_file=options.private_key_file,
            only_tags=only_tags,
            check=options.check,
            diff=options.diff
        )

        if options.listhosts or options.listtasks:
            print ''
            print 'playbook: %s' % playbook
            print ''
            playnum = 0
            for (play_ds, play_basedir) in zip(pb.playbook, pb.play_basedirs):
                playnum += 1
                play = ansible.playbook.Play(pb, play_ds, play_basedir)
                label = play.name
                if options.listhosts:
                    hosts = pb.inventory.list_hosts(play.hosts)
                    print '  play #%d (%s): host count=%d' % (playnum, label, len(hosts))
                    for host in hosts:
                        print '    %s' % host
                if options.listtasks:
                    matched_tags, unmatched_tags = play.compare_tags(pb.only_tags)
                    unmatched_tags.discard('all')
                    unknown_tags = set(pb.only_tags) - (matched_tags | unmatched_tags)
                    if unknown_tags:
                        continue
                    print '  play #%d (%s): task count=%d' % (playnum, label, len(play.tasks()))
                    for task in play.tasks():
                        if set(task.tags).intersection(pb.only_tags):
                            if getattr(task, 'name', None) is not None:
                                # meta tasks have no names
                                print '    %s' % task.name
                print ''
            continue

        if options.syntax:
            # if we've not exited by now then we are fine.
            print 'Playbook Syntax is fine'
            return 0

        failed_hosts = []

        try:

            pb.run()

            hosts = sorted(pb.stats.processed.keys())
            # display(callbacks.banner("PLAY RECAP"))
            playbook_cb.on_stats(pb.stats)

            for host in hosts:
                smry = pb.stats.summarize(host)
                if smry['unreachable'] > 0 or smry['failures'] > 0:
                    failed_hosts.append(host)

            if len(failed_hosts) > 0:
                filename = pb.generate_retry_inventory(failed_hosts)
                if filename:
                    display("           to retry, use: --limit @%s\n" % filename)

            for host in hosts:
                smry = pb.stats.summarize(host)
                print_stats(host, smry)

            # print ""
            if len(failed_hosts) > 0:
                return 2

        except errors.AnsibleError, e:
            display("ERROR: %s" % e, color='red')
            return 1

    if not playbooks_to_run:
        display("\tWARNING: no playbooks run!", color='yellow')

    os.remove(extra_vars_path)
    display(banner("Battleschool completed"))
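
The json.dumps call in this example only serializes the extra_vars dict to a temp file so a module can read it later. A minimal sketch of that round trip, with made-up values:

import json
import os
import tempfile

# Hypothetical extra vars; the real values come from config data and CLI options.
extra_vars = {"battleschool_cache_dir": "/tmp/cache", "mac_pkg_acquire_only": False}
extra_vars_path = os.path.join(tempfile.gettempdir(), "battleschool_extra_vars.json")
# Serialize with json.dumps and write to a temp file for a consumer to pick up.
with open(extra_vars_path, "w") as f:
    f.write(json.dumps(extra_vars))
# The consumer can read the same data back with json.load.
with open(extra_vars_path) as f:
    assert json.load(f) == extra_vars
os.remove(extra_vars_path)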

Example 69

Project: conda Source File: main_info.py
def execute(args, parser):
    import os
    from os.path import dirname

    import conda
    from conda.base.context import context
    from conda.models.channel import offline_keep
    from conda.resolve import Resolve
    from conda.api import get_index
    from conda.connection import user_agent

    if args.root:
        if context.json:
            stdout_json({'root_prefix': context.root_dir})
        else:
            print(context.root_dir)
        return

    if args.packages:
        index = get_index()
        r = Resolve(index)
        if context.json:
            stdout_json({
                package: [p._asdict()
                          for p in sorted(r.get_pkgs(arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    try:
        from conda.install import linked_data
        root_pkgs = linked_data(sys.prefix)
    except:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_env
        conda_env_version = conda_env.__version__
    except:
        try:
            cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
            conda_env_version = cenv[0]['version']
        except:
            conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = context.channels

    if args.unsafe_channels:
        if not context.json:
            print("\n".join(channels))
        else:
            print(json.dumps({"channels": channels}))
        return 0

    channels = list(prioritize_channels(channels).keys())
    if not context.json:
        channels = [c + ('' if offline_keep(c) else '  (offline)')
                    for c in channels]
    channels = [mask_anaconda_token(c) for c in channels]

    info_dict = dict(
        platform=context.subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=context.root_dir,
        conda_prefix=context.conda_prefix,
        conda_private=context.conda_private,
        root_writable=context.root_writable,
        pkgs_dirs=context.pkgs_dirs,
        envs_dirs=context.envs_dirs,
        default_prefix=context.default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        # is_foreign=bool(foreign),
        offline=context.offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
        user_agent=user_agent,
    )
    if not on_win:
        info_dict['UID'] = os.geteuid()
        info_dict['GID'] = os.getegid()

    if args.all or context.json:
        for option in options:
            setattr(args, option, True)

    if args.all or all(not getattr(args, opt) for opt in options):
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else
                               'read only')
        print("""\
Current conda install:

               platform : %(platform)s
          conda version : %(conda_version)s
       conda is private : %(conda_private)s
      conda-env version : %(conda_env_version)s
    conda-build version : %(conda_build_version)s
         python version : %(python_version)s
       requests version : %(requests_version)s
       root environment : %(root_prefix)s  (%(_rtwro)s)
    default environment : %(default_prefix)s
       envs directories : %(_envs_dirs)s
          package cache : %(_pkgs_dirs)s
           channel URLs : %(_channels)s
            config file : %(rc_path)s
           offline mode : %(offline)s
             user-agent : %(user_agent)s\
""" % info_dict)

        if not on_win:
            print("""\
                UID:GID : %(UID)s:%(GID)s
""" % info_dict)
        else:
            print()

    if args.envs:
        handle_envs_list(info_dict['envs'], not context.json)

    if args.system:
        from conda.cli.find_commands import find_commands, find_executable

        site_dirs = get_user_site()
        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']

        if context.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif context.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')

        if context.json:
            info_dict['sys.version'] = sys.version
            info_dict['sys.prefix'] = sys.prefix
            info_dict['sys.executable'] = sys.executable
            info_dict['site_dirs'] = get_user_site()
            info_dict['env_vars'] = {ev: os.getenv(ev, '<not set>') for ev in evars}
        else:
            print("sys.version: %s..." % (sys.version[:40]))
            print("sys.prefix: %s" % sys.prefix)
            print("sys.executable: %s" % sys.executable)
            print("conda location: %s" % dirname(conda.__file__))
            for cmd in sorted(set(find_commands() + ['build'])):
                print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
            print("user site dirs: ", end='')
            if site_dirs:
                print(site_dirs[0])
            else:
                print()
            for site_dir in site_dirs[1:]:
                print('                %s' % site_dir)
            print()

            for ev in sorted(evars):
                print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
            print()

    if args.license and not context.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if context.json:
        stdout_json(info_dict)
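
Both stdout_json and the --unsafe-channels branch above reduce to printing a dict as a JSON document for machine-readable CLI output. A minimal sketch with hypothetical keys and values (conda's real info dict has many more fields):

import json

# Hypothetical info payload standing in for conda's info_dict.
info = {"platform": "linux-64", "channels": ["defaults", "conda-forge"]}
# Printing the dict as JSON gives output other tools can parse.
print(json.dumps(info, indent=2, sort_keys=True))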

Example 70

Project: pecan Source File: test_rest.py
    def test_complicated_nested_rest(self):

        class BarsController(RestController):

            data = [['zero-zero', 'zero-one'], ['one-zero', 'one-one']]

            @expose()
            def get_one(self, foo_id, id):
                return self.data[int(foo_id)][int(id)]

            @expose('json')
            def get_all(self, foo_id):
                return dict(items=self.data[int(foo_id)])

            @expose()
            def new(self, foo_id):
                return 'NEW FOR %s' % foo_id

            @expose()
            def post(self, foo_id, value):
                foo_id = int(foo_id)
                if len(self.data) < foo_id + 1:
                    self.data.extend([[]] * (foo_id - len(self.data) + 1))
                self.data[foo_id].append(value)
                response.status = 302
                return 'CREATED FOR %s' % foo_id

            @expose()
            def edit(self, foo_id, id):
                return 'EDIT %s' % self.data[int(foo_id)][int(id)]

            @expose()
            def put(self, foo_id, id, value):
                self.data[int(foo_id)][int(id)] = value
                return 'UPDATED'

            @expose()
            def get_delete(self, foo_id, id):
                return 'DELETE %s' % self.data[int(foo_id)][int(id)]

            @expose()
            def delete(self, foo_id, id):
                del self.data[int(foo_id)][int(id)]
                return 'DELETED'

        class FoosController(RestController):

            data = ['zero', 'one']

            bars = BarsController()

            @expose()
            def get_one(self, id):
                return self.data[int(id)]

            @expose('json')
            def get_all(self):
                return dict(items=self.data)

            @expose()
            def new(self):
                return 'NEW'

            @expose()
            def edit(self, id):
                return 'EDIT %s' % self.data[int(id)]

            @expose()
            def post(self, value):
                self.data.append(value)
                response.status = 302
                return 'CREATED'

            @expose()
            def put(self, id, value):
                self.data[int(id)] = value
                return 'UPDATED'

            @expose()
            def get_delete(self, id):
                return 'DELETE %s' % self.data[int(id)]

            @expose()
            def delete(self, id):
                del self.data[int(id)]
                return 'DELETED'

        class RootController(object):
            foos = FoosController()

        # create the app
        app = TestApp(make_app(RootController()))

        # test get_all
        r = app.get('/foos')
        assert r.status_int == 200
        assert r.body == b_(dumps(dict(items=FoosController.data)))

        # test nested get_all
        r = app.get('/foos/1/bars')
        assert r.status_int == 200
        assert r.body == b_(dumps(dict(items=BarsController.data[1])))

        # test get_one
        for i, value in enumerate(FoosController.data):
            r = app.get('/foos/%d' % i)
            assert r.status_int == 200
            assert r.body == b_(value)

        # test nested get_one
        for i, value in enumerate(FoosController.data):
            for j, value in enumerate(BarsController.data[i]):
                r = app.get('/foos/%s/bars/%s' % (i, j))
                assert r.status_int == 200
                assert r.body == b_(value)

        # test post
        r = app.post('/foos', {'value': 'two'})
        assert r.status_int == 302
        assert r.body == b_('CREATED')

        # make sure it works
        r = app.get('/foos/2')
        assert r.status_int == 200
        assert r.body == b_('two')

        # test nested post
        r = app.post('/foos/2/bars', {'value': 'two-zero'})
        assert r.status_int == 302
        assert r.body == b_('CREATED FOR 2')

        # make sure it works
        r = app.get('/foos/2/bars/0')
        assert r.status_int == 200
        assert r.body == b_('two-zero')

        # test edit
        r = app.get('/foos/1/edit')
        assert r.status_int == 200
        assert r.body == b_('EDIT one')

        # test nested edit
        r = app.get('/foos/1/bars/1/edit')
        assert r.status_int == 200
        assert r.body == b_('EDIT one-one')

        # test put
        r = app.put('/foos/2', {'value': 'TWO'})
        assert r.status_int == 200
        assert r.body == b_('UPDATED')

        # make sure it works
        r = app.get('/foos/2')
        assert r.status_int == 200
        assert r.body == b_('TWO')

        # test nested put
        r = app.put('/foos/2/bars/0', {'value': 'TWO-ZERO'})
        assert r.status_int == 200
        assert r.body == b_('UPDATED')

        # make sure it works
        r = app.get('/foos/2/bars/0')
        assert r.status_int == 200
        assert r.body == b_('TWO-ZERO')

        # test put with _method parameter and GET
        r = app.get('/foos/2?_method=put', {'value': 'TWO!'}, status=405)
        assert r.status_int == 405

        # make sure it works
        r = app.get('/foos/2')
        assert r.status_int == 200
        assert r.body == b_('TWO')

        # test nested put with _method parameter and GET
        r = app.get(
            '/foos/2/bars/0?_method=put',
            {'value': 'ZERO-TWO!'}, status=405
        )
        assert r.status_int == 405

        # make sure it works
        r = app.get('/foos/2/bars/0')
        assert r.status_int == 200
        assert r.body == b_('TWO-ZERO')

        # test put with _method parameter and POST
        r = app.post('/foos/2?_method=put', {'value': 'TWO!'})
        assert r.status_int == 200
        assert r.body == b_('UPDATED')

        # make sure it works
        r = app.get('/foos/2')
        assert r.status_int == 200
        assert r.body == b_('TWO!')

        # test nested put with _method parameter and POST
        r = app.post('/foos/2/bars/0?_method=put', {'value': 'TWO-ZERO!'})
        assert r.status_int == 200
        assert r.body == b_('UPDATED')

        # make sure it works
        r = app.get('/foos/2/bars/0')
        assert r.status_int == 200
        assert r.body == b_('TWO-ZERO!')

        # test get delete
        r = app.get('/foos/2/delete')
        assert r.status_int == 200
        assert r.body == b_('DELETE TWO!')

        # test nested get delete
        r = app.get('/foos/2/bars/0/delete')
        assert r.status_int == 200
        assert r.body == b_('DELETE TWO-ZERO!')

        # test nested delete
        r = app.delete('/foos/2/bars/0')
        assert r.status_int == 200
        assert r.body == b_('DELETED')

        # make sure it works
        r = app.get('/foos/2/bars')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 0

        # test delete
        r = app.delete('/foos/2')
        assert r.status_int == 200
        assert r.body == b_('DELETED')

        # make sure it works
        r = app.get('/foos')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 2

        # test nested delete with _method parameter and GET
        r = app.get('/foos/1/bars/1?_method=DELETE', status=405)
        assert r.status_int == 405

        # make sure it works
        r = app.get('/foos/1/bars')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 2

        # test delete with _method parameter and GET
        r = app.get('/foos/1?_method=DELETE', status=405)
        assert r.status_int == 405

        # make sure it works
        r = app.get('/foos')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 2

        # test nested delete with _method parameter and POST
        r = app.post('/foos/1/bars/1?_method=DELETE')
        assert r.status_int == 200
        assert r.body == b_('DELETED')

        # make sure it works
        r = app.get('/foos/1/bars')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 1

        # test delete with _method parameter and POST
        r = app.post('/foos/1?_method=DELETE')
        assert r.status_int == 200
        assert r.body == b_('DELETED')

        # make sure it works
        r = app.get('/foos')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 1
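
The dumps/loads calls in this test follow one pattern: build the expected JSON body with dumps, compare it to the response bytes, then parse the body with loads to inspect its structure. A small self-contained sketch of that pattern; the body literal below is a hypothetical stand-in for r.body:

import json

expected = dict(items=['zero', 'one'])
body = b'{"items": ["zero", "one"]}'  # stand-in for r.body from the test app
# A byte-for-byte match relies on predictable key order, which is trivial
# here because the payload has a single key.
assert body == json.dumps(expected).encode()
# json.loads parses the body back for structural assertions.
assert len(json.loads(body.decode())['items']) == 2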

Example 71

Project: transcriptic Source File: english.py
    def job_tree(self):
        """
        A Job Tree visualizes the instructions of a protocol in a hierarchical
        structure based on container dependency to help human readers with manual
        execution. Its construction utilizes the algorithm below, as well as the
        Node object class (to store relational information) at the bottom of this 
        script.

        Example Usage:
            .. code-block:: python

                p = Protocol()

                bacterial_sample = p.ref("bacteria", None, "micro-1.5", discard=True)
                test_plate = p.ref("test_plate", None, "96-flat", storage="cold_4")

                p.dispense_full_plate(test_plate, "lb-broth-noAB", "50:microliter")
                w = 0
                amt = 1
                while amt < 20:
                    p.transfer(bacterial_sample.well(
                        0), test_plate.well(w), "%d:microliter" % amt)
                    amt += 2
                    w += 1

                pjsonString = json.dumps(p.as_dict(), indent=2)
                pjson = json.loads(pjsonString)
                parser_instance = english.AutoprotocolParser(pjson)
                parser_instance.job_tree()

                Output:
                1
                +---2
                3
                +---4
                5
                +---6
                7
                +---8
                9
                +---10
                11
                +---12


        Variables 
        ---------
        steps: list
            deep list of objects per instruction/step;
            the primary information the job tree is built from
        nodes: list
            list of node objects
        proto_forest: list
            list of lists grouped by connected nodes
        forest: list
            list of nested dictionaries, depicting parent-children relations
        forest_list: list
            list of nested lists, depicting parent-children relations
        """

        # 1. Enforce depth of 1 for steps
        def depth_one(steps):
            depth_one = []
            for step in steps:
                if type(step) is list:
                    if type(step[0]) is list:
                        depth_one.append(step[0])
                    else:
                        depth_one.append(step)
                else:
                    depth_one.append([step])
            return depth_one

        # 2. Convert steps to list of node objects (0,1,2,3...)
        def assign_nodes(steps):
            nodes = [i for i in range(len(steps))]
            objects = list(
                set([elem for sublist in steps for elem in sublist]))

            # checks for multiple src and dst objects -- added when looking for
            # multiples
            split_objects = []
            for obj in objects:
                if len(obj) > 1:
                    new_objs = obj.split(", ")
                    split_objects.extend(new_objs)
                else:
                    split_objects.append(obj)
            objects = split_objects
            del(split_objects)

            # populate with leafless trees (Node objects, no edges)
            for node in nodes:
                nodes[node] = Node(str(node))

            # search for leafy trees
            for obj in objects:

                # accounts for multiple src/dst objects
                leaves = []
                for i, sublist in enumerate(steps):
                    for string in sublist:
                        if string.count(',') > 0:
                            if obj in string:
                                leaves.append(i)
                        else:
                            if obj in sublist:
                                leaves.append(i)
                leaves = sorted(list(set(leaves)))

                if len(leaves) > 1:
                    viable_edges = []

                    # compute cross-product
                    for leaf1 in leaves:
                        for leaf2 in leaves:
                            if str(leaf1) != str(leaf2) and sorted((leaf1, leaf2)) not in viable_edges:
                                viable_edges.append(sorted((leaf1, leaf2)))

                    # form edge networks
                    for edge in viable_edges:
                        n1, n2 = nodes[edge[0]], nodes[edge[1]]
                        n1.add_edge(n2)
                        n2.add_edge(n1)
                        nodes[int(n1.name)], nodes[int(n2.name)] = n1, n2
            return nodes

        # 3. Determine number of trees and regroup by connected nodes
        def connected_nodes(nodes):
            proto_trees = []
            nodes = set(nodes)

            while nodes:
                n = nodes.pop()
                group = {n}
                queue = [n]
                while queue:
                    n = queue.pop(0)
                    neighbors = n.edges
                    neighbors.difference_update(group)
                    nodes.difference_update(neighbors)
                    group.update(neighbors)
                    queue.extend(neighbors)
                proto_trees.append(group)
            return proto_trees

        # 4. Convert nodes to nested dictionary of parent-children relations
        # i.e. adding depth -- also deals with tree-node sorting and path
        # optimization
        def build_tree_dict(trees, steps):
            # node sorting in trees
            sorted_trees = []
            for tree in trees:
                sorted_trees.append(
                    sorted(tree, key=lambda x: int(x.name)))

            # retrieve values of the nodes (the protocol's containers)
            # for each tree ... may want to use dictionary eventually
            all_values = []
            for tree in sorted_trees:
                values = [steps[int(node.name)] for node in tree]
                all_values.append(values)

            # create relational tuples:
            all_digs = []
            singles = []
            dst_potentials = []
            for tree_idx in range(len(sorted_trees)):
                edge_flag = False
                tree_digs = []
                for node_idx in range(len(sorted_trees[tree_idx])):

                    # digs: directed graph vectors
                    digs = []
                    dst_nodes = []
                    node_values = all_values[tree_idx][node_idx]
                    src_node = str(sorted_trees[tree_idx][node_idx].name)

                    # ACTION ON MULTIPLE OBJECTS (E.G. TRANSFER FROM SRC -> DST
                    # WELLS)
                    # Outcome space: {1-1, 1-many, many-1, many-many}
                    if len(node_values) == 2:
                        # single destination (x-1)
                        if node_values[1].count(",") == 0:
                            dst_nodes = [i for i, sublist in enumerate(
                                steps) if node_values[1] == sublist[0]]
                        # multiple destinations (x-many)
                        elif node_values[1].count(",") > 0:
                            dst_nodes = []
                            for dst in node_values[1].replace(", ", ""):
                                for i, sublist in enumerate(steps):
                                    if i not in dst_nodes and dst == sublist[0]:
                                        dst_nodes.append(i)

                    # ACTION ON A SINGLE OBJECT
                    elif len(node_values) == 1:
                        dst_nodes = [i for i, sublist in enumerate(
                            steps) if node_values[0] == sublist[0]]

                    # Constructing tuples in (child, parent) format
                    for dst_node in dst_nodes:
                        dig = (int(dst_node), int(src_node))
                        digs.append(dig)

                    # else: an edge-case for dictionaries constructed with no edges
                    # initiates tree separation via flag
                    if digs != []:
                        edge_flag = False
                        tree_digs.append(digs)
                    else:
                        edge_flag = True
                        digs = [(int(src_node), int(src_node))]
                        tree_digs.append(digs)

                # digraph cycle detection: avoids cycles by overlooking set
                # repeats
                true_tree_digs = []
                for digs in tree_digs:
                    for dig in digs:
                        if tuple(sorted(dig, reverse=True)) not in true_tree_digs:
                            true_tree_digs.append(
                                tuple(sorted(dig, reverse=True)))

                # edge-case for dictionaries constructed with no edges
                if true_tree_digs != [] and edge_flag == False:
                    all_digs.append(true_tree_digs)
                elif edge_flag == True:
                    all_digs.extend(tree_digs)

            # Enforces forest ordering
            all_digs = sorted(all_digs, key=lambda x: x[0])

            # job tree traversal to find all paths:
            forest = []
            for digs_set in all_digs:

                # pass 1: initialize nodes dictionary
                nodes = OrderedDict()
                for tup in digs_set:
                    id, parent_id = tup
                    # ensure all nodes accounted for
                    nodes[id] = OrderedDict({'id': id})
                    nodes[parent_id] = OrderedDict({'id': parent_id})

                # pass 2: create trees and parent-child relations
                for tup in digs_set:
                    id, parent_id = tup
                    node = nodes[id]
                    # links node to its parent
                    if id != parent_id:
                        # add new_node as child to parent
                        parent = nodes[parent_id]
                        if not 'children' in parent:
                            # ensure parent has a 'children' field
                            parent['children'] = []
                        children = parent['children']
                        children.append(node)

                desired_tree_idx = sorted(list(nodes.keys()))[0]
                forest.append(nodes[desired_tree_idx])
            return forest

        # 5. Convert dictionary-stored nodes to unflattened, nested list of
        # parent-children relations
        def dict_to_list(forest):
            forest_list = []
            for tree in forest:
                tString = str(json.dumps(tree))
                tString = tString.replace('"id": ', "").replace('"children": ', "").replace(
                    '[{', "[").replace('}]', "]").replace('{', "[").replace('}', "]")

                # find largest repeated branch (if applicable)
                # maybe think about using prefix trees or SIMD extensions for better
                # efficiency
                x, y, length, match = 0, 0, 0, ''
                for y in range(len(tString)):
                    for x in range(len(tString)):
                        substring = tString[y:x]
                        if len(list(re.finditer(re.escape(substring), tString))) > 1 and len(substring) > length:
                            match = substring
                            length = len(substring)

                # checking for legitimate branch repeat
                if "[" in match and "]" in match:
                    hits = []
                    index = 0
                    if len(tString) > 3:
                        while index < len(tString):
                            index = tString.find(str(match), index)
                            if index == -1:
                                break
                            hits.append(index)
                            index += len(match)

                    # find all locations of repeated branch and remove
                    if len(hits) > 1:
                        for start_loc in hits[1:]:
                            tString = tString[:start_loc] + \
                                tString[start_loc:].replace(match, "]", 1)

                # increment all numbers in string to match the protocol
                newString = ""
                numString = ""
                for el in tString:
                    if el.isdigit():  # build number
                        numString += el
                    else:
                        if numString != "":  # convert it to int and reinstantaite numString
                            numString = str(int(numString) + 1)
                        newString += numString
                        newString += el
                        numString = ""
                tString = newString
                del newString

                forest_list.append(ast.literal_eval(tString))
            return forest_list

        # 6. Print job tree(s)
        def print_tree(lst, level=0):
            print('    ' * (level - 1) + '+---' * (level > 0) + str(lst[0]))
            for l in lst[1:]:
                if type(l) is list:
                    print_tree(l, level + 1)
                else:
                    print('    ' * level + '+---' + l)

        # 1
        steps = depth_one(self.object_list)
        # 2
        nodes = assign_nodes(steps)
        # 3
        proto_forest = connected_nodes(nodes)
        # 4
        forest = build_tree_dict(proto_forest, steps)
        # 5
        self.forest_list = dict_to_list(forest)
        # 6
        print("\n" + "A suggested Job Tree based on container dependency: \n")
        for tree_list in self.forest_list:
            print_tree(tree_list)
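
Step 5 of the algorithm relies on json.dumps turning each nested OrderedDict tree into a predictable string before the bracket rewriting. A minimal sketch of that serialization with a made-up tree (not one produced by the parser):

import json
from collections import OrderedDict

# Made-up tree in the same parent/children shape the algorithm builds;
# OrderedDict keeps the 'id'/'children' key order stable in the output.
tree = OrderedDict([('id', 0),
                    ('children', [OrderedDict([('id', 1)]),
                                  OrderedDict([('id', 2)])])])
print(json.dumps(tree))
# prints: {"id": 0, "children": [{"id": 1}, {"id": 2}]}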

Example 72

Project: django-adminactions Source File: mass_update.py
def mass_update(modeladmin, request, queryset):  # noqa
    """
        mass update queryset
    """

    def not_required(field, **kwargs):
        """ force all fields as not required"""
        kwargs['required'] = False
        return field.formfield(**kwargs)

    def _doit():
        errors = {}
        updated = 0
        for record in queryset:
            for field_name, value_or_func in list(form.cleaned_data.items()):
                if callable(value_or_func):
                    old_value = getattr(record, field_name)
                    setattr(record, field_name, value_or_func(old_value))
                else:
                    setattr(record, field_name, value_or_func)
            if clean:
                record.clean()
            record.save()
            updated += 1
        if updated:
            messages.info(request, _("Updated %s records") % updated)

        if len(errors):
            messages.error(request, "%s records not updated due errors" % len(errors))
        adminaction_end.send(sender=modeladmin.model,
                             action='mass_update',
                             request=request,
                             queryset=queryset,
                             modeladmin=modeladmin,
                             form=form,
                             errors=errors,
                             updated=updated)

    opts = modeladmin.model._meta
    perm = "{0}.{1}".format(opts.app_label, get_permission_codename('adminactions_massupdate', opts))
    if not request.user.has_perm(perm):
        messages.error(request, _('Sorry you do not have rights to execute this action'))
        return

    try:
        adminaction_requested.send(sender=modeladmin.model,
                                   action='mass_update',
                                   request=request,
                                   queryset=queryset,
                                   modeladmin=modeladmin)
    except ActionInterrupted as e:
        messages.error(request, str(e))
        return

    # Allows a custom mass update Form to be specified in the ModelAdmin
    mass_update_form = getattr(modeladmin, 'mass_update_form', MassUpdateForm)

    MForm = modelform_factory(modeladmin.model, form=mass_update_form,
                              exclude=('pk',),
                              formfield_callback=not_required)
    grouped = defaultdict(lambda: [])
    selected_fields = []
    initial = {'_selected_action': request.POST.getlist(helpers.ACTION_CHECKBOX_NAME),
               'select_across': request.POST.get('select_across') == '1',
               'action': 'mass_update'}

    if 'apply' in request.POST:
        form = MForm(request.POST)
        if form.is_valid():
            try:
                adminaction_start.send(sender=modeladmin.model,
                                       action='mass_update',
                                       request=request,
                                       queryset=queryset,
                                       modeladmin=modeladmin,
                                       form=form)
            except ActionInterrupted as e:
                messages.error(request, str(e))
                return HttpResponseRedirect(request.get_full_path())

            # need_transaction = form.cleaned_data.get('_unique_transaction', False)
            validate = form.cleaned_data.get('_validate', False)
            clean = form.cleaned_data.get('_clean', False)

            if validate:
                with compat.atomic():
                    _doit()

            else:
                values = {}
                for field_name, value in list(form.cleaned_data.items()):
                    if isinstance(form.fields[field_name], ModelMultipleChoiceField):
                        messages.error(request, "Unable no mass update ManyToManyField without 'validate'")
                        return HttpResponseRedirect(request.get_full_path())
                    elif callable(value):
                        messages.error(request, "Unable no mass update using operators without 'validate'")
                        return HttpResponseRedirect(request.get_full_path())
                    elif field_name not in ['_selected_action', '_validate', 'select_across', 'action',
                                            '_unique_transaction', '_clean']:
                        values[field_name] = value
                queryset.update(**values)

            return HttpResponseRedirect(request.get_full_path())
    else:
        initial.update({'action': 'mass_update', '_validate': 1})
        # form = MForm(initial=initial)
        prefill_with = request.POST.get('prefill-with', None)
        prefill_instance = None
        try:
            # Gets the instance directly from the queryset for data security
            prefill_instance = queryset.get(pk=prefill_with)
        except ObjectDoesNotExist:
            pass

        form = MForm(initial=initial, instance=prefill_instance)

    for el in queryset.all()[:10]:
        for f in modeladmin.model._meta.fields:
            if f.name not in form._no_sample_for:
                if hasattr(f, 'flatchoices') and f.flatchoices:
                    grouped[f.name] = list(dict(getattr(f, 'flatchoices')).values())
                elif hasattr(f, 'choices') and f.choices:
                    grouped[f.name] = list(dict(getattr(f, 'choices')).values())
                elif isinstance(f, df.BooleanField):
                    grouped[f.name] = [True, False]
                else:
                    value = getattr(el, f.name)
                    if value is not None and value not in grouped[f.name]:
                        grouped[f.name].append(value)
                    initial[f.name] = initial.get(f.name, value)

    adminForm = helpers.AdminForm(form, modeladmin.get_fieldsets(request), {}, [], model_admin=modeladmin)
    media = modeladmin.media + adminForm.media
    dthandler = lambda obj: obj.isoformat() if isinstance(obj, datetime.date) else str(obj)
    tpl = 'adminactions/mass_update.html'
    ctx = {'adminform': adminForm,
           'form': form,
           'action_short_description': mass_update.short_description,
           'title': u"%s (%s)" % (
               mass_update.short_description.capitalize(),
               smart_text(modeladmin.opts.verbose_name_plural),
           ),
           'grouped': grouped,
           'fieldvalues': json.dumps(grouped, default=dthandler),
           'change': True,
           'selected_fields': selected_fields,
           'is_popup': False,
           'save_as': False,
           'has_delete_permission': False,
           'has_add_permission': False,
           'has_change_permission': True,
           'opts': modeladmin.model._meta,
           'app_label': modeladmin.model._meta.app_label,
           # 'action': 'mass_update',
           # 'select_across': request.POST.get('select_across')=='1',
           'media': mark_safe(media),
           'selection': queryset}
    if django.VERSION[:2] > (1, 7):
        ctx.update(modeladmin.admin_site.each_context(request))
    else:
        ctx.update(modeladmin.admin_site.each_context())

    if django.VERSION[:2] > (1, 8):
        return render(request, tpl, context=ctx)
    else:
        return render_to_response(tpl, RequestContext(request, ctx))
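
The fieldvalues entry above is the json.dumps call of interest: default=dthandler lets dates, which json cannot encode natively, fall back to isoformat(). A minimal standalone sketch of that pattern, with made-up sample data:

import datetime
import json

# Sample values grouped per field, as mass_update collects them (hypothetical data).
grouped = {
    'created': [datetime.date(2016, 1, 1), datetime.date(2016, 2, 1)],
    'active': [True, False],
}

# Route anything json cannot serialize through a fallback: isoformat() for dates, str() otherwise.
dthandler = lambda obj: obj.isoformat() if isinstance(obj, datetime.date) else str(obj)

print(json.dumps(grouped, default=dthandler))
# -> e.g. {"created": ["2016-01-01", "2016-02-01"], "active": [true, false]}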

Example 73

Project: treeio Source File: reports.py
@contextfunction
def display_chart(context, chart, skip_group=False):
    "Return HTML for chart"

    request = context['request']

    response_format = 'html'
    if 'response_format' in context:
        response_format = context['response_format']

    options = loads(chart.options)

    content = _get_report_content(chart.report, request)

    objs = content['set']

    chart_dict = {}

    field_name = options['grouping']

    model = loads(chart.report.model)

    chart_dict['yAxis'] = {'allowDecimals': False,
                           'title': {
                               'text': model.name.split('.')[-1] + " Count vs. " + field_name.replace('_', ' ').title()}
                           }
    chart_dict['xAxis'] = {}
    try:
        xfield = objs[0]._meta.get_field_by_name(field_name)[0]
    except:
        chart.delete()
        return

    def get_date(g, mindate):
        if g and g != datetime.min.date():
            return g
        else:
            return mindate

    if xfield.get_internal_type() == 'ManyToManyField':
        l = []
        for obj in objs:
            for mi in getattr(obj, field_name).all():
                l.append(unicode(mi))
    elif xfield.get_internal_type() == 'DateTimeField' or xfield.get_internal_type() == 'DateField':
        chart_dict['xAxis']['labels'] = {  # 'rotation':90,
                                           'align': 'left',
                                           'x': 3,
                                           'y': 15}
        l, m, datelist = [], [], []
        maxdate = None
        mindate = None
        for obj in objs:
            if getattr(obj, field_name):
                x = getattr(obj, field_name)
                if xfield.get_internal_type() == 'DateTimeField':
                    x = x.date()
                if not maxdate or x > maxdate:
                    maxdate = x
                if not mindate or x < mindate:
                    mindate = x
                datelist.append(x)
                if unicode(x) not in m:
                    m.append(unicode(x))
            else:
                datelist.append(datetime.min.date())
        while datetime.min.date() in datelist:
            datelist.append(mindate)
            datelist.remove(datetime.min.date())
        datelist = sorted(datelist, key=lambda g: get_date(g, mindate))
        l = [unicode(g) for g in datelist]

        # chart_dict['xAxis']['categories']=m

        chart_dict['xAxis']['type'] = 'datetime'
        td = maxdate - mindate
        # print (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
        chart_dict['zoomType'] = 'x'
        chart_dict['xAxis']['tickInterval'] = (
                                                  td.microseconds + (
                                                      td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 4
        # chart_dict['xAxis']['tickWidth']= 0,
        chart_dict['maxZoom'] = 14 * 24 * 3600000  # 2wks
        # chart_dict['xAxis']['gridLineWidth']= 1,
        chart_dict['series'] = [{'name': model.name.split('.')[-1], 'data': []}]
        for x in set(l):
            chart_dict['series'][0]['data'].append(('%s UTC' % x, l.count(x)))

    else:
        l = [unicode(obj.get_field_value(field_name)) for obj in objs]

    if 'series' not in chart_dict:
        chart_dict['series'] = []
        # chart_dict['series'].append({'name':field_name, 'data': [{'name': x, 'y':l.count(x)} for x in set(l)]})
        chart_dict['series'].append({'name': field_name.replace(
            '_', ' ').title(), 'data': [[x, l.count(x)] for x in set(l)]})
        # for x in set(l):
        # chart_dict['series'].append({'name':x, 'data': l.count(x)})
        # chart_dict['series'].append({'data':[{'name':x, 'y': [l.count(x)]} for x in set(l)]})
        if 'xAxis' not in chart_dict:
            chart_dict['xAxis']['categories'] = [x for x in set(l)]
        # Chart type specific options

        if 'legend' in options and options['legend'] == 'on':
            chart_dict['legend'] = {
                'layout': 'vertical',
                'align': 'right',
                'verticalAlign': 'top',
                'x': -10,
                'y': 100,
                'borderWidth': 0
            }
    if 'title' in options:
        chart_dict['title'] = {'text': options['title']}

    # Create a hash and use it as a unique div id and var name for the chart.
    hasher = hashlib.md5()
    hasher.update(str(random()))
    id = 'chartcontainer' + str(hasher.hexdigest())
    # Disable animation for when saving as PDF
    chart_dict['chart'] = {'renderTo': id,
                           'defaultSeriesType': options['type']}
    # chart_dict['plotOptions'] = {'series': {'animation': False}}

    chart_dict['plotOptions'] = {'pie': {
        'allowPointSelect': True,
        'cursor': 'pointer',
        'dataLabels': {
            'enabled': False
        },
        'showInLegend': True
    }}

    chart_dict['credits'] = {'enabled': False}

    rendered_options = json.dumps(chart_dict)

    rendered_options = rendered_options[
                       :-1] + ", tooltip: {formatter: function() {return '<b>'+ this.point.name +'</b>: '+ this.y;}}}"

    if 'type' in chart_dict['xAxis'] and chart_dict['xAxis']['type'] == 'datetime':
        rendered_options += """
        datedata = [];
        jQuery.each(options.series[0].data, function(i,item){
        date = Date.parse(item[0]);
        count = item[1];
        datedata.push([date, count]);
        });
      options.series[0].data = datedata;

      function merge_options(obj1,obj2){
        var obj3 = {};
        for (attrname in obj1) { obj3[attrname] = obj1[attrname]; }
        for (attrname in obj2) { obj3[attrname] = obj2[attrname]; }
        return obj3;
      }
      var dateoptions =  {


      tooltip: {
         shared: true,
         crosshairs: true

      },

      };


    options = merge_options(options, dateoptions);

              """

    return Markup(render_to_string('reports/tags/chart',
                                   {'rendered_options': rendered_options,
                                    'id': id,
                                    'chart_id': chart.id,
                                    'chart': chart,
                                    'name': options['title']},
                                   context_instance=RequestContext(request),
                                   response_format=response_format))
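
The chart code above serializes a nested options dict with json.dumps and then splices raw JavaScript (a tooltip formatter, which JSON cannot express) onto the end of the string before it reaches the template. A small self-contained sketch of that pattern, using made-up chart data:

import json

chart_dict = {
    'chart': {'renderTo': 'chartcontainer-demo', 'defaultSeriesType': 'pie'},
    'series': [{'name': 'Status', 'data': [['open', 3], ['closed', 7]]}],
    'credits': {'enabled': False},
}

rendered_options = json.dumps(chart_dict)
# Drop the closing brace and append a JS callback that json.dumps cannot produce.
rendered_options = rendered_options[:-1] + \
    ", tooltip: {formatter: function() {return '<b>'+ this.point.name +'</b>: '+ this.y;}}}"
print(rendered_options)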

Example 74

Project: Wallace Source File: clock.py
@scheduler.scheduled_job('interval', minutes=0.5)
def check_db_for_missing_notifications():
    """Check the database for missing notifications."""
    aws_access_key_id = os.environ['aws_access_key_id']
    aws_secret_access_key = os.environ['aws_secret_access_key']
    if config.getboolean('Shell Parameters', 'launch_in_sandbox_mode'):
        conn = MTurkConnection(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            host='mechanicalturk.sandbox.amazonaws.com')
    else:
        conn = MTurkConnection(
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key)

    # get all participants who are still working (status == "working")
    participants = Participant.query.filter_by(status="working").all()

    # get current time
    current_time = datetime.now()

    # get experiment duration in seconds
    duration = float(config.get('HIT Configuration', 'duration')) * 60 * 60

    # for each participant, check whether current_time - start_time > duration + 2 mins
    for p in participants:
        p_time = (current_time - p.creation_time).total_seconds()

        if p_time > (duration + 120):
            print ("Error: participant {} with status {} has been playing for too "
                   "long and no notification has arrived - "
                   "running emergency code".format(p.id, p.status))

            # get their assignment
            assignment_id = p.assignment_id

            # ask amazon for the status of the assignment
            try:
                assignment = conn.get_assignment(assignment_id)[0]
                status = assignment.AssignmentStatus
            except:
                status = None
            print "assignment status from AWS is {}".format(status)
            hit_id = p.hit_id

            # general email settings:
            username = os.getenv('wallace_email_username')
            fromaddr = username + "@gmail.com"
            email_password = os.getenv("wallace_email_key")
            toaddr = config.get('HIT Configuration', 'contact_email_on_error')
            whimsical = os.getenv("whimsical")

            if status == "Approved":
                # if its been approved, set the status accordingly
                print "status set to approved"
                p.status = "approved"
                session.commit()
            elif status == "Rejected":
                print "status set to rejected"
                # if its been rejected, set the status accordingly
                p.status = "rejected"
                session.commit()
            elif status == "Submitted":
                # if it has been submitted then resend a submitted notification
                args = {
                    'Event.1.EventType': 'AssignmentSubmitted',
                    'Event.1.AssignmentId': assignment_id
                }
                requests.post(
                    "http://" + os.environ['HOST'] + '/notifications',
                    data=args)

                # send the researcher an email to let them know
                if whimsical:
                    msg = MIMEText(
                        """Dearest Friend,\n\nI am writing to let you know that at
 {}, during my regular (and thoroughly enjoyable) perusal of the most charming
  participant data table, I happened to notice that assignment {} has been
 taking longer than we were expecting. I recall you had suggested {} minutes as
 an upper limit for what was an acceptable length of time for each assignment
 , however this assignment had been underway for a shocking {} minutes, a full
 {} minutes over your allowance. I immediately dispatched a telegram to our
 mutual friends at AWS and they were able to assure me that although the
 notification had failed to be correctly processed, the assignment had in fact
 been completed. Rather than trouble you, I dealt with this myself and I can
 assure you there is no immediate cause for concern. Nonetheless, for my own
 peace of mind, I would appreciate you taking the time to look into this matter
 at your earliest convenience.\n\nI remain your faithful and obedient servant,
\nAlfred R. Wallace\n\n P.S. Please do not respond to this message, I am busy
 with other matters.""".format(
                        datetime.now(),
                        assignment_id,
                        round(duration/60),
                        round(p_time/60),
                        round((p_time-duration)/60)))
                    msg['Subject'] = "A matter of minor concern."
                else:
                    msg = MIMEText(
                        """Dear experimenter,\n\nThis is an automated email from
 Wallace. You are receiving this email because the Wallace platform has
 discovered evidence that a notification from Amazon Web Services failed to
 arrive at the server. Wallace has automatically contacted AWS and has
 determined the dropped notification was a submitted notification (i.e. the
 participant has finished the experiment). This is a non-fatal error and so
 Wallace has auto-corrected the problem. Nonetheless you may wish to check the
 database.\n\nBest,\nThe Wallace dev. team.\n\n Error details:\nAssignment: {}
\nAllowed time: {}\nTime since participant started: {}""").format(
                        assignment_id,
                        round(duration/60),
                        round(p_time/60))
                    msg['Subject'] = "Wallace automated email - minor error."

                # This method commented out as gmail now blocks emails from
                # new locations
                # server = smtplib.SMTP('smtp.gmail.com:587')
                # server.starttls()
                # server.login(username, email_password)
                # server.sendmail(fromaddr, toaddr, msg.as_string())
                # server.quit()
                print ("Error - submitted notification for participant {} missed. "
                       "Database automatically corrected, but proceed with caution."
                       .format(p.id))
            else:
                # if it has not been submitted shut everything down
                # first turn off autorecruit
                host = os.environ['HOST']
                host = host[:-len(".herokuapp.com")]
                args = json.dumps({"auto_recruit": "false"})
                headers = {
                    "Accept": "application/vnd.heroku+json; version=3",
                    "Content-Type": "application/json"
                }
                heroku_email_address = os.getenv('heroku_email_address')
                heroku_password = os.getenv('heroku_password')
                requests.patch(
                    "https://api.heroku.com/apps/{}/config-vars".format(host),
                    data=args,
                    auth=(heroku_email_address, heroku_password),
                    headers=headers)

                # then force expire the hit via boto
                conn.expire_hit(hit_id)

                # send the researcher an email to let them know
                if whimsical:
                    msg = MIMEText(
                        """Dearest Friend,\n\nI am afraid I write to you with most
 grave tidings. At {}, during a routine check of the usually most delightful
 participant data table, I happened to notice that assignment {} has been
 taking longer than we were expecting. I recall you had suggested {} minutes as
 an upper limit for what was an acceptable length of time for each assignment,
 however this assignment had been underway for a shocking {} minutes, a full {}
 minutes over your allowance. I immediately dispatched a telegram to our mutual
 friends at AWS and they in fact informed me that they had already sent us a
 notification which we must have failed to process, implying that the
 assignment had not been successfully completed. Of course when the seriousness
 of this scenario dawned on me I had to depend on my trusting walking stick for
 support: without the notification I didn't know to remove the old assignment's
 data from the tables and AWS will have already sent their replacement, meaning
 that the tables may already be in a most unsound state!\n\nI am sorry to
 trouble you with this, however, I do not know how to proceed so rather than
 trying to remedy the scenario myself, I have instead temporarily ceased
 operations by expiring the HIT with the fellows at AWS and have refrained from
 posting any further invitations myself. Once you see fit I would be most
 appreciative if you could attend to this issue with the caution, sensitivity
 and intelligence for which I know you so well.\n\nI remain your faithful and
 obedient servant,\nAlfred R. Wallace\n\nP.S. Please do not respond to this
 message, I am busy with other matters.""".format(
                        datetime.now(),
                        assignment_id,
                        round(duration/60),
                        round(p_time/60),
                        round((p_time-duration)/60)))
                    msg['Subject'] = "Most troubling news."
                else:
                    msg = MIMEText(
                        """Dear experimenter,\n\nThis is an automated email from
 Wallace. You are receiving this email because the Wallace platform has
 discovered evidence that a notification from Amazon Web Services failed to
 arrive at the server. Wallace has automatically contacted AWS and has
 determined the dropped notification was an abandoned/returned notification
 (i.e. the participant had returned the experiment or had run out of time).
 This is a serious error and so Wallace has paused the experiment - expiring
 the HIT on MTurk and setting auto_recruit to false. Participants currently
 playing will be able to finish, however no further participants will be
 recruited until you do so manually. We strongly suggest you use the details
 below to check the database to make sure the missing notification has not caused
 additional problems before resuming.\nIf you are receiving a lot of these
 emails this suggests something is wrong with your experiment code.\n\nBest,
\nThe Wallace dev. team.\n\n Error details:\nAssignment: {}
\nAllowed time: {}\nTime since participant started: {}""").format(
                        assignment_id,
                        round(duration/60),
                        round(p_time/60))
                    msg['Subject'] = "Wallace automated email - major error."

                # This method commented out as gmail now blocks emails from
                # new locations
                # server = smtplib.SMTP('smtp.gmail.com:587')
                # server.starttls()
                # server.login(username, email_password)
                # server.sendmail(fromaddr, toaddr, msg.as_string())
                # server.quit()

                # send a NotificationMissing notification
                args = {
                    'Event.1.EventType': 'NotificationMissing',
                    'Event.1.AssignmentId': assignment_id
                }
                requests.post(
                    "http://" + os.environ['HOST'] + '/notifications',
                    data=args)

                print ("Error - abandoned/returned notification for participant {} missed. "
                       "Experiment shut down. Please check database and then manually "
                       "resume experiment."
                       .format(p.id))
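
Both branches above use json.dumps only once, to build the body of the Heroku config-vars call that switches auto_recruit off. A hedged sketch of that request in isolation (the app name and credentials are placeholders, not Wallace's):

import json
import requests

args = json.dumps({"auto_recruit": "false"})
headers = {
    "Accept": "application/vnd.heroku+json; version=3",
    "Content-Type": "application/json",
}
# PATCH the config vars of a (hypothetical) Heroku app with the JSON payload.
requests.patch(
    "https://api.heroku.com/apps/{}/config-vars".format("my-experiment-app"),
    data=args,
    auth=("researcher@example.com", "heroku-api-key"),
    headers=headers)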

Example 75

Project: ochothon Source File: deploy.py
    def run(self):
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            with open(self.template, 'r') as f:

                #
                # - parse the template yaml file (e.g container definition)
                #
                raw = yaml.load(f)
                assert raw, 'empty YAML input (user error ?)'

                #
                # - merge with our defaults
                # - we want at least the cluster & image settings
                # - TCP 8080 is added by default to the port list
                #
                defaults = \
                    {
                        'start': True,
                        'debug': False,
                        'settings': {},
                        'ports': [8080],
                        'verbatim': {}
                    }

                cfg = merge(defaults, raw)
                assert 'cluster' in cfg, 'cluster identifier undefined (user error ?)'
                assert 'image' in cfg, 'docker image undefined (user error ?)'

                #
                # - if a suffix is specified append it to the cluster identifier
                #
                if self.suffix:
                    cfg['cluster'] = '%s-%s' % (cfg['cluster'], self.suffix)

                #
                # - timestamp the application (we really want a new uniquely identified application)
                # - lookup the optional overrides and merge with our pod settings if specified
                # - this is what happens when the -o option is used
                #
                stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
                qualified = '%s.%s' % (self.namespace, cfg['cluster'])
                application = 'ochopod.%s-%s' % (qualified, stamp)
                if qualified in self.overrides:

                    blk = self.overrides[qualified]
                    logger.debug('%s : overriding %d settings (%s)' % (self.template, len(blk), qualified))
                    cfg['settings'] = merge(cfg['settings'], blk)

                def _nullcheck(cfg, prefix):

                    #
                    # - walk through the settings and flag any null value
                    #
                    missing = []
                    if cfg is not None:
                        for key, value in cfg.items():
                            if value is None:
                                missing += ['%s.%s' % ('.'.join(prefix), key)]
                            elif isinstance(value, dict):
                                missing += _nullcheck(value, prefix + [key])

                    return missing

                missing = _nullcheck(cfg['settings'], ['pod'])
                assert not missing, '%d setting(s) missing ->\n\t - %s' % (len(missing), '\n\t - '.join(missing))

                #
                # - if we still have no target default it to 1 single pod
                #
                if not self.pods:
                    self.pods = 1

                #
                # - setup our port list
                # - the port binding is specified either by an integer (container port -> dynamic mesos port), by
                #   two integers (container port -> host port) or by an integer followed by a * (container port ->
                #   same port on the host)
                # - on top of that, all those options allow to specify whether the protocol is TCP or UDP by adding
                #   the desired protocol after the binding (e.g. '8080 tcp' or '8125 * udp'). TCP is the default if no
                #   protocol is specified.
                # - the marathon pods must by design map /etc/mesos
                #
                def _parse_port(token):
                    
                    #
                    # - tries to return an int if possible, a string otherwise
                    #
                    def get_token_no_protocol(token):
                        # - remove the protocol piece
                        t = token[:-4].strip()
                        try:
                            return int(t)
                        except ValueError:
                            return t
                    
                    if isinstance(token, str) and token.lower().endswith(' udp'):
                        protocol = 'udp'
                        token_no_protocol = get_token_no_protocol(token)
                        
                    elif isinstance(token, str) and token.lower().endswith(' tcp'):
                        protocol = 'tcp'
                        token_no_protocol = get_token_no_protocol(token)
                    else:
                        # - TCP is the default
                        protocol = 'tcp'
                        token_no_protocol = token    
                    
                    if isinstance(token_no_protocol, int):
                        return {'containerPort': token_no_protocol, 'protocol': protocol}
                    elif isinstance(token_no_protocol, str) and token_no_protocol.endswith(' *'):
                        port = int(token_no_protocol[:-2])
                        return {'containerPort': port, 'hostPort': port, 'protocol': protocol}
                    elif isinstance(token_no_protocol, str):
                        ports = token_no_protocol.split(' ')
                        assert len(ports) == 2, 'invalid port syntax (must be two integers separated by 1+ spaces optionally followed by the protocol (tcp or udp, defaults to tcp))'
                        return {'containerPort': int(ports[0]), 'hostPort': int(ports[1]), 'protocol': protocol}
                    else:
                        assert 0, 'invalid port syntax ("%s")' % token

                #
                # - craft the docker image specifier
                # - if -r is used make sure to add (or override) the :<label> suffix
                #
                image = cfg['image']
                tokens = image.split(':')
                image = '%s:%s' % (tokens[0], self.release) if self.release else image

                #
                # - note the marathon-ec2 ochopod bindings will set the application hint automatically
                #   via environment variable (e.g no need to specify it here)
                # - make sure to mount /etc/mesos and /opt/mesosphere to account for various mesos installs
                #
                ports = [_parse_port(token) for token in cfg['ports']] if 'ports' in cfg else []
                spec = \
                    {
                        'id': application,
                        'instances': self.pods,
                        'env':
                            {
                                'ochopod_cluster': cfg['cluster'],
                                'ochopod_debug': str(cfg['debug']).lower(),
                                'ochopod_start': str(cfg['start']).lower(),
                                'ochopod_namespace': self.namespace,
                                'pod': json.dumps(cfg['settings'])
                            },
                        'container':
                            {
                                'type': 'DOCKER',
                                'docker':
                                    {
                                        'forcePullImage': True,
                                        'image': image,
                                        'network': 'BRIDGE',
                                        'portMappings': ports
                                    },
                                'volumes':
                                    [
                                        {
                                            'containerPath': '/etc/mesos',
                                            'hostPath': '/etc/mesos',
                                            'mode': 'RO'
                                        },
                                        {
                                            'containerPath': '/opt/mesosphere',
                                            'hostPath': '/opt/mesosphere',
                                            'mode': 'RO'
                                        }
                                    ]
                            }
                    }

                #
                # - if we have a 'verbatim' block in our image definition yaml, merge it now
                #
                if 'verbatim' in cfg:
                    spec = merge(cfg['verbatim'], spec)

                #
                # - pick a marathon master at random
                # - fire the POST /v2/apps to create our application
                # - this will indirectly spawn our pods
                #
                url = 'http://%s/v2/apps' % master
                reply = post(url, data=json.dumps(spec), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'submission failed (HTTP %d)' % code

                #
                # - wait for all the pods to be in the 'running' mode
                # - the 'application' hint is set by design to the marathon application identifier
                # - the sequence counters allocated to our new pods are returned as well
                #
                target = ['dead', 'running'] if self.strict else ['dead', 'stopped', 'running']
                @retry(timeout=self.timeout, pause=3, default={})
                def _spin():
                    def _query(zk):
                        replies = fire(zk, qualified, 'info')
                        return [(hints['process'], seq) for seq, hints, _ in replies.values()
                                if hints['application'] == application and hints['process'] in target]

                    js = run(self.proxy, _query)
                    assert len(js) == self.pods, 'not all pods running yet'
                    return js

                js = _spin()
                running = sum(1 for state, _ in js if state != 'dead')
                up = [seq for _, seq in js]
                self.out['up'] = up
                self.out['ok'] = self.pods == running
                logger.debug('%s : %d/%d pods are running ' % (self.template, running, self.pods))

                if not up:

                    #
                    # - nothing is running (typically because the image has an issue and is
                    #   not booting the ochopod script for instance, which happens often)
                    # - in that case fire a HTTP DELETE against the marathon application to clean it up
                    #
                    url = 'http://%s/v2/apps/%s' % (master, application)
                    reply = delete(url, headers=headers)
                    code = reply.status_code
                    logger.debug('-> %s (HTTP %d)' % (url, code))
                    assert code == 200 or code == 204, 'application deletion failed (HTTP %d)' % code

        except AssertionError as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, failure))

        except YAMLError as failure:

            if hasattr(failure, 'problem_mark'):
                mark = failure.problem_mark
                logger.debug('%s : invalid deploy.yml (line %s, column %s)' % (self.template, mark.line+1, mark.column+1))

        except Exception as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, diagnostic(failure)))
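deploy.py uses json.dumps twice: once to flatten the pod settings dict into a single environment-variable string, and once to serialize the whole Marathon application spec as the POST body. A minimal sketch of both uses, with illustrative names rather than ochothon's real configuration:

import json
import requests

settings = {'db': {'host': '10.0.0.1', 'port': 5432}}   # hypothetical pod settings
spec = {
    'id': 'ochopod.demo-2016-01-01-00-00-00',
    'instances': 1,
    'env': {'pod': json.dumps(settings)},                # nested dict travels as one string
}

# POST the serialized spec to a (made-up) Marathon endpoint as a JSON body.
reply = requests.post(
    'http://marathon.example:8080/v2/apps',
    data=json.dumps(spec),
    headers={'content-type': 'application/json', 'accept': 'application/json'})
print(reply.status_code)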

Example 76

Project: ansible-plugin-copyv Source File: copyv.py
    def run(self, conn, tmp_path, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source  = options.get('src', None)
        content = options.get('content', None)
        dest    = options.get('dest', None)
        raw     = utils.boolean(options.get('raw', 'no'))
        force   = utils.boolean(options.get('force', 'yes'))

        # content with newlines is going to be escaped to safely load in yaml
        # now we need to unescape it so that the newlines are evaluated properly
        # when writing the file to disk
        if content:
            if isinstance(content, unicode):
                try:
                    content = content.decode('unicode-escape')
                except UnicodeDecodeError:
                    pass

        if (source is None and content is None and not 'first_available_file' in inject) or dest is None:
            result=dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)
        elif (source is not None or 'first_available_file' in inject) and content is not None:
            result=dict(failed=True, msg="src and content are mutually exclusive")
            return ReturnData(conn=conn, result=result)

        # Check if the source ends with a "/"
        source_trailing_slash = False
        if source:
            source_trailing_slash = source.endswith("/")

        # Define content_tempfile in case we set it after finding content populated.
        content_tempfile = None

        # If content is defined make a temp file and write the content into it.
        if content is not None:
            try:
                # If content comes to us as a dict it should be decoded json.
                # We need to encode it back into a string to write it out.
                if type(content) is dict:
                    content_tempfile = self._create_content_tempfile(json.dumps(content))
                else:
                    content_tempfile = self._create_content_tempfile(content)
                source = content_tempfile
            except Exception, err:
                result = dict(failed=True, msg="could not write content temp file: %s" % err)
                return ReturnData(conn=conn, result=result)
        # if we have first_available_file in our vars
        # look up the files and use the first one we find as src
        elif 'first_available_file' in inject:
            found = False
            for fn in inject.get('first_available_file'):
                fn_orig = fn
                fnt = template.template(self.runner.basedir, fn, inject)
                fnd = utils.path_dwim(self.runner.basedir, fnt)
                if not os.path.exists(fnd) and '_original_file' in inject:
                    fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False)
                if os.path.exists(fnd):
                    source = fnd
                    found = True
                    break
            if not found:
                results = dict(failed=True, msg="could not find src in first_available_file list")
                return ReturnData(conn=conn, result=results)
        else:
            source = template.template(self.runner.basedir, source, inject)
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
        source_files = []

        # If source is a directory populate our list else source is a file and translate it to a tuple.
        if os.path.isdir(source):
            # Get the amount of spaces to remove to get the relative path.
            if source_trailing_slash:
                sz = len(source) + 1
            else:
                sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
            for base_path, sub_folders, files in os.walk(source):
                for file in files:
                    full_path = os.path.join(base_path, file)
                    rel_path = full_path[sz:]
                    source_files.append((full_path, rel_path))

            # If it's recursive copy, destination is always a dir,
            # explicitly mark it so (note - copy module relies on this).
            if not conn.shell.path_has_trailing_slash(dest):
                dest = conn.shell.join_path(dest, '')
        else:
            source_files.append((source, os.path.basename(source)))

        changed = False
        diffs = []
        module_result = {"changed": False}

        # A register for if we executed a module.
        # Used to cut down on command calls when not recursive.
        module_executed = False

        # Tell _execute_module to delete the file if there is one file.
        delete_remote_tmp = (len(source_files) == 1)

        # If this is a recursive action, create a tmp_path that we can share, as the one _execute_module creates would be too late.
        if not delete_remote_tmp:
            if "-tmp-" not in tmp_path:
                tmp_path = self.runner._make_tmp_path(conn)

        # expand any user home dir specifier
        dest = self.runner._remote_expand_user(conn, dest, tmp_path)

        vault = VaultLib(password=self.runner.vault_pass)

        for source_full, source_rel in source_files:
            
            vault_temp_file = None
            data = None

            try:
                data = open(source_full).read()
            except IOError:
                raise errors.AnsibleError("file could not read: %s" % source_full)

            if vault.is_encrypted(data):
                # if the file is encrypted and no password was specified,
                # the decrypt call would throw an error, but we check first
                # since the decrypt function doesn't know the file name
                if self.runner.vault_pass is None:
                    raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full)
                    
                data = vault.decrypt(data)
                # Make a temp file
                vault_temp_file = self._create_content_tempfile(data)
                source_full = vault_temp_file;
            
            # Generate a hash of the local file.
            local_checksum = utils.checksum(source_full)

            # If local_checksum is not defined we can't find the file so we should fail out.
            if local_checksum is None:
                result = dict(failed=True, msg="could not find src=%s" % source_full)
                return ReturnData(conn=conn, result=result)

            # This is a kind of optimization - if the user told us the destination is
            # a dir, do the path manipulation right away; otherwise we still check
            # for dest being a dir via a remote call below.
            if conn.shell.path_has_trailing_slash(dest):
                dest_file = conn.shell.join_path(dest, source_rel)
            else:
                dest_file = conn.shell.join_path(dest)

            # Attempt to get the remote checksum
            remote_checksum = self.runner._remote_checksum(conn, tmp_path, dest_file, inject)

            if remote_checksum == '3':
                # The remote_checksum was executed on a directory.
                if content is not None:
                    # If source was defined as content remove the temporary file and fail out.
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    result = dict(failed=True, msg="can not use content with a dir as dest")
                    return ReturnData(conn=conn, result=result)
                else:
                    # Append the relative source location to the destination and retry remote_checksum
                    dest_file = conn.shell.join_path(dest, source_rel)
                    remote_checksum = self.runner._remote_checksum(conn, tmp_path, dest_file, inject)

            if remote_checksum == '4':
                result = dict(msg="python isn't present on the system.  Unable to compute checksum", failed=True)
                return ReturnData(conn=conn, result=result)

            if remote_checksum != '1' and not force:
                # remote_file exists so continue to next iteration.
                continue

            if local_checksum != remote_checksum:
                # The checksums don't match and we will change or error out.
                changed = True

                # Create a tmp_path if missing only if this is not recursive.
                # If this is recursive we already have a tmp_path.
                if delete_remote_tmp:
                    if "-tmp-" not in tmp_path:
                        tmp_path = self.runner._make_tmp_path(conn)

                if self.runner.diff and not raw:
                    diff = self._get_diff_data(conn, tmp_path, inject, dest_file, source_full)
                else:
                    diff = {}

                if self.runner.noop_on_check(inject):
                    self._remove_tempfile_if_content_defined(content, content_tempfile)
                    diffs.append(diff)
                    changed = True
                    module_result = dict(changed=True)
                    continue

                # Define a remote directory that we will copy the file to.
                tmp_src = tmp_path + 'source'

                if not raw:
                    conn.put_file(source_full, tmp_src)
                else:
                    conn.put_file(source_full, dest_file)

                # We have copied the file remotely and no longer require our content_tempfile
                self._remove_tempfile_if_content_defined(content, content_tempfile)

                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file);
                    vault_temp_file = None

                # fix file permissions when the copy is done as a different user
                if self.runner.become and self.runner.become_user != 'root' and not raw:
                    self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp_path)

                if raw:
                    # Continue to next iteration if raw is defined.
                    continue

                # Run the copy module

                # src and dest here come after original and override them
                # we pass dest only to make sure it includes trailing slash in case of recursive copy
                new_module_args = dict(
                    src=tmp_src,
                    dest=dest,
                    original_basename=source_rel
                )
                if self.runner.noop_on_check(inject):
                    new_module_args['CHECKMODE'] = True
                if self.runner.no_log:
                    new_module_args['NO_LOG'] = True

                module_args_tmp = utils.merge_module_args(module_args, new_module_args)

                module_return = self.runner._execute_module(conn, tmp_path, 'copy', module_args_tmp, inject=inject, complex_args=complex_args, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            else:
                # no need to transfer the file, already correct hash, but still need to call
                # the file module in case we want to change attributes
                self._remove_tempfile_if_content_defined(content, content_tempfile)
                
                # Remove the vault tempfile if we have one
                if vault_temp_file:
                    os.remove(vault_temp_file);
                    vault_temp_file = None

                if raw:
                    # Continue to next iteration if raw is defined.
                    # self.runner._remove_tmp_path(conn, tmp_path)
                    continue

                tmp_src = tmp_path + source_rel

                # Build temporary module_args.
                new_module_args = dict(
                    src=tmp_src,
                    dest=dest,
                    original_basename=source_rel
                )
                if self.runner.noop_on_check(inject):
                    new_module_args['CHECKMODE'] = True
                if self.runner.no_log:
                    new_module_args['NO_LOG'] = True

                module_args_tmp = utils.merge_module_args(module_args, new_module_args)

                # Execute the file module.
                module_return = self.runner._execute_module(conn, tmp_path, 'file', module_args_tmp, inject=inject, complex_args=complex_args, delete_remote_tmp=delete_remote_tmp)
                module_executed = True

            module_result = module_return.result
            if not module_result.get('checksum'):
                module_result['checksum'] = local_checksum
            if module_result.get('failed') == True:
                return module_return
            if module_result.get('changed') == True:
                changed = True

        # Delete tmp_path if we were recursive or if we did not execute a module.
        if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) \
            or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed):
            self.runner._remove_tmp_path(conn, tmp_path)

        # the file module returns the file path as 'path', but 
        # the copy module uses 'dest', so add it if it's not there
        if 'path' in module_result and 'dest' not in module_result:
            module_result['dest'] = module_result['path']

        # TODO: Support detailed status/diff for multiple files
        if len(source_files) == 1:
            result = module_result
        else:
            result = dict(dest=dest, src=source, changed=changed)
        if len(diffs) == 1:
            return ReturnData(conn=conn, result=result, diff=diffs[0])
        else:
            return ReturnData(conn=conn, result=result)
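
Here json.dumps appears early on: when the inlined content option arrives as an already-parsed dict, it is re-encoded to a string so it can be written to a temporary file and copied like any other source. A standalone sketch of that branch (outside Ansible, with sample content):

import json
import tempfile

content = {'setting': 'value', 'enabled': True}   # hypothetical inline content

# Re-encode a dict back into JSON text; plain strings are written as-is.
serialized = json.dumps(content) if isinstance(content, dict) else content

with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as fh:
    fh.write(serialized)
    print('wrote', fh.name)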

Example 77

Project: congress Source File: statutes.py
def proc_statute_volume(path, options):
    mods = etree.parse(path + "/mods.xml")
    mods_ns = {"mods": "http://www.loc.gov/mods/v3"}

    # Load the THOMAS committee names for this Congress, which is our best
    # bet for normalizing committee names in the GPO data.
    congress = mods.find("/mods:extension[2]/mods:congress", mods_ns).text
    utils.fetch_committee_names(congress, options)

    logging.warn("Processing %s (Congress %s)" % (path, congress))

    package_id = mods.find("/mods:extension[2]/mods:accessId", mods_ns).text

    for bill in mods.findall("/mods:relatedItem", mods_ns):
        # MODS files also contain information about:
        # ['BACKMATTER', 'FRONTMATTER', 'CONSTAMEND', 'PROCLAMATION', 'REORGPLAN']
        if bill.find("mods:extension/mods:granuleClass", mods_ns).text not in ["PUBLICLAW", "PRIVATELAW", "HCONRES", "SCONRES"]:
            continue

        # Get the title and source URL (used in error messages).
        title_text = bill.find("mods:titleInfo/mods:title", mods_ns).text.replace('""', '"')
        source_url = bill.find("mods:location/mods:url[@displayLabel='Content Detail']", mods_ns).text

        # Bill number
        bill_elements = bill.findall("mods:extension/mods:bill[@priority='primary']", mods_ns)
        if len(bill_elements) == 0:
            logging.error("No bill number identified for '%s' (%s)" % (title_text, source_url))
            continue
        elif len(bill_elements) > 1:
            logging.error("Multiple bill numbers identified for '%s'" % title_text)
            for be in bill_elements:
                logging.error("  -- " + etree.tostring(be).strip())
            logging.error("  @ " + source_url)
            continue
        else:
            bill_congress = bill_elements[0].attrib["congress"]
            bill_type = bill_elements[0].attrib["type"].lower()
            bill_number = bill_elements[0].attrib["number"]
            bill_id = "%s%s-%s" % (bill_type, bill_number, bill_congress)

        # Title
        titles = []
        titles.append({
            "title": title_text,
            "as": "enacted",
            "type": "official",
            "is_for_portion": False,
        })

        # Subject
        descriptor = bill.find("mods:extension/mods:descriptor", mods_ns)
        if descriptor is not None:
            subject = descriptor.text
        else:
            subject = None

        # Committees
        committees = []
        cong_committee = bill.find("mods:extension/mods:congCommittee", mods_ns)
        if cong_committee is not None:
            chambers = {"H": "House", "S": "Senate", "J": "Joint"}
            committee = chambers[cong_committee.attrib["chamber"]] + " " + cong_committee.find("mods:name", mods_ns).text
            committee_info = {
                "committee": committee,
                "activity": [],  # XXX
                "committee_id": utils.committee_names[committee] if committee in utils.committee_names else None,
            }
            committees.append(committee_info)

        # The 'granuleDate' is the enactment date?
        granule_date = bill.find("mods:extension/mods:granuleDate", mods_ns).text

        sources = [{
            "source": "statutes",
            "package_id": package_id,
            "access_id": bill.find("mods:extension/mods:accessId", mods_ns).text,
            "source_url": source_url,
            "volume": bill.find("mods:extension/mods:volume", mods_ns).text,
            "page": bill.find("mods:part[@type='article']/mods:extent[@unit='pages']/mods:start", mods_ns).text,
            "position": bill.find("mods:extension/mods:pagePosition", mods_ns).text,
        }]

        law_elements = bill.findall("mods:extension/mods:law", mods_ns)

        # XXX: If <law> is missing, this assumes it is a concurrent resolution.
        #      This may be a problem if the code is updated to accept joint resolutions for constitutional amendments.
        if (law_elements is None) or (len(law_elements) != 1):
            other_chamber = {"HOUSE": "s", "SENATE": "h"}

            actions = [{
                "type": "vote",
                "vote_type": "vote2",
                "where": other_chamber[bill.find("mods:extension/mods:originChamber", mods_ns).text],
                "result": "pass",  # XXX
                "how": "unknown",  # XXX
                #        "text": "",
                "acted_at": granule_date,  # XXX
                "status": "PASSED:CONCURRENTRES",
                "references": [],  # XXX
            }]
        else:
            law_congress = law_elements[0].attrib["congress"]
            law_number = law_elements[0].attrib["number"]
            law_type = ("private" if (law_elements[0].attrib["isPrivate"] == "true") else "public")

            # Check for typos in the metadata.
            if law_congress != bill_congress:
                logging.error("Congress mismatch for %s%s: %s or %s? (%s)" % (bill_type, bill_number, bill_congress, law_congress, source_url))
                continue

            actions = [{
                "congress": law_congress,
                "number": law_number,
                "type": "enacted",
                "law": law_type,
                "text": "Became %s Law No: %s-%s." % (law_type.capitalize(), law_congress, law_number),
                "acted_at": granule_date,  # XXX
                "status": "ENACTED:SIGNED",  # XXX: Check for overridden vetoes!
                "references": [],  # XXX
            }]

        status, status_date = bill_info.latest_status(actions)

        bill_data = {
            'bill_id': bill_id,
            'bill_type': bill_type,
            'number': bill_number,
            'congress': bill_congress,

            'introduced_at': None,  # XXX
            'sponsor': None,  # XXX
            'cosponsors': [],  # XXX

            'actions': actions,  # XXX
            'history': bill_info.history_from_actions(actions),
            'status': status,
            'status_at': status_date,
            'enacted_as': bill_info.slip_law_from(actions),

            'titles': titles,
            'official_title': bill_info.current_title_for(titles, "official"),
            'short_title': bill_info.current_title_for(titles, "short"),  # XXX
            'popular_title': bill_info.current_title_for(titles, "popular"),  # XXX

            'subjects_top_term': subject,
            'subjects': [],

            'related_bills': [],  # XXX: <associatedBills> usually only lists the current bill.
            'committees': committees,
            'amendments': [],  # XXX

            'sources': sources,
            'updated_at': datetime.datetime.fromtimestamp(time.time()),
        }

        if not options.get('textversions', False):
            bill_info.output_bill(bill_data, options)

        # XXX: Can't use bill_versions.fetch_version() because it depends on fdsys.
        version_code = "enr"
        bill_version_id = "%s%s-%s-%s" % (bill_type, bill_number, bill_congress, version_code)
        bill_version = {
            'bill_version_id': bill_version_id,
            'version_code': version_code,
            'issued_on': status_date,
            'urls': {"pdf": bill.find("mods:location/mods:url[@displayLabel='PDF rendition']", mods_ns).text},
            'sources': sources,
        }
        utils.write(
            json.dumps(bill_version, sort_keys=True, indent=2, default=utils.format_datetime),
            bill_versions.output_for_bill_version(bill_version_id)
        )

        # Process the granule PDF.
        # - Hard-link it into the right place to be seen as bill text.
        # - Run "pdftotext -layout" to convert it to plain text and save it in the bill text location.
        pdf_file = path + "/" + sources[0]["access_id"] + "/docuement.pdf"
        if os.path.exists(pdf_file):
            dst_path = fdsys.output_for_bill(bill_data["bill_id"], "text-versions/" + version_code, is_data_dot=False)
            if options.get("linkpdf", False):
                os.link(pdf_file, dst_path + "/docuement.pdf")  # a good idea
            if options.get("extracttext", False):
                logging.error("Running pdftotext on %s..." % pdf_file)
                if subprocess.call(["pdftotext", "-layout", pdf_file, dst_path + "/docuement.txt"]) != 0:
                    raise Exception("pdftotext failed on %s" % pdf_file)

    return {'ok': True, 'saved': True}
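
The bill-version write above relies on json.dumps(..., sort_keys=True, indent=2, default=utils.format_datetime) to produce a stable, human-readable file. A minimal standalone sketch of that pattern, with a stand-in format_datetime helper and made-up field values (utils.format_datetime itself is not shown in the example):

import datetime
import json

def format_datetime(value):
    # Stand-in for utils.format_datetime: json.dumps calls this `default`
    # hook for anything it cannot serialize on its own (here, dates).
    if isinstance(value, (datetime.date, datetime.datetime)):
        return value.isoformat()
    raise TypeError("Not serializable: %r" % (value,))

bill_version = {
    'bill_version_id': 'hr1-113-enr',      # made-up sample values
    'version_code': 'enr',
    'issued_on': datetime.date(2013, 1, 3),
    'urls': {'pdf': 'http://example.com/document.pdf'},
}

# sort_keys + indent yield deterministic, diff-friendly output.
print(json.dumps(bill_version, sort_keys=True, indent=2, default=format_datetime))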

Example 78

Project: bitex Source File: execution.py
  def match(self, session, order, order_matcher_disabled=False, broker_fee=0):
    other_side = []
    self_side = []
    if order.is_buy:
      self_side = self.buy_side
      other_side = self.sell_side
    elif order.is_sell:
      other_side = self.buy_side
      self_side = self.sell_side


    execution_reports = []
    trades_to_publish = []

    execution_side = '1' if order.is_buy else '2'

    rpt_order  = ExecutionReport( order, execution_side )
    execution_reports.append( ( order.user_id, rpt_order.toJson() )  )
    if order.user_id != order.account_id:
      execution_reports.append( ( order.account_id, rpt_order.toJson() )  )

    is_last_match_a_partial_execution_on_counter_order = False
    execution_counter = 0
    number_of_filled_counter_market_orders = 0

    if not order_matcher_disabled:
      for execution_counter in xrange(0, len(other_side) + 1):
        if execution_counter == len(other_side):
          break # workaround to make the execution_counter be counted until the last order.

        if not order.leaves_qty > 0:
          break

        counter_order = other_side[execution_counter]

        if not order.has_match(counter_order):
          break

        # check for self execution
        if order.account_id == counter_order.account_id:
          # self execution.... let's cancel the counter order
          counter_order.cancel_qty( counter_order.leaves_qty )

          # generate a cancel report
          cancel_rpt_counter_order  = ExecutionReport( counter_order, execution_side )
          execution_reports.append( ( counter_order.user_id, cancel_rpt_counter_order.toJson() )  )
          if counter_order.user_id != counter_order.account_id:
            execution_reports.append( ( counter_order.account_id, cancel_rpt_counter_order.toJson() )  )

          # go to the next order
          is_last_match_a_partial_execution_on_counter_order = False
          continue

        # Get the desired executed price and qty, by matching against the counter_order
        executed_qty = order.match( counter_order, order.leaves_qty)

        if counter_order.type == '1': # Market Order
          executed_price = order.price
          number_of_filled_counter_market_orders += 1
        else:
          executed_price = counter_order.price

        # let's get the available qty to execute on the order side
        available_qty_on_order_side = order.get_available_qty_to_execute(session,
                                                                         '1' if order.is_buy else '2',
                                                                         executed_qty,
                                                                         executed_price )

        qty_to_cancel_from_order = 0
        if available_qty_on_order_side <  executed_qty:
          # ops ... looks like the order.user didn't have enough to execute the order
          executed_qty = available_qty_on_order_side

          # cancel the remaining  qty
          qty_to_cancel_from_order = order.leaves_qty - executed_qty


        # check if the order got fully cancelled
        if not executed_qty:
          order.cancel_qty( qty_to_cancel_from_order )
          cancel_rpt_order  = ExecutionReport( order, execution_side )
          execution_reports.append( ( order.user_id, cancel_rpt_order.toJson() )  )
          if order.user_id != order.account_id:
            execution_reports.append( ( order.account_id, cancel_rpt_order.toJson() )  )
          break


        # let's get the available qty to execute on the counter side
        available_qty_on_counter_side = counter_order.get_available_qty_to_execute(session,
                                                                                   '1' if counter_order.is_buy else '2',
                                                                                   executed_qty,
                                                                                   executed_price )

        qty_to_cancel_from_counter_order = 0
        if available_qty_on_counter_side <  executed_qty:
          if qty_to_cancel_from_order:
            qty_to_cancel_from_order -= executed_qty - available_qty_on_order_side

            # ops ... looks like the counter_order.user didn't have enough to execute the order
          executed_qty = available_qty_on_counter_side

          # cancel the remaining  qty
          qty_to_cancel_from_counter_order = counter_order.leaves_qty - executed_qty


        # check if the counter order was fully cancelled due to the lack of available qty
        if not executed_qty:
          # just cancel the counter order, and go to the next order.
          counter_order.cancel_qty( qty_to_cancel_from_counter_order )

          # generate a cancel report
          cancel_rpt_counter_order  = ExecutionReport( counter_order, execution_side )
          execution_reports.append( ( counter_order.user_id, cancel_rpt_counter_order.toJson() )  )
          if counter_order.user_id != counter_order.account_id:
            execution_reports.append( ( counter_order.account_id, cancel_rpt_counter_order.toJson() )  )

          # go to the next order
          is_last_match_a_partial_execution_on_counter_order = False
          continue

        # lets perform the execution
        if executed_qty:
          order.execute( executed_qty, executed_price )
          counter_order.execute(executed_qty, executed_price )

          trade = Trade.create(session, order, counter_order, self.symbol, executed_qty, executed_price, None, broker_fee )
          trades_to_publish.append(trade)

          rpt_order         = ExecutionReport( order, execution_side )
          execution_reports.append( ( order.user_id, rpt_order.toJson() )  )
          if order.user_id != order.account_id:
            execution_reports.append( ( order.account_id, rpt_order.toJson() )  )

          rpt_counter_order = ExecutionReport( counter_order, execution_side )
          execution_reports.append( ( counter_order.user_id, rpt_counter_order.toJson() )  )
          if counter_order.user_id != counter_order.account_id:
            execution_reports.append( ( counter_order.account_id, rpt_counter_order.toJson() )  )

          def generate_email_subject_and_body( session, order, trade ):
            from json import  dumps
            from pyblinktrade.json_encoder import  JsonEncoder
            from models import Currency

            qty_currency = order.symbol[:3]
            formatted_qty = Currency.format_number( session, qty_currency, trade.size / 1.e8 )


            price_currency = order.symbol[3:]
            formatted_price = Currency.format_number( session, price_currency, trade.price / 1.e8 )

            formatted_total_price = Currency.format_number( session, price_currency, trade.size/1.e8 * trade.price/1.e8 )

            email_subject =  'E'
            email_template = "order-execution"
            email_params = {
              'username': order.user.username,
              'order_id': order.id,
              'trade_id': trade.id,
              'side': order.side,
              'executed_when': trade.created,
              'qty': formatted_qty,
              'price': formatted_price,
              'total': formatted_total_price
            }
            return  email_subject, email_template, dumps(email_params, cls=JsonEncoder)

          email_data = generate_email_subject_and_body(session, counter_order, trade)
          UserEmail.create( session = session,
                            user_id = counter_order.account_id,
                            broker_id = counter_order.broker_id,
                            subject = email_data[0],
                            template= email_data[1],
                            language= counter_order.email_lang,
                            params  = email_data[2])


        #
        # let's do the partial cancels
        #

        # Cancel the qty from the current order
        if qty_to_cancel_from_order:
          order.cancel_qty(qty_to_cancel_from_order)

          # generate a cancel report
          cancel_rpt_order  = ExecutionReport( order, execution_side )
          execution_reports.append( ( order.user_id, cancel_rpt_order.toJson() )  )

          if order.user_id != order.account_id:
            execution_reports.append( ( order.account_id, cancel_rpt_order.toJson() )  )


        if qty_to_cancel_from_counter_order:
          counter_order.cancel_qty(qty_to_cancel_from_counter_order)

          # generate a cancel report
          cancel_rpt_counter_order  = ExecutionReport( counter_order, execution_side )
          execution_reports.append( ( counter_order.user_id, cancel_rpt_counter_order.toJson() )  )
          if counter_order.user_id != counter_order.account_id:
            execution_reports.append( ( counter_order.account_id, cancel_rpt_counter_order.toJson() )  )

        if counter_order.leaves_qty > 0:
          is_last_match_a_partial_execution_on_counter_order = True


    md_entry_type = '0' if order.is_buy else '1'
    counter_md_entry_type = '1' if order.is_buy else '0'

    # let's include the order in the book if the order is not fully executed.
    if order.leaves_qty > 0:
      insert_pos = bisect.bisect_right(self_side, order)
      self_side.insert( insert_pos, order )

      if order.type == '2': # Limited orders go to the book.
        MarketDataPublisher.publish_new_order( self.symbol, md_entry_type , insert_pos, order)

    # don't send the first execution report (NEW) if the order was fully cancelled
    if order.is_cancelled and order.cum_qty == 0:
      execution_reports.pop(0)

    # Publish all execution reports
    for user_id, execution_report in execution_reports:
      TradeApplication.instance().publish( user_id, execution_report )

    # Publish Market Data for the counter order
    if execution_counter:
      if is_last_match_a_partial_execution_on_counter_order:
        del other_side[0: execution_counter-1]
        MarketDataPublisher.publish_executions( self.symbol,
                                                 counter_md_entry_type,
                                                 execution_counter - 1 - number_of_filled_counter_market_orders,
                                                 other_side[0] )
      else:
        del other_side[0: execution_counter]
        MarketDataPublisher.publish_executions( self.symbol,
                                                 counter_md_entry_type,
                                                 execution_counter - number_of_filled_counter_market_orders )

    if trades_to_publish:
      MarketDataPublisher.publish_trades(self.symbol, trades_to_publish)
    return ""

Example 79

Project: django-admin-timeline Source File: views.py
@csrf_exempt
@never_cache
@staff_member_required
def log(request, template_name=TEMPLATE_NAME, \
        template_name_ajax=TEMPLATE_NAME_AJAX):
    """
    Get a number of log entries. Serves both non-AJAX and AJAX driven requests.

    Since we have a breakdown of entries per day per entry and we have an AJAX
    driven infinite scroll and we want to avoid having duplicated date headers,
    we always pass a variable named "last_date" when making another request
    to our main AJAX-driven view. So... this is our case scenario:

    Initial timeline rendered as normal HTML (non-AJAX request) (from a list
    of log entries). We send the date of the last element as "last_date" to
    the context too, which will be used as an initial value for a global JavaScript
    variable. Later on that date will be used to send it to the AJAX driven
    view and used in rendering ("render_to_string" method). After we have
    rendered the HTML to send back, we get the last date of the last element
    and send it along with the HTML rendered to our view in JSON response.
    When receiving the JSON response, we update the above mentioned global
    JavaScript variable with the value given.

    :param request: django.http.HttpRequest
    :param template_name: str
    :param template_name_ajax: str
    :return: django.http.HttpResponse

    This view accepts the following POST variables (all optional).
    :param page: int - Page number to get.
    :param user_id: int - If set, used to filter the user by.
    :param last_date: str - Example value "2012-05-24".
    :param start_date: str - If set, used as a start date to filter the actions
        with. Example value "2012-05-24".
    :param end_date: str - If set, used as an end date to filter the actions
        with. Example value "2012-05-24".

    NOTE: If it gets too complicated with filtering, we need to have forms to
    validate and process the POST data.
    """
    def _get_date_from_string(s):
        """
        Gets date from a string given.

        :param s: str - date in string format
        :return: datetime.datetime
        """
        try:
            return datetime.date(*map(lambda x: int(x), s.split("-")))
        except Exception as e:
            return ""

    try:
        page = int(request.POST.get('page', 1))
        if page < 1:
            page = 1
    except Exception as e:
        page = 1

    users = []
    content_types = []
    filter_form = None

    if 'POST' == request.method:
        post = dict(request.POST)
        if 'users[]' in post:
            post['users'] = post.pop('users[]')
        if 'content_types[]' in post:
            post['content_types'] = post.pop('content_types[]')

        filter_form = FilterForm(post)
        if filter_form.is_valid():
            users = filter_form.cleaned_data['users']
            content_types = filter_form.cleaned_data['content_types']
        else:
            pass # Anything to do here?
    else:
        filter_form = FilterForm()

    # Some kind of a pagination
    start = (page - 1) * NUMBER_OF_ENTRIES_PER_PAGE
    end = page * NUMBER_OF_ENTRIES_PER_PAGE

    # Getting admin log entries, taking page number into consideration.
    log_entries = LogEntry.objects.all().select_related('content_type', 'user')

    start_date = _get_date_from_string(request.POST.get('start_date'))
    end_date = _get_date_from_string(request.POST.get('end_date'))

    if start_date:
        log_entries = log_entries.filter(action_time__gte=start_date) # TODO

    if end_date:
        log_entries = log_entries.filter(action_time__lte=end_date) # TODO

    # If users given, filtering by users
    if users:
        log_entries = log_entries.filter(user__id__in=users)

    # If content types given, filtering by content types
    if content_types:
        log_entries = log_entries.filter(content_type__id__in=content_types)

    # Applying limits / freezing the queryset
    log_entries = log_entries[start:end]

    if log_entries:
        last_date = date_format(
            log_entries[len(log_entries) - 1].action_time, "Y-m-d"
            )
    else:
        last_date = request.POST.get('last_date', None)

    # Using different template for AJAX driven requests
    if request.is_ajax():
        # Context to render the AJAX driven HTML with
        context = {
            'admin_log': log_entries,
            'number_of_entries_per_page': NUMBER_OF_ENTRIES_PER_PAGE,
            'page': page,
            'last_date': request.POST.get('last_date', None),
            'SINGLE_LOG_ENTRY_DATE_FORMAT': SINGLE_LOG_ENTRY_DATE_FORMAT,
            'LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT': \
                LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT
        }

        # Rendering HTML for an AJAX driven request
        html = render_to_string(
            template_name_ajax,
            context,
            context_instance=RequestContext(request)
        )

        # Context to send back to user in a JSON response
        context = {
            'html': html,
            'last_date': last_date,
            'success': 1 if len(log_entries) else 0
        }
        return HttpResponse(json.dumps(context))

    # Context for a non-AJAX request
    context = {
        'admin_log': log_entries,
        'number_of_entries_per_page': NUMBER_OF_ENTRIES_PER_PAGE,
        'page': page,
        'last_date': last_date,
        'start_date': date_format(start_date, "Y-m-d") if start_date else "",
        'end_date': date_format(end_date, "Y-m-d") if end_date else "",
        'users': [int(u) for u in users],
        'content_types': [int(ct) for ct in content_types],
        'filter_form': filter_form,
        'SINGLE_LOG_ENTRY_DATE_FORMAT': SINGLE_LOG_ENTRY_DATE_FORMAT,
        'LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT': \
            LOG_ENTRIES_DAY_HEADINGS_DATE_FORMAT,
        'title': _("Timeline") # For template breadcrumbs, etc.
    }

    return render_to_response(
        template_name, context, context_instance=RequestContext(request)
        )
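
For the AJAX branch above, the view first renders the template fragment to a string and only then wraps plain, JSON-serializable values in HttpResponse(json.dumps(context)). A small sketch of that payload-building step, with a hypothetical helper standing in for the view:

import json

def ajax_payload(html, last_date, entry_count):
    # Hypothetical helper: everything placed in the payload is already a
    # plain string/number, so json.dumps needs no custom encoder here.
    context = {
        'html': html,                       # pre-rendered template fragment
        'last_date': last_date,             # e.g. "2012-05-24" or None
        'success': 1 if entry_count else 0,
    }
    return json.dumps(context)

print(ajax_payload('<li>entry</li>', '2012-05-24', 3))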

Example 80

Project: nzb-notify Source File: NotifyTelegram.py
    def _notify(self, title, body, notify_type, **kwargs):
        """
        Perform Telegram Notification
        """

        headers = {
            'User-Agent': self.app_id,
            'Content-Type': 'application/json',
        }

        # error tracking (used for function return)
        has_error = False

        image_url = None
        if self.include_image:
            image_content = self.image_raw(
                notify_type,
            )
            if image_content is not None:
                # We have an image to work with; set up our boolean
                has_image = True

                # prepare our image URL
                image_url = '%s%s/%s' % (
                    TELEGRAM_BOT_URL,
                    self.bot_token,
                    'sendPhoto'
                )

                # Set up our upload
                files = {'photo': ('%s.png' % notify_type, image_content)}

        url = '%s%s/%s' % (
            TELEGRAM_BOT_URL,
            self.bot_token,
            'sendMessage'
        )

        payload = {}

        if self.notify_format == NotifyFormat.HTML:
            payload['parse_mode'] = 'HTML'
            payload['text'] = '<b>%s</b>\r\n%s' % (title, body)

        else: # Text
            #payload['parse_mode'] = 'Markdown'
            payload['parse_mode'] = 'HTML'
            payload['text'] = '<b>%s</b>\r\n%s' % (
                self.escape_html(title),
                self.escape_html(body),
            )

        # Create a copy of the chat_ids list
        chat_ids = list(self.chat_ids)
        while len(chat_ids):
            chat_id = chat_ids.pop(0)
            chat_id = IS_CHAT_ID_RE.match(chat_id)
            if not chat_id:
                self.logger.warning(
                    "The specified chat_id '%s' is invalid; skipping." % (
                        chat_id,
                    )
                )
                continue

            if chat_id.group('name') is not None:
                # Name
                payload['chat_id'] = '@%s' % chat_id.group('name')

            else:
                # ID
                payload['chat_id'] = chat_id.group('idno')

            if image_url is not None:
                image_payload = {
                    'chat_id': payload['chat_id'],
                    'disable_notification': True,
                }
                self.logger.debug('Telegram (image) POST URL: %s' % image_url)
                self.logger.debug('Telegram (image) Payload: %s' % str(image_payload))

                try:
                    r = requests.post(
                        image_url,
                        data=image_payload,
                        headers={
                            'User-Agent': self.app_id,
                        },
                        files=files,
                    )
                    if r.status_code != requests.codes.ok:
                        # We had a problem

                        try:
                            # Try to get the error message if we can:
                            error_msg = loads(r.text)['description']
                        except:
                            error_msg = None

                        try:
                            if error_msg:
                                self.logger.warning(
                                    'Failed to send Telegram Image:%s ' % \
                                    payload['chat_id'] +\
                                    'notification: (%s) %s.' % (
                                        r.status_code, error_msg,
                                ))

                            else:
                                self.logger.warning(
                                    'Failed to send Telegram Image:%s ' % \
                                    payload['chat_id'] +\
                                    'notification: %s (error=%s).' % (
                                        HTTP_ERROR_MAP[r.status_code],
                                        r.status_code,
                                ))

                        except IndexError:
                            self.logger.warning(
                                'Failed to send Telegram Image:%s ' % \
                                payload['chat_id'] +\
                                'notification (error=%s).' % (
                                    r.status_code,
                            ))

                        has_error = True
                        continue

                except requests.ConnectionError as e:
                    self.logger.warning(
                        'A Connection error occurred sending Telegram:%s ' % (
                            payload['chat_id']) + 'notification.'
                    )
                    self.logger.debug('Socket Exception: %s' % str(e))
                    has_error = True
                    continue

            self.logger.debug('Telegram POST URL: %s' % url)
            self.logger.debug('Telegram Payload: %s' % str(payload))

            try:
                r = requests.post(
                    url,
                    data=dumps(payload),
                    headers=headers,
                )
                if r.status_code != requests.codes.ok:
                    # We had a problem

                    try:
                        # Try to get the error message if we can:
                        error_msg = loads(r.text)['description']
                    except:
                        error_msg = None

                    try:
                        if error_msg:
                            self.logger.warning(
                                'Failed to send Telegram:%s ' % payload['chat_id'] +\
                                'notification: (%s) %s.' % (
                                    r.status_code, error_msg,
                            ))

                        else:
                            self.logger.warning(
                                'Failed to send Telegram:%s ' % payload['chat_id'] +\
                                'notification: %s (error=%s).' % (
                                    HTTP_ERROR_MAP[r.status_code],
                                    r.status_code,
                            ))

                    except IndexError:
                        self.logger.warning(
                            'Failed to send Telegram:%s ' % payload['chat_id'] +\
                            'notification (error=%s).' % (
                                r.status_code,
                        ))

                    #self.logger.debug('Response Details: %s' % r.raw.read())
                    # Return; we're done
                    has_error = True

            except requests.ConnectionError as e:
                self.logger.warning(
                    'A Connection error occurred sending Telegram:%s ' % (
                        payload['chat_id']) + 'notification.'
                )
                self.logger.debug('Socket Exception: %s' % str(e))
                has_error = True

            if len(chat_ids):
                # Prevent thrashing requests
                self.throttle()

        return has_error
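
The sendMessage call above serializes the payload itself (data=dumps(payload)) and sets the JSON content type explicitly, instead of relying on requests to encode it. A hedged sketch of that pattern; the helper below is illustrative and not the plugin's API, and it assumes the public https://api.telegram.org/bot<token>/ endpoint:

import json

import requests

def send_message(bot_token, chat_id, text):
    # Illustrative helper: serialize the body with json.dumps and tell the
    # server it is JSON via the Content-Type header.
    url = 'https://api.telegram.org/bot%s/sendMessage' % bot_token
    payload = {'chat_id': chat_id, 'parse_mode': 'HTML', 'text': text}
    headers = {'Content-Type': 'application/json'}
    r = requests.post(url, data=json.dumps(payload), headers=headers)
    return r.status_code == requests.codes.ok

# send_message('123456:ABC-DEF', '@mychannel', '<b>title</b>\r\nbody')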

Example 81

Project: aws-lambda-ddns-function Source File: union.py
def lambda_handler(event, context):
    """ Check to see whether a DynamoDB table already exists.  If not, create it.  This table is used to keep a record of
    instances that have been created along with their attributes.  This is necessary because when you terminate an instance
    its attributes are no longer available, so they have to be fetched from the table."""
    tables = dynamodb_client.list_tables()
    if 'DDNS' in tables['TableNames']:
        print 'DynamoDB table already exists'
    else:
        create_table('DDNS')

    # Set variables
    # Get the state from the Event stream
    state = event['detail']['state']

    # Get the instance id, region, and tag collection
    instance_id = event['detail']['instance-id']
    region = event['region']
    table = dynamodb_resource.Table('DDNS')

    if state == 'running':
        time.sleep(60)
        instance = compute.describe_instances(InstanceIds=[instance_id])
        # Remove response metadata from the response
        instance.pop('ResponseMetadata')
        # Remove null values from the response.  You cannot save a dict/JSON document in DynamoDB if it contains null
        # values
        instance = remove_empty_from_dict(instance)
        instance_dump = json.dumps(instance,default=json_serial)
        instance_attributes = json.loads(instance_dump)
        table.put_item(
            Item={
                'InstanceId': instance_id,
                'InstanceAttributes': instance_attributes
            }
        )
    else:
        # Fetch item from DynamoDB
        instance = table.get_item(
        Key={
            'InstanceId': instance_id
        },
        AttributesToGet=[
            'InstanceAttributes'
            ]
        )
        instance = instance['Item']['InstanceAttributes']

    try:
        tags = instance['Reservations'][0]['Instances'][0]['Tags']
    except:
        tags = []
    # Get instance attributes
    private_ip = instance['Reservations'][0]['Instances'][0]['PrivateIpAddress']
    private_dns_name = instance['Reservations'][0]['Instances'][0]['PrivateDnsName']
    private_host_name = private_dns_name.split('.')[0]
    try:
        public_ip = instance['Reservations'][0]['Instances'][0]['PublicIpAddress']
        public_dns_name = instance['Reservations'][0]['Instances'][0]['PublicDnsName']
        public_host_name = public_dns_name.split('.')[0]
    except BaseException as e:
        print 'Instance has no public IP or host name', e

    # Get the subnet mask of the instance
    subnet_id = instance['Reservations'][0]['Instances'][0]['SubnetId']
    subnet = ec2.Subnet(subnet_id)
    cidr_block = subnet.cidr_block
    subnet_mask = int(cidr_block.split('/')[-1])

    reversed_ip_address = reverse_list(private_ip)
    reversed_domain_prefix = get_reversed_domain_prefix(subnet_mask, private_ip)
    reversed_domain_prefix = reverse_list(reversed_domain_prefix)

    # Set the reverse lookup zone
    reversed_lookup_zone = reversed_domain_prefix + 'in-addr.arpa.'
    print 'The reverse lookup zone for this instance is:', reversed_lookup_zone

    # Get VPC id
    vpc_id = instance['Reservations'][0]['Instances'][0]['VpcId']
    vpc = ec2.Vpc(vpc_id)

    # Are DNS Hostnames and DNS Support enabled?
    if is_dns_hostnames_enabled(vpc):
        print 'DNS hostnames enabled for %s' % vpc_id
    else:
        print 'DNS hostnames disabled for %s.  You have to enable DNS hostnames to use Route 53 private hosted zones.' % vpc_id
    if is_dns_support_enabled(vpc):
        print 'DNS support enabled for %s' % vpc_id
    else:
        print 'DNS support disabled for %s.  You have to enable DNS support to use Route 53 private hosted zones.' % vpc_id

    # Create the public and private hosted zone collections.  These are collections of zones in Route 53.
    hosted_zones = route53.list_hosted_zones()
    private_hosted_zones = filter(lambda x: x['Config']['PrivateZone'] is True, hosted_zones['HostedZones'])
    private_hosted_zone_collection = map(lambda x: x['Name'], private_hosted_zones)
    public_hosted_zones = filter(lambda x: x['Config']['PrivateZone'] is False, hosted_zones['HostedZones'])
    public_hosted_zones_collection = map(lambda x: x['Name'], public_hosted_zones)
    # Check to see whether a reverse lookup zone for the instance already exists.  If it does, check to see whether
    # the reverse lookup zone is associated with the instance's VPC.  If it isn't, create the association.  You don't
    # need to do this when you create the reverse lookup zone because the association is done automatically.
    if filter(lambda record: record['Name'] == reversed_lookup_zone, hosted_zones['HostedZones']):
        print 'Reverse lookup zone found:', reversed_lookup_zone
        reverse_lookup_zone_id = get_zone_id(reversed_lookup_zone)
        reverse_hosted_zone_properties = get_hosted_zone_properties(reverse_lookup_zone_id)
        if vpc_id in map(lambda x: x['VPCId'], reverse_hosted_zone_properties['VPCs']):
            print 'Reverse lookup zone %s is associated with VPC %s' % (reverse_lookup_zone_id, vpc_id)
        else:
            print 'Associating zone %s with VPC %s' % (reverse_lookup_zone_id, vpc_id)
            try:
                associate_zone(reverse_lookup_zone_id, region, vpc_id)
            except BaseException as e:
                print e
    else:
        print 'No matching reverse lookup zone'
        # create private hosted zone for reverse lookups
        if state == 'running':
            create_reverse_lookup_zone(instance, reversed_domain_prefix, region)
            reverse_lookup_zone_id = get_zone_id(reversed_lookup_zone)
    # Wait a random amount of time.  This is a poor man's back-off if a lot of instances are launched all at once.
    time.sleep(random.random())

    # Loop through the instance's tags, looking for the zone and cname tags.  If either of these tags exist, check
    # to make sure that the name is valid.  If it is and if there's a matching zone in DNS, create A and PTR records.
    for tag in tags:
        if 'ZONE' in tag.get('Key',{}).lstrip().upper():
            if is_valid_hostname(tag.get('Value')):
                if tag.get('Value').lstrip().lower() in private_hosted_zone_collection:
                    print 'Private zone found:', tag.get('Value')
                    private_hosted_zone_name = tag.get('Value').lstrip().lower()
                    private_hosted_zone_id = get_zone_id(private_hosted_zone_name)
                    private_hosted_zone_properties = get_hosted_zone_properties(private_hosted_zone_id)
                    if state == 'running':
                        if vpc_id in map(lambda x: x['VPCId'], private_hosted_zone_properties['VPCs']):
                            print 'Private hosted zone %s is associated with VPC %s' % (private_hosted_zone_id, vpc_id)
                        else:
                            print 'Associating zone %s with VPC %s' % (private_hosted_zone_id, vpc_id)
                            try:
                                associate_zone(private_hosted_zone_id, region, vpc_id)
                            except BaseException as e:
                                print 'You cannot create an association with a VPC with an overlapping subdomain.\n', e
                                exit()
                        try:
                            create_resource_record(private_hosted_zone_id, private_host_name, private_hosted_zone_name, 'A', private_ip)
                            create_resource_record(reverse_lookup_zone_id, reversed_ip_address, 'in-addr.arpa', 'PTR', private_dns_name)
                        except BaseException as e:
                            print e
                    else:
                        try:
                            delete_resource_record(private_hosted_zone_id, private_host_name, private_hosted_zone_name, 'A', private_ip)
                            delete_resource_record(reverse_lookup_zone_id, reversed_ip_address, 'in-addr.arpa', 'PTR', private_dns_name)
                        except BaseException as e:
                            print e
                    # create PTR record
                elif tag.get('Value').lstrip().lower() in public_hosted_zones_collection:
                    print 'Public zone found', tag.get('Value')
                    public_hosted_zone_name = tag.get('Value').lstrip().lower()
                    public_hosted_zone_id = get_zone_id(public_hosted_zone_name)
                    # create A record in public zone
                    if state =='running':
                        try:
                            create_resource_record(public_hosted_zone_id, public_host_name, public_hosted_zone_name, 'A', public_ip)
                        except BaseException as e:
                            print e
                    else:
                        try:
                            delete_resource_record(public_hosted_zone_id, public_host_name, public_hosted_zone_name, 'A', public_ip)
                        except BaseException as e:
                            print e
                else:
                    print 'No matching zone found for %s' % tag.get('Value')
            else:
                print '%s is not a valid host name' % tag.get('Value')
        # Consider making this an elif CNAME
        else:
            print 'The tag \'%s\' is not a zone tag' % tag.get('Key')
        if 'CNAME' in tag.get('Key',{}).lstrip().upper():
            if is_valid_hostname(tag.get('Value')):
                cname = tag.get('Value').lstrip().lower()
                cname_host_name = cname.split('.')[0]
                cname_domain_suffix = cname[cname.find('.')+1:]
                cname_domain_suffix_id = get_zone_id(cname_domain_suffix)
                for cname_private_hosted_zone in private_hosted_zone_collection:
                    cname_private_hosted_zone_id = get_zone_id(cname_private_hosted_zone)
                    if cname_domain_suffix_id == cname_private_hosted_zone_id:
                        if cname.endswith(cname_private_hosted_zone):
                            #create CNAME record in private zone
                            if state == 'running':
                                try:
                                    create_resource_record(cname_private_hosted_zone_id, cname_host_name, cname_private_hosted_zone, 'CNAME', private_dns_name)
                                except BaseException as e:
                                    print e
                            else:
                                try:
                                    delete_resource_record(cname_private_hosted_zone_id, cname_host_name, cname_private_hosted_zone, 'CNAME', private_dns_name)
                                except BaseException as e:
                                    print e
                for cname_public_hosted_zone in public_hosted_zones_collection:
                    if cname.endswith(cname_public_hosted_zone):
                        cname_public_hosted_zone_id = get_zone_id(cname_public_hosted_zone)
                        #create CNAME record in public zone
                        if state == 'running':
                            try:
                                create_resource_record(cname_public_hosted_zone_id, cname_host_name, cname_public_hosted_zone, 'CNAME', public_dns_name)
                            except BaseException as e:
                                print e
                        else:
                            try:
                                delete_resource_record(cname_public_hosted_zone_id, cname_host_name, cname_public_hosted_zone, 'CNAME', public_dns_name)
                            except BaseException as e:
                                print e
    # Is there a DHCP option set?
    # Get DHCP option set configuration
    try:
        dhcp_options_id = vpc.dhcp_options_id
        dhcp_configurations = get_dhcp_configurations(dhcp_options_id)
    except BaseException as e:
        print 'No DHCP option set assigned to this VPC\n', e
        exit()
    # Look to see whether there's a DHCP option set assigned to the VPC.  If there is, use the value of the domain name
    # to create resource records in the appropriate Route 53 private hosted zone. This will also check to see whether
    # there's an association between the instance's VPC and the private hosted zone.  If there isn't, it will create it.
    for configuration in dhcp_configurations:
        if configuration[0] in private_hosted_zone_collection:
            private_hosted_zone_name = configuration[0]
            print 'Private zone found %s' % private_hosted_zone_name
            # TODO need a way to prevent overlapping subdomains
            private_hosted_zone_id = get_zone_id(private_hosted_zone_name)
            private_hosted_zone_properties = get_hosted_zone_properties(private_hosted_zone_id)
            # create A records and PTR records
            if state == 'running':
                if vpc_id in map(lambda x: x['VPCId'], private_hosted_zone_properties['VPCs']):
                    print 'Private hosted zone %s is associated with VPC %s' % (private_hosted_zone_id, vpc_id)
                else:
                    print 'Associating zone %s with VPC %s' % (private_hosted_zone_id, vpc_id)
                    try:
                        associate_zone(private_hosted_zone_id, region,vpc_id)
                    except BaseException as e:
                        print 'You cannot create an association with a VPC with an overlapping subdomain.\n', e
                        exit()
                try:
                    create_resource_record(private_hosted_zone_id, private_host_name, private_hosted_zone_name, 'A', private_ip)
                    create_resource_record(reverse_lookup_zone_id, reversed_ip_address, 'in-addr.arpa', 'PTR', private_dns_name)
                except BaseException as e:
                    print e
            else:
                try:
                    delete_resource_record(private_hosted_zone_id, private_host_name, private_hosted_zone_name, 'A', private_ip)
                    delete_resource_record(reverse_lookup_zone_id, reversed_ip_address, 'in-addr.arpa', 'PTR', private_dns_name)
                except BaseException as e:
                    print e
        else:
            print 'No matching zone for %s' % configuration[0]
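
Before writing the describe_instances response to DynamoDB, the handler above round-trips it through json.dumps(instance, default=json_serial) and json.loads, which turns datetime values into strings and leaves only JSON-safe types. A minimal sketch of that round-trip with a stand-in json_serial and a fabricated response fragment:

import datetime
import json

def json_serial(obj):
    # Stand-in for the example's json_serial: boto3 responses contain
    # datetime objects that json.dumps cannot serialize by itself.
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    raise TypeError("Type not serializable: %r" % (obj,))

instance = {                                    # fabricated response fragment
    'Reservations': [{
        'Instances': [{
            'InstanceId': 'i-0123456789abcdef0',
            'LaunchTime': datetime.datetime(2016, 1, 1, 12, 0, 0),
            'PrivateIpAddress': '10.0.0.12',
        }],
    }],
}

# dumps + loads leaves only dicts, lists, strings and numbers, which is
# what the put_item call expects.
instance_attributes = json.loads(json.dumps(instance, default=json_serial))
print(instance_attributes['Reservations'][0]['Instances'][0]['LaunchTime'])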

Example 82

Project: django-dynamic-choices Source File: admin.py
def dynamic_admin_factory(admin_cls):

    change_form_template = 'admin/dynamic_choices/change_form.html'

    class meta_cls(type(admin_cls)):

        "Metaclass that ensure form and inlines are dynamic"
        def __new__(cls, name, bases, attrs):
            # If there's already a form defined we make sure to subclass it
            if 'form' in attrs:
                attrs['form'] = dynamic_model_form_factory(attrs['form'])
            else:
                attrs['form'] = DynamicModelForm

            # Make sure the specified add|change_form_template
            # extends "admin/dynamic_choices/change_form.html"
            for t, default in [('add_form_template', None),
                               ('change_form_template', change_form_template)]:
                if t in attrs:
                    if not template_extends(attrs[t], change_form_template):
                        raise ImproperlyConfigured(
                            "Make sure %s.%s template extends '%s' in order to enable DynamicAdmin" % (
                                name, t, change_form_template
                            )
                        )
                else:
                    attrs[t] = default

            # If there's some inlines defined we make sure that their form is dynamic
            # see dynamic_inline_factory
            if 'inlines' in attrs:
                attrs['inlines'] = [dynamic_inline_factory(inline_cls) for inline_cls in attrs['inlines']]

            return super(meta_cls, cls).__new__(cls, name, bases, attrs)

    class cls(with_metaclass(meta_cls, admin_cls)):
        def _media(self):
            media = super(cls, self).media
            media.add_js(('js/dynamic-choices.js',
                          'js/dynamic-choices-admin.js'))
            return media
        media = property(_media)

        def get_urls(self):
            def wrap(view):
                def wrapper(*args, **kwargs):
                    return self.admin_site.admin_view(view)(*args, **kwargs)
                return update_wrapper(wrapper, view)

            info = self.model._meta.app_label, self.model._meta.model_name

            urlpatterns = [
                url(r'(?:add|(?P<object_id>\w+))/choices/$',
                    wrap(self.dynamic_choices),
                    name="%s_%s_dynamic_admin" % info),
            ] + super(cls, self).get_urls()

            return urlpatterns

        def get_dynamic_choices_binder(self, request):

            def id(field):
                return "[name='%s']" % field

            def inline_field_selector(fieldset, field):
                return "[name^='%s-'][name$='-%s']" % (fieldset, field)

            fields = {}

            def add_fields(to_fields, to_field, bind_fields):
                if not (to_field in to_fields):
                    to_fields[to_field] = set()
                to_fields[to_field].update(bind_fields)

            model_name = self.model._meta.model_name

            # Use get_form in order to allow formfield override
            # We should create a fake request from referer but all this
            # hack will be fixed when the code is embed directly in the page
            form = self.get_form(request)()
            rels = form.get_dynamic_relationships()
            for rel in rels:
                field_name = rel.split(LOOKUP_SEP)[0]
                if rel in form.fields:
                    add_fields(fields, id(field_name), [id(field) for field in rels[rel] if field in form.fields])

            inlines = {}
            for formset, _inline in self.get_formsets_with_inlines(request):
                inline = {}
                formset_form = formset.form()
                inline_rels = formset_form.get_dynamic_relationships()
                prefix = formset.get_default_prefix()
                for rel in inline_rels:
                    if LOOKUP_SEP in rel:
                        base, field = rel.split(LOOKUP_SEP)[0:2]
                        if base == model_name and field in form.fields:
                            bound_fields = [
                                inline_field_selector(prefix, f)
                                for f in inline_rels[rel] if f in formset_form.fields
                            ]
                            add_fields(fields, id(field), bound_fields)
                        elif base in formset_form.fields:
                            add_fields(inline, base, inline_rels[rel])
                    elif rel in formset_form.fields:
                        add_fields(inline, rel, inline_rels[rel])
                if len(inline):
                    inlines[prefix] = inline

            # Replace sets in order to allow JSON serialization
            for field, bound_fields in fields.items():
                fields[field] = list(bound_fields)

            for fieldset, inline_fields in inlines.items():
                for field, bound_fields in inline_fields.items():
                    inlines[fieldset][field] = list(bound_fields)

            return SafeText("django.dynamicAdmin(%s, %s);" % (json.dumps(fields), json.dumps(inlines)))

        def dynamic_choices(self, request, object_id=None):

            opts = self.model._meta
            obj = self.get_object(request, object_id)
            # Make sure the specified object exists
            if object_id is not None and obj is None:
                raise Http404('%(name)s object with primary key %(key)r does not exist.' % {
                              'name': force_text(opts.verbose_name), 'key': escape(object_id)})

            form = self.get_form(request)(request.GET, instance=obj)
            data = get_dynamic_choices_from_form(form)

            for formset, _inline in self.get_formsets_with_inlines(request, obj):
                prefix = formset.get_default_prefix()
                try:
                    fs = formset(request.GET, instance=obj)
                    forms = fs.forms + [fs.empty_form]
                except ValidationError:
                    return HttpResponseBadRequest("Missing %s ManagementForm data" % prefix)
                for form in forms:
                    data.update(get_dynamic_choices_from_form(form))

            if 'DYNAMIC_CHOICES_FIELDS' in request.GET:
                fields = request.GET.get('DYNAMIC_CHOICES_FIELDS').split(',')
                for field in list(data):
                    if field not in fields:
                        del data[field]

            return HttpResponse(lazy_encoder.encode(data), content_type='application/json')

        if django.VERSION >= (1, 7):
            _get_formsets_with_inlines = admin_cls.get_formsets_with_inlines
        else:
            def _get_formsets_with_inlines(self, request, obj=None):
                formsets = super(cls, self).get_formsets(request, obj)
                inlines = self.get_inline_instances(request, obj)
                for formset, inline in zip(formsets, inlines):
                    yield formset, inline

            def get_formsets(self, request, obj=None):
                for formset, _inline in self.get_formsets_with_inlines(request, obj):
                    yield formset

        def get_formsets_with_inlines(self, request, obj=None):
            # Make sure to pass request data to fieldsets
            # so they can use it to define choices
            initial = {}
            model = self.model
            opts = model._meta
            data = getattr(request, request.method).items()
            # If an object is provided we collect data
            if obj is not None:
                initial.update(model_to_dict(obj))
            # Make sure to collect parent model data
            # and provide it to fieldsets in the form of
            # parent__field from request if its provided.
            # This data should be more "up-to-date".
            for k, v in data:
                if v:
                    try:
                        f = opts.get_field(k)
                    except models.FieldDoesNotExist:
                        continue
                    if isinstance(f, models.ManyToManyField):
                        initial[k] = v.split(",")
                    else:
                        initial[k] = v

            for formset, inline in self._get_formsets_with_inlines(request, obj):
                fk = _get_foreign_key(self.model, inline.model, fk_name=inline.fk_name).name
                fk_initial = dict(('%s__%s' % (fk, k), v) for k, v in initial.items())
                # If we must provide additional data
                # we must wrap the formset in a subclass
                # because passing 'initial' key argument is intercepted
                # and not provided to subclasses by BaseInlineFormSet.__init__
                if len(initial):
                    formset = dynamic_formset_factory(formset, fk_initial)
                yield formset, inline

        def add_view(self, request, form_url='', extra_context=None):
            context = {'dynamic_choices_binder': self.get_dynamic_choices_binder(request)}
            context.update(extra_context or {})
            return super(cls, self).add_view(request, form_url='', extra_context=context)

        def change_view(self, request, object_id, extra_context=None):
            context = {'dynamic_choices_binder': self.get_dynamic_choices_binder(request)}
            context.update(extra_context or {})
            return super(cls, self).change_view(request, object_id, extra_context=context)

    return cls
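
get_dynamic_choices_binder above converts its sets to lists first, because json.dumps cannot serialize sets, and then interpolates the two dumps results into a JavaScript call. A short sketch of that step with made-up field names:

import json

fields = {
    "[name='country']": {"[name='city']", "[name='region']"},   # made-up selectors
}

# Sets are not JSON serializable, so replace them with (sorted) lists
# before building the JavaScript snippet.
serializable = {field: sorted(bound) for field, bound in fields.items()}

binder = "django.dynamicAdmin(%s, %s);" % (json.dumps(serializable), json.dumps({}))
print(binder)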

Example 83

Project: wikipedia-tags-in-osm Source File: launch_script.py
    def __init__(self):
        #Options
        text = """Starting from a list of Wikipedia categories written by the user in
'config.cfg' file, the script:
- downloads/updates a national OSM data file
- downloads (from Quick Intersection) Wikipedia data regarding the selected
 categories (subcategories and articles names)
- creates webpages for showing which articles are already tagged and
 which ones are not.
"""
        parser = argparse.ArgumentParser(description=text)
        group = parser.add_mutually_exclusive_group()
        #Manage OSM data
        parser.add_argument("-d", "--download_osm",
                            help="Download OSM data of the country (from Geofabrik)",
                            action="store_true")
        parser.add_argument("-u", "--update_osm",
                            help="Update downloaded OSM data of the country (through osmupdate)",
                            action="store_true")
        #Analyze data from Wikipedia and OSM
        parser.add_argument("-a", "--analyze",
                            help="Analyze Wikipedia data (categories' sub-categories and articles) ed OSM data (existing Wikipedia tags)",
                            action="store_true")
        parser.add_argument("--category_info",
                            help="Analyze data and print informations regarding a specific category",
                            action="store")
        parser.add_argument("-t", "--show_missing_templates",
                            help="Mark on web pages the articles that miss geo template (Coord)",
                            action="store_true")
        parser.add_argument("-c", "--show_link_to_wikipedia_coordinates",
                            help="If a non-tagged article have the coordinates on Wikipedia, show on the web pages a link to zoom on its position with JOSM/iD",
                            action="store_true")
        parser.add_argument("-o", "--show_coordinates_from_osm",
                            help="Calculate OSM coordinates of articles (point for nodes, centroids for ways and relations)",
                            action="store_true")
        parser.add_argument("-n", "--infer_coordinates_from_wikipedia",
                            help="Use Nuts4Nuts to calculate the coordinates of a non tagged article whithout coordinates on Wikipedia",
                            action="store_true")
        group.add_argument("-p", "--print_categories_list",
                           help="Analyze data and print project's categories.",
                           action="store_true")
        #Create webpages
        group.add_argument("-w", "--create_webpages",
                           help="Analyze data and create web pages",
                           action="store_true")
        parser.add_argument("-s", "--save_stats",
                            help="If web pages have been created, store the updated number of tagged articles (default: ask to user).",
                            action="store_true")
        parser.add_argument("--browser",
                            help="Open the web pages with the system browser after creation.",
                            action="store_true")
        parser.add_argument("--copy",
                            help="Copy html folder to the directory configured on `config.cfg` (eg. dropbox dir).",
                            action="store_true")
        parser.add_argument("--locale",
                            nargs='+',
                            dest='locales',
                            metavar='LANG',
                            help="Generate pages in the specified locales. Default: use the system locale. ")

        self.args = parser.parse_args()
        if self.args.category_info or self.args.category_info\
           or self.args.create_webpages or self.args.print_categories_list\
           or self.args.show_missing_templates\
           or self.args.show_coordinates_from_osm:
            self.args.analyze = True

        # Default value for locale
        # get system locale
        sys_locale_langcode, sys_locale_encoding = locale.getdefaultlocale()

        if not self.args.locales:
            self.args.locales = [sys_locale_langcode]

        if len(sys.argv) == 1:
            parser.print_help()
            sys.exit(1)
        os.chdir(os.path.dirname(sys.argv[0]))

        #Configurations
        themesAndCatsNames = self.read_config()

### Manage OpenStreetMap data ##########################################
        #Analyse national OSM data file and create lists of already
        #tagged Wikipedia articles.

        #Download/update OSM data
        if self.args.download_osm or self.args.update_osm:
            if self.args.download_osm:
                OSM.download_osm_data(self)
            if self.args.update_osm:
                status = OSM.update_osm_data(self)
        if self.args.download_osm or (self.args.update_osm and status):
            OSM.filter_wikipedia_data_in_osm_file(self)
        if self.args.update_osm and not status:
            print "OSM data where already uptodate or osmupdate has been interrupted.\
To repeat the updating process, launch the script again with the `-u` option."

        if not self.args.analyze:
            #"There's nothing left for me to tell you"
            sys.exit(1)
        else:
            if not os.path.isfile(self.wOSMFile):
                OSM.filter_wikipedia_data_in_osm_file(self)
            #Extract Wikipedia articles tagged in OSM with preferred language.
            #If an article is tagged in a foreign language, ask Wikipedia
            #what the corresponding article in the preferred language is, so
            #that we can flag it as tagged as well.
            print "\n- Read from the OSM file the articles already tagged"
            parseOSMData = ParseOSMData(self)
            #list of Wikipedia tags in OSM
            self.tagsInOSM = parseOSMData.allTags
            self.tagsData = parseOSMData.tagsData
            #list of tagged Wikipedia articles
            self.taggedTitles = parseOSMData.titles
            #tags with errors
            self.wrongTags = parseOSMData.wrongTags
            #ugly tags (with url, language capitalized...), not errors
            self.badTags = parseOSMData.badTags
            #add articles manually flagged as tagged in data/workaround/tagged.csv
            #in case the parser misses them (strange tags)
            self.add_tagged_articles()

            if self.args.show_coordinates_from_osm:
                print "\n--- Add OSM coordinates to the articles"
                parseOSMData.get_centroids()

### Manage Wikipedia data ##############################################
        #Read from 'non-mappable' file the categories and articles that
        #aren't mappable e.g. "Paintings in the X museum",
        #self.nonMappable = {mainCategory.name : {"articles" : [], "subcategories" : []}}
        self.nonMappable = self.read_non_mappable_items()

        #Check if we have Wikipedia data from Quick Intersection of all the
        #categories in the project (config.cfg file)
        themesAndCatsNames = wikipedia_downloader.check_catscan_data(self, themesAndCatsNames)

        #Organize Wikipedia data.
        #self.themes = [Theme(), ...]
        #  Theme().categories = [Category(), ...]
        #    Category().subcategories = [Category(), ...]
        #    Category().articles = [Article(), ...]
        #categories without Quick Intersection data
        self.categoriesWithoutData = []
        allThemes = Themes(self, themesAndCatsNames)
        self.themes = allThemes.themesList

        #Organize data in regions, for a different visualization
        #self.regions = [Region()]
        #  Region().categories = [Category(), ... ]
        self.regions = []
        if self.regionsNames != []:
            self.regions = Regions(self).regionsList

        #Print names of all categories
        if self.args.print_categories_list:
            self.display_categories_names()
            if not self.args.category_info:
                #"There's nothing left for me to tell you"
                sys.exit(1)

### Merge OSM info into Wikipedia data #################################
        #Add to the Wikipedia category and article instances info about
        #their status in OSM (tagged/not tagged), OSM ids and counters
        print ("\n- Check which articles are already tagged in the country's "
               "OSM file")
        for theme in self.themes:
            for category in theme.categories:
                category.check_articles_in_osm()
        self.titlesInOSM, self.titlesNotInOSM = allThemes.lists_of_titles_in_osm_or_not()

        #Ask Wikipedia which articles do or do not have the Coord template.
        #Articles with article.hasTemplate == False will be marked on web pages.
        if self.args.show_missing_templates:
            print "\n- Check which articles miss geo template (Coord) in Wikipedia"
            self.templatesStatus = wikipedia_downloader.read_old_templates_status(self)
            wikipedia_downloader.update_templates_status(self)
            #Set hasTemplate = False to articles without Coord template
            for theme in self.themes:
                for category in theme.categories:
                    category.set_has_template_in_articles()

        #If an article is not already tagged in OSM but Wikipedia knows its
        #position, it is possible to add a link to zoom to that position
        #with JOSM.
        if self.args.show_link_to_wikipedia_coordinates:
            print "\n- Check the non tagged articles whose position is known by Wikipedia"
            wikipedia_downloader.add_wikipedia_coordinates(self)
            #Save GeoJSON file with titles and coordinates known by Wikipedia
            self.save_titles_with_coords_geojson()

        if self.args.infer_coordinates_from_wikipedia:
            print "\n- Use Nuts4Nuts to infer coordinates of non tagged articles, whose position is unknown by Wikipedia"
            nuts4nuts_infer.infer_coordinates_with_nuts4nuts(self)

        #For debugging
        # print info about a specific category
        if self.args.category_info:
            self.print_category_info(self.args.category_info.replace(" ", "_"))
            if self.args.create_webpages:
                raw_input("\nContinue?[Press any key]")
        # write categories trees to text files (uncomment lines)
        if self.print_categories_to_text_files == "true":
            for theme in self.themes:
                for category in theme.categories:
                    category.print_category_tree_to_file()

        #Read and update stats with the number of tagged articles
        self.dates, self.days = self.read_past_stats()
        download_other_countries = False
        self.todayDate, today = self.read_new_stats(download_other_countries)
        self.days.append(today)
        self.dates.append(self.todayDate)
        if len(self.dates) > 1 and self.todayDate == self.dates[-2]:
                #This is the second analysis of today.
                #Overwrite the previous statistics
                del self.dates[-2]
                del self.days[-2]
                print "\n This is the second time that data ara analyzed today. \
The number of tagged articles will replace that of the lust run in the tags' numbers table."

        #Count tags added by each user
        self.users = Users(self).users

        #Create a json file with the data (needed by non_mappable.html)
        tree = {"mappable": True,
                "name": "Main",
                "size": 1,
                "children": []}
        for theme in self.themes:
            for category in theme.categories:
                tree["children"].append(category.build_json_tree())
        ifile = open(os.path.join(self.HTMLDIR, "json", "main.json"), "w")
        data = json.dumps(tree)
        ifile.write(data)
        ifile.close()

        #Create webpages
        if self.args.create_webpages:
            # Restrict to the supported locales
            self.locales = frozenset(self.SUPPORTED_LOCALES).intersection(
                frozenset(self.args.locales))

            non_supported_locales = frozenset(self.args.locales) - \
                                        frozenset(self.SUPPORTED_LOCALES)

            for locale_langcode in non_supported_locales:
                print 'Warning: dropping unsupported locale: {0}'.format(
                       locale_langcode)

            # if no supported locale is chosen fallback to en_US
            if not self.locales:
                self.locales = frozenset(['en_US'])

            for locale_langcode in self.locales:
                self.translations = Translations.load("locale",
                                                      [locale_langcode]
                                                      )
                self._ = self.translations.ugettext
                print "\n- Create web pages with locale: ", locale_langcode
                Creator(self, locale_langcode)

                if self.args.browser:
                    url = os.path.join('html', locale_langcode, 'index.html')
                    # using .get() suppresses stdout output from the browser,
                    # but won't suppress stderr
                    webbrowser.get().open_new(url)

            # Create the index.html in the main HTMLDIR to redirect to one
            # locales directory
            for lang in self.locales:
                if self.WIKIPEDIALANG in lang:
                    Redirect(self, lang)
                    break

            #Save stats
            if self.args.save_stats:
                self.save_stats_to_csv()
                print "\nNew stats have been saved."
            else:
                print "\nNo stats saved."

        #Copy files from html dir to outdir (for example a Dropbox directory)
        if self.args.copy:
            self.copy_html_files_to_outdir()

        print "\nDone."

Example 84

Project: nzbToMedia Source File: autoProcessTV.py
    def processEpisode(self, section, dirName, inputName=None, failed=False, clientAgent="manual", download_id=None, inputCategory=None, failureLink=None):

        cfg = dict(core.CFG[section][inputCategory])

        host = cfg["host"]
        port = cfg["port"]
        ssl = int(cfg.get("ssl", 0))
        web_root = cfg.get("web_root", "")
        protocol = "https://" if ssl else "http://"

        if not server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)):
            logger.error("Server did not respond. Exiting", section)
            return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]

        # auto-detect correct fork
        fork, fork_params = autoFork(section, inputCategory)

        username = cfg.get("username", "")
        password = cfg.get("password", "")
        apikey = cfg.get("apikey", "")
        delete_failed = int(cfg.get("delete_failed", 0))
        nzbExtractionBy = cfg.get("nzbExtractionBy", "Downloader")
        process_method = cfg.get("process_method")
        remote_path = int(cfg.get("remote_path", 0))
        wait_for = int(cfg.get("wait_for", 2))
        force = int(cfg.get("force", 0))
        delete_on = int(cfg.get("delete_on", 0))
        ignore_subs = int(cfg.get("ignore_subs", 0))
        extract = int(cfg.get("extract", 0))

        if not os.path.isdir(dirName) and os.path.isfile(dirName):  # If the input directory is a file, assume single file download and split dir/name.
            dirName = os.path.split(os.path.normpath(dirName))[0]

        SpecificPath = os.path.join(dirName, str(inputName))
        cleanName = os.path.splitext(SpecificPath)
        if cleanName[1] == ".nzb":
            SpecificPath = cleanName[0]
        if os.path.isdir(SpecificPath):
            dirName = SpecificPath

        # Attempt to create the directory if it doesn't exist and ignore any
        # error stating that it already exists. This fixes a bug where SickRage
        # won't process the directory because it doesn't exist.
        try:
            os.makedirs(dirName)  # Attempt to create the directory
        except OSError as e:
            # Re-raise the error if it wasn't about the directory not existing
            if e.errno != errno.EEXIST:
                raise

        if 'process_method' not in fork_params or (clientAgent in ['nzbget', 'sabnzbd'] and nzbExtractionBy != "Destination"):
            if inputName:
                process_all_exceptions(inputName, dirName)
                inputName, dirName = convert_to_ascii(inputName, dirName)

            # Now check if tv files exist in destination. 
            if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
                if listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
                    logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
                    core.extractFiles(dirName)
                    inputName, dirName = convert_to_ascii(inputName, dirName)

            if listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):  # Check that a video exists. if not, assume failed.
                flatten(dirName)

        # Check video files for corruption
        status = int(failed)
        good_files = 0
        num_files = 0
        for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
            num_files += 1
            if transcoder.isVideoGood(video, status):
                good_files += 1
                import_subs(video)
        if num_files > 0:
            if good_files == num_files and not status == 0:
                logger.info('Found Valid Videos. Setting status Success')
                status = 0
                failed = 0
            if good_files < num_files and status == 0:
                logger.info('Found corrupt videos. Setting status Failed')
                status = 1
                failed = 1
                if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                    print('[NZB] MARK=BAD')
                if failureLink:
                    failureLink += '&corrupt=true'
        elif clientAgent == "manual":
            logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
            return [0, ""]  # Success (as far as this script is concerned)
        elif nzbExtractionBy == "Destination":
            logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.")
            if int(failed) == 0:
                logger.info("Setting Status Success.")
                status = 0
                failed = 0
            else:
                logger.info("Downloader reported an error during download or verification. Processing this as a failed download.")
                status = 1
                failed = 1
        else:
            logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
            status = 1
            failed = 1
            if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                print('[NZB] MARK=BAD')

        if status == 0 and core.TRANSCODE == 1:  # only transcode successful downloads
            result, newDirName = transcoder.Transcode_directory(dirName)
            if result == 0:
                logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dirName), section)
                dirName = newDirName

                chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
                logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
                if chmod_directory:
                    logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
                    core.rchmod(dirName, chmod_directory)
            else:
                logger.error("FAILED: Transcoding failed for files in {0}".format(dirName), section)
                return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]

        # configure SB params to pass
        fork_params['quiet'] = 1
        fork_params['proc_type'] = 'manual'
        if inputName is not None:
            fork_params['nzbName'] = inputName

        for param in copy.copy(fork_params):
            if param == "failed":
                fork_params[param] = failed

            if param in ["dirName", "dir", "proc_dir"]:
                fork_params[param] = dirName
                if remote_path:
                    fork_params[param] = remoteDir(dirName)

            if param == "process_method":
                if process_method:
                    fork_params[param] = process_method
                else:
                    del fork_params[param]

            if param == "force":
                if force:
                    fork_params[param] = force
                else:
                    del fork_params[param]

            if param == "delete_on":
                if delete_on:
                    fork_params[param] = delete_on
                else:
                    del fork_params[param]

            if param == "ignore_subs":
                if ignore_subs:
                    fork_params[param] = ignore_subs
                else:
                    del fork_params[param]

        # delete any unused params so we don't pass them to SB by mistake
        [fork_params.pop(k) for k, v in fork_params.items() if v is None]

        if status == 0:
            logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section)
        else:
            core.FAILED = True
            if failureLink:
                reportNzb(failureLink, clientAgent)
            if 'failed' in fork_params:
                logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section)
            elif section == "NzbDrone":
                logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(fork), section)
                return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)]  # Return as failed to flag this in the downloader.
            else:
                logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section)
                if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
                    logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
                    rmDir(dirName)
                return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)]  # Return as failed to flag this in the downloader.

        url = None
        if section == "SickBeard":
            url = "{0}{1}:{2}{3}/home/postprocess/processEpisode".format(protocol, host, port, web_root)
        elif section == "NzbDrone":
            url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
            url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root)
            headers = {"X-Api-Key": apikey}
            # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
            if remote_path:
                logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
                data = {"name": "DownloadedEpisodesScan", "path": remoteDir(dirName), "downloadClientId": download_id}
            else:
                logger.debug("path: {0}".format(dirName), section)
                data = {"name": "DownloadedEpisodesScan", "path": dirName, "downloadClientId": download_id}
            if not download_id:
                data.pop("downloadClientId")
            data = json.dumps(data)

        try:
            if section == "SickBeard":
                logger.debug("Opening URL: {0} with params: {1}".format(url, fork_params), section)
                s = requests.Session()
                login = "{0}{1}:{2}{3}/login".format(protocol, host, port, web_root)
                login_params = {'username': username, 'password': password}
                s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
                r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))
            elif section == "NzbDrone":
                logger.debug("Opening URL: {0} with data: {1}".format(url, data), section)
                r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
        except requests.ConnectionError:
            logger.error("Unable to open URL: {0}".format(url), section)
            return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]

        if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
            logger.error("Server returned status {0}".format(r.status_code), section)
            return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]

        Success = False
        Started = False
        if section == "SickBeard":
            for line in r.iter_lines():
                if line:
                    logger.postprocess("{0}".format(line), section)
                    if "Moving file from" in line:
                        inputName = os.path.split(line)[1]
                    if "Processing succeeded" in line or "Successfully processed" in line:
                        Success = True
        elif section == "NzbDrone":
            try:
                res = json.loads(r.content)
                scan_id = int(res['id'])
                logger.debug("Scan started with id: {0}".format(scan_id), section)
                Started = True
            except Exception as e:
                logger.warning("No scan id was returned due to: {0}".format(e), section)
                scan_id = None
                Started = False

        if status != 0 and delete_failed and not os.path.dirname(dirName) == dirName:
            logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
            rmDir(dirName)

        if Success:
            return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
        elif section == "NzbDrone" and Started:
            n = 0
            params = {}
            url = "{0}/{1}".format(url, scan_id)
            while n < 6:  # set up wait_for minutes to see if command completes..
                time.sleep(10 * wait_for)
                command_status = self.command_complete(url, params, headers, section)
                if command_status and command_status in ['completed', 'failed']:
                    break
                n += 1
            if command_status:
                logger.debug("The Scan command return status: {0}".format(command_status), section)
            if not os.path.exists(dirName):
                logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
            elif command_status and command_status in ['completed']:
                logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
            elif command_status and command_status in ['failed']:
                logger.debug("The Scan command has failed. Renaming was not successful.", section)
                # return [1, "%s: Failed to post-process %s" % (section, inputName) ]
            if self.CDH(url2, headers, section=section):
                logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
                return [status, "{0}: Complete Download Handling is enabled. Passing back to {1}".format(section, section)]
            else:
                logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section)
                return [1, "{0}: Failed to post-process {1}".format(section, inputName)]
        else:
            return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]  # We did not receive Success confirmation.

Example 85

Project: PokemonGo-Map Source File: runserver.py
def main():
    # Patch threading to make exceptions catchable
    install_thread_excepthook()

    # Make sure exceptions get logged
    sys.excepthook = handle_exception

    args = get_args()

    # Add file logging if enabled
    if args.verbose and args.verbose != 'nofile':
        filelog = logging.FileHandler(args.verbose)
        filelog.setFormatter(logging.Formatter('%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] %(message)s'))
        logging.getLogger('').addHandler(filelog)
    if args.very_verbose and args.very_verbose != 'nofile':
        filelog = logging.FileHandler(args.very_verbose)
        filelog.setFormatter(logging.Formatter('%(asctime)s [%(threadName)16s][%(module)14s][%(levelname)8s] %(message)s'))
        logging.getLogger('').addHandler(filelog)

    # Check if we have the proper encryption library file and get its path
    encryption_lib_path = get_encryption_lib_path(args)
    if encryption_lib_path == "":
        sys.exit(1)

    if args.verbose or args.very_verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)

    # Let's not forget to run Grunt / Only needed when running with webserver
    if not args.no_server:
        if not os.path.exists(os.path.join(os.path.dirname(__file__), 'static/dist')):
            log.critical('Missing front-end assets (static/dist) -- please run "npm install && npm run build" before starting the server')
            sys.exit()

    # These are very noisy, let's shush them up a bit
    logging.getLogger('peewee').setLevel(logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.pgoapi').setLevel(logging.WARNING)
    logging.getLogger('pgoapi.rpc_api').setLevel(logging.INFO)
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    config['parse_pokemon'] = not args.no_pokemon
    config['parse_pokestops'] = not args.no_pokestops
    config['parse_gyms'] = not args.no_gyms

    # Turn these back up if debugging
    if args.verbose or args.very_verbose:
        logging.getLogger('pgoapi').setLevel(logging.DEBUG)
    if args.very_verbose:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
        logging.getLogger('requests').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.pgoapi').setLevel(logging.DEBUG)
        logging.getLogger('pgoapi.rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('rpc_api').setLevel(logging.DEBUG)
        logging.getLogger('werkzeug').setLevel(logging.DEBUG)

    # use lat/lng directly if matches such a pattern
    prog = re.compile("^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$")
    res = prog.match(args.location)
    if res:
        log.debug('Using coordinates from CLI directly')
        position = (float(res.group(1)), float(res.group(2)), 0)
    else:
        log.debug('Looking up coordinates in API')
        position = util.get_pos_by_name(args.location)

    # Use the latitude and longitude to get the local altitude from Google
    try:
        url = 'https://maps.googleapis.com/maps/api/elevation/json?locations={},{}'.format(
            str(position[0]), str(position[1]))
        altitude = requests.get(url).json()[u'results'][0][u'elevation']
        log.debug('Local altitude is: %sm', altitude)
        position = (position[0], position[1], altitude)
    except (requests.exceptions.RequestException, IndexError, KeyError):
        log.error('Unable to retrieve altitude from Google APIs; setting to 0')

    if not any(position):
        log.error('Could not get a position by name, aborting')
        sys.exit()

    log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)',
             position[0], position[1], position[2])

    if args.no_pokemon:
        log.info('Parsing of Pokemon disabled')
    if args.no_pokestops:
        log.info('Parsing of Pokestops disabled')
    if args.no_gyms:
        log.info('Parsing of Gyms disabled')
    if args.encounter:
        log.info('Encountering pokemon enabled')

    config['LOCALE'] = args.locale
    config['CHINA'] = args.china

    app = Pogom(__name__)
    db = init_database(app)
    if args.clear_db:
        log.info('Clearing database')
        if args.db_type == 'mysql':
            drop_tables(db)
        elif os.path.isfile(args.db):
            os.remove(args.db)
    create_tables(db)

    app.set_current_location(position)

    # Control the search status (running or not) across threads
    pause_bit = Event()
    pause_bit.clear()
    if args.on_demand_timeout > 0:
        pause_bit.set()

    heartbeat = [now()]

    # Setup the location tracking queue and push the first location on
    new_location_queue = Queue()
    new_location_queue.put(position)

    # DB Updates
    db_updates_queue = Queue()

    # Thread(s) to process database updates
    for i in range(args.db_threads):
        log.debug('Starting db-updater worker thread %d', i)
        t = Thread(target=db_updater, name='db-updater-{}'.format(i), args=(args, db_updates_queue))
        t.daemon = True
        t.start()

    # db cleaner; really only need one ever
    if not args.disable_clean:
        t = Thread(target=clean_db_loop, name='db-cleaner', args=(args,))
        t.daemon = True
        t.start()

    # WH Updates
    wh_updates_queue = Queue()

    # Thread to process webhook updates
    for i in range(args.wh_threads):
        log.debug('Starting wh-updater worker thread %d', i)
        t = Thread(target=wh_updater, name='wh-updater-{}'.format(i), args=(args, wh_updates_queue))
        t.daemon = True
        t.start()

    if not args.only_server:

        # Check all proxies before continue so we know they are good
        if args.proxy and not args.proxy_skip_check:

            # Overwrite old args.proxy with new working list
            args.proxy = check_proxies(args)

        # Gather the pokemons!

        # attempt to dump the spawn points (do this before starting threads or endure the woe)
        if args.spawnpoint_scanning and args.spawnpoint_scanning != 'nofile' and args.dump_spawnpoints:
            with open(args.spawnpoint_scanning, 'w+') as file:
                log.info('Saving spawn points to %s', args.spawnpoint_scanning)
                spawns = Pokemon.get_spawnpoints_in_hex(position, args.step_limit)
                file.write(json.dumps(spawns))
                log.info('Finished exporting spawn points')

        argset = (args, new_location_queue, pause_bit, heartbeat, encryption_lib_path, db_updates_queue, wh_updates_queue)

        log.debug('Starting a %s search thread', args.scheduler)
        search_thread = Thread(target=search_overseer_thread, name='search-overseer', args=argset)
        search_thread.daemon = True
        search_thread.start()

    if args.cors:
        CORS(app)

    # No more stale JS
    init_cache_busting(app)

    app.set_search_control(pause_bit)
    app.set_heartbeat_control(heartbeat)
    app.set_location_queue(new_location_queue)

    config['ROOT_PATH'] = app.root_path
    config['GMAPS_KEY'] = args.gmaps_key

    if args.no_server:
        # This loop allows ctrl-c interrupts to work since flask won't be holding the program open
        while search_thread.is_alive():
            time.sleep(60)
    else:
        ssl_context = None
        if args.ssl_certificate and args.ssl_privatekey \
                and os.path.exists(args.ssl_certificate) and os.path.exists(args.ssl_privatekey):
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            ssl_context.load_cert_chain(args.ssl_certificate, args.ssl_privatekey)
            log.info('Web server in SSL mode.')
        if args.verbose or args.very_verbose:
            app.run(threaded=True, use_reloader=False, debug=True, host=args.host, port=args.port, ssl_context=ssl_context)
        else:
            app.run(threaded=True, use_reloader=False, debug=False, host=args.host, port=args.port, ssl_context=ssl_context)
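
The spawn-point dump above writes json.dumps output to a file opened with 'w+'. Here is a short, self-contained sketch of the same idea, with hypothetical coordinates standing in for Pokemon.get_spawnpoints_in_hex().

import json

# Hypothetical spawn points standing in for Pokemon.get_spawnpoints_in_hex().
spawns = [{"lat": 40.7128, "lng": -74.0060, "time": 1200},
          {"lat": 40.7130, "lng": -74.0055, "time": 2400}]

# Serialize the whole list in one call and write it out; plain "w" is enough
# when the file is only written, while "w+" in the original also allows reading back.
with open("spawns.json", "w") as spawn_file:
    spawn_file.write(json.dumps(spawns))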

Example 86

Project: coala-bears Source File: CoffeeLintBear.py
Function: generate_config
    @staticmethod
    @deprecate_settings(indent_size='tab_width',
                        allow_increment=(
                            'no_decr_or_incrementation_operators', negate),
                        allow_no_parameters=(
                            'no_empty_parameter_list', negate),
                        allow_empty_functions=('no_empty_functions', negate),
                        allow_this_statements=('no_this', negate),
                        allow_implicit_parentheses=(
                            'no_implicit_parentheses', negate),
                        allow_interpolation_in_single_quotes=(
                            'no_interpolation_in_single_quotes', negate),
                        allow_stand_alone_at_sign=(
                            'no_stand_alone_at_sign', negate),
                        allow_throwing_strings=(
                            'disable_throwing_strings', negate),
                        allow_unnecessary_double_quotes=(
                            'no_unnecessary_double_quotes', negate),
                        allow_bitwise_operators=(
                            'use_english_operator', negate),
                        force_braces='no_implicit_braces')
    def generate_config(filename, file,
                        max_line_length: int=79,
                        max_line_length_affect_comments: bool=True,
                        space_before_and_after_arrow: bool=True,
                        check_braces_spacing: bool=False,
                        braces_spacing_width: int=1,
                        spacing_in_empty_braces: int=0,
                        class_naming_camelCase: bool=True,
                        spaces_before_and_after_colon: bool=False,
                        spaces_before_colon: int=0,
                        spaces_after_colon: int=1,
                        enforce_newline_at_EOF: bool=True,
                        use_spaces: bool=True,
                        indent_size: int=2,
                        number_of_newlines_after_classes: int=2,
                        prohibit_embedding_javascript_snippet: bool=True,
                        force_braces: bool=False,
                        allow_implicit_parentheses: bool=True,
                        allow_interpolation_in_single_quotes: bool=True,
                        allow_stand_alone_at_sign: bool=False,
                        allow_throwing_strings: bool=False,
                        allow_trailing_semicolons: bool=False,
                        allow_trailing_whitespaces: bool=False,
                        allow_unnecessary_double_quotes: bool=True,
                        allow_bitwise_operators: bool=True,
                        spaces_around_operators: bool=True,
                        space_after_comma: bool=True,
                        cyclomatic_complexity: int=0,
                        prevent_duplicate_keys: bool=True,
                        consistent_line_endings_style: str='',
                        allow_this_statements: bool=True,
                        allow_increment: bool=True,
                        allow_no_parameters: bool=True,
                        allow_empty_functions: bool=False,
                        enforce_parentheses_on_non_empty_constructors:
                            bool=True
                        ):
        """
        :param max_line_length:
            Maximum number of characters per line.
        :param max_line_length_affect_comments:
            Determines if ``max_line_length`` should also affects comments or
            not.
        :param space_before_and_after_arrow:
            Determines if spaces should be used before and after the arrow.
        :param check_braces_spacing:
            Checks if proper spacing is used inside curly braces.
        :param braces_spacing_width:
            Determines the number of blank spaces after the opening ``{`` and
            before the closing brace ``}`` given that there is something within
            the braces.
        :param spacing_in_empty_braces:
            Determines the number of blank spaces after the opening ``{`` and
            before the closing brace ``}`` given empty content.
        :param class_naming_camelCase:
            Checks whether the classes name should be in camel-case or not.
        :param spaces_before_and_after_colon:
            Checks the number of spaces before and after colon.
        :param spaces_before_colon:
            Determines the number of blank spaces before colon when
            ``spaces_before_and_after_colon == True``.
        :param spaces_after_colon:
            Determines the number of space after colon when
            ``spaces_before_and_after_colon == True``.
        :param enforce_newline_at_EOF:
            Checks if the file ends with a single newline.
        :param use_spaces:
            Forbids tabs in indentation and applies two spaces for this
            purpose.
        :param indent_size:
            Number of spaces per indentation level.
        :param number_of_newlines_after_classes:
            Determines the number of newlines that separate the class
            definition and the rest of the code.
        :param prohibit_embedding_javascript_snippet:
            Prevents some JavaScript elements like ``eval`` to affect
            CoffeeScript.
        :param force_braces:
            Prohibits implicit braces when declaring object literals.

            Example: If ``force_braces = True`` then
            ```
            1:2, 3:4
            ```
            is prohibited, whereas
            ```
            {1:2, 3:4}
            ```
            is accepted.
        :param allow_implicit_parentheses:
            Allows implicit parentheses.
        :param allow_interpolation_in_single_quotes:
            Allows string interpolation in a single quoted string.

            Example: If ``allow_interpolation_in_single_quotes = False`` then
            ```
            f = '#{bar}'
            ```
            is prohibited, whereas
            ```
            f = "#{bar}"
            ```
            is correct.
        :param allow_stand_alone_at_sign:
            Allows the use of stand alone  ``@``.

            Example: If ``allow_stand_alone_at_sign = False``
            ```
            @ notok
            not(@).ok
            @::
            ```
            are prohibited, whereas
            ```
            @alright
            @(fn)
            @ok()
            @[ok]
            @ok()
            ```
            are accepted.
        :param allow_throwing_strings:
            Allows throwing string literals or interpolation.

            Example: If ``allow_throwing_strings = False``
            ```
            throw 'my error'
            throw "#{1234}"
            ```
            will not be permitted.
        :param allow_trailing_semicolons:
            Prohibits trailing semicolons when ``False`` since they are
            not useful. The semicolon is meaningful only if there's another
            instruction on the same line.

            Example: If ``allow_trailing_semicolon = False``
            ```
            x = '1234'; console.log(x)
            ```
            Here the semicolon is meaningful.
            ```
            alert('end of line');
            ```
            This semicolon is redundant.
        :param allow_trailing_whitespaces:
            Checks whether to allow trailing whitespacess in the code or not.
        :param allow_unnecessary_double_quotes:
            Allows enclosing strings in double quotes.
        :param allow_bitwise_operators:
            Determines if ``and``, ``or``, ``is`` and ``isnt`` should be used
            instead of ``&&``, ``||``, ``==`` and ``!=``.
        :param spaces_around_operators:
            Enforces that operators have spaces around them.
        :param space_after_comma:
            Checks if there is a blank space after commas.
        :param cyclomatic_complexity:
            Maximum cyclomatic complexity of the file.
        :param prevent_duplicate_keys:
            Prevents defining duplicate keys in object literals and classes.
        :param enforce_parentheses_on_non_empty_constructors:
            Requires constructors with parameters to include parentheses.

            Example:
            ```
            class Foo
            # Warn about missing parentheses here
            a = new Foo
            b = new bar.foo.Foo
            # The parentheses make it clear no parameters are intended
            c = new Foo()
            d = new bar.foo.Foo()
            e = new Foo 1, 2
            f = new bar.foo.Foo 1, 2
            ```
        :param consistent_line_endings_style:
            The option to ``line_endings``, its value is either ``unix`` or
            ``windows``.
        :param allow_this_statements:
            Allows the use of ``this``. ``@`` should be used if ``False``.
        :param allow_increment:
            Allows the use of increment and decrement arithmetic operators.
        :param allow_no_parameters:
            Allows empty parameter lists in function definitions.
        :param allow_empty_functions:
            Allows declaring empty functions.
        """
        coffee_configs = {"max_line_length":
                          {"value": max_line_length,
                           "level": "error",
                           "limitComments":
                               max_line_length_affect_comments}}
        coffee_configs["arrow_spacing"] = (
            {"level": "error" if space_before_and_after_arrow else "ignore"})
        if check_braces_spacing:
            coffee_configs["braces_spacing"] = (
                {"level": "error",
                 "spaces": braces_spacing_width,
                 "empty_object_spaces": spacing_in_empty_braces})
        if class_naming_camelCase:
            coffee_configs["camel_case_classes"] = {"level": "error"}
        if spaces_before_and_after_colon:
            coffee_configs["colon_assignment_spacing"] = (
                {"level": "error",
                 "spacing": {"left": spaces_before_colon,
                             "right": spaces_after_colon}})
        coffee_configs["eol_last"] = (
            {"level": "error" if enforce_newline_at_EOF else "ignore"})
        coffee_configs["newlines_after_classes"] = (
            {"value": number_of_newlines_after_classes,
             "level": "error"})
        coffee_configs["no_backticks"] = (
            {"level": "error"
                if prohibit_embedding_javascript_snippet else "ignore"})
        if force_braces:
            coffee_configs["no_implicit_braces"] = (
                {"level": "error", "strict": True})
        if not allow_implicit_parentheses:
            coffee_configs["no_implicit_parens"] = (
                {"strict": True, "level": "error"})
        coffee_configs["no_interpolation_in_single_quotes"] = (
            {"level": "error"
                if not allow_interpolation_in_single_quotes else "ignore"})
        if not allow_stand_alone_at_sign:
            coffee_configs["no_stand_alone_at"] = {"level": "error"}
        if use_spaces:
            coffee_configs["no_tabs"] = {"level": "error"}
        coffee_configs["indentation"] = (
            {"value": indent_size, "level": "error"})
        coffee_configs["no_throwing_strings"] = (
            {"level": "error" if not allow_throwing_strings else "ignore"})
        coffee_configs["no_trailing_semicolons"] = (
            {"level": "error" if not allow_trailing_semicolons else "ignore"})
        if not allow_trailing_whitespaces:
            coffee_configs["no_trailing_whitespace"] = (
                {"level": "error",
                 "allowed_in_comments": True,
                 "allowed_in_empty_lines": True})
        if not allow_unnecessary_double_quotes:
            coffee_configs["no_unnecessary_double_quotes"] = {"level": "error"}
        if not allow_bitwise_operators:
            coffee_configs["prefer_english_operator"] = (
                {"level": "error", "doubleNotLevel": "ignore"})
        if spaces_around_operators:
            coffee_configs["space_operators"] = {"level": "error"}
        if space_after_comma:
            coffee_configs["spacing_after_comma"] = {"level": "warn"}
        coffee_configs["cyclomatic_complexity"] = (
                {"value": cyclomatic_complexity,
                 "level": ("error" if cyclomatic_complexity else 'ignore')})
        coffee_configs["duplicate_key"] = (
            {"level": "error" if prevent_duplicate_keys else "ignore"})
        if enforce_parentheses_on_non_empty_constructors:
            coffee_configs["non_empty_constructor_needs_parens"] = (
                {"level": "error"})
        if consistent_line_endings_style:
            coffee_configs["line_endings"] = (
                {"level": "error", "value": consistent_line_endings_style})
        if not allow_this_statements:
            coffee_configs["no_this"] = {"level": "error"}
        if not allow_increment:
            coffee_configs["no_plusplus"] = {"level": "error"}
        coffee_configs["no_empty_param_list"] = (
            {"level": "error" if not allow_no_parameters else "ignore"})
        coffee_configs["no_empty_functions"] = (
            {"level": "error" if not allow_empty_functions else "ignore"})

        return json.dumps(coffee_configs)
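
The bear above returns the assembled settings as a JSON string. A trimmed, hypothetical subset shows the same call; passing indent and sort_keys only changes how the emitted JSON reads, not its meaning.

import json

# A small, made-up subset of the coffeelint settings assembled above.
coffee_configs = {
    "max_line_length": {"value": 79, "level": "error", "limitComments": True},
    "arrow_spacing": {"level": "error"},
    "indentation": {"value": 2, "level": "error"},
}

# json.dumps returns the configuration as a string that can be handed to
# coffeelint; indent/sort_keys make the output stable and easier to diff.
print(json.dumps(coffee_configs, indent=2, sort_keys=True))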

Example 87

Project: community-evolution-analysis Source File: CommunityRanking_NONadaptive.py
    def commRanking(self,numTopComms, prevTimeslots,xLablNum):
        import itertools, tfidf 
        # from nltk.corpus import stopwords
        from wordcloud import  make_wordcloud
        from PIL import Image

        '''Detect the evolving communities'''
        uniCommIdsEvol=self.uniCommIdsEvol
        timeslots=self.timeslots

        tempcommRanking = {}
        #structure: tempcommRanking={Id:[persistence,stability,commCentrality,degreeness]}
        commRanking,fluctuation,lifetime = {},{},0
        for Id in self.uniCommIds:
            uniqueTimeSlLen = len(set(uniCommIdsEvol[Id][0]))
            timeSlLen=len(uniCommIdsEvol[Id][0])
            tempcommRanking[Id] = []
            tempcommRanking[Id].append(uniqueTimeSlLen / timeslots)#persistence
            tempcommRanking[Id].append((sum(np.diff(list(set(uniCommIdsEvol[Id][0]))) == 1) + 1) / (timeslots + 1))#stability
            tempcommRanking[Id].append(product([x+1 for x in uniCommIdsEvol[Id][1]]) / uniqueTimeSlLen)#commCentrality
            # tempcommRanking[Id].append(sum(uniCommIdsEvol[Id][4]) / timeslots)#Degreeness
            # tempcommRanking[Id].append(sum(uniCommIdsEvol[Id][5])/timeSlLen)#degree centrality
            # tempcommRanking[Id].append(sum(uniCommIdsEvol[Id][6])/timeSlLen)#betweeness centrality
            # '''Checking Theseus Ship'''
            # theseus=1+len(list(set(uniCommIdsEvol[Id][3][0]) & set(uniCommIdsEvol[Id][3][-1]))) / len(set(np.append(uniCommIdsEvol[Id][3][0], uniCommIdsEvol[Id][3][-1])))
            # tempcommRanking[Id].append(theseus)
            commRanking[Id] = np.prod(tempcommRanking[Id])

            #Construct average jaccardian between timeslots for each dyn comm
            if timeSlLen not in fluctuation:
                fluctuation[timeSlLen]=[(sum(uniCommIdsEvol[Id][7])/(timeSlLen-1))] #[1-sum(np.diff(list(set(uniCommIdsEvol[Id][0]))) == 1)/(lifetime-1)]
            else:
                fluctuation[timeSlLen].append((sum(uniCommIdsEvol[Id][7])/(timeSlLen-1)))#1-sum(np.diff(list(set(uniCommIdsEvol[Id][0]))) == 1)/(lifetime-1))
            lifetime=max(lifetime,timeSlLen)

        '''All the communities ranked in order of importance'''
        rankedCommunities = sorted(commRanking, key=commRanking.get, reverse=True)
        if numTopComms>len(rankedCommunities):
            numTopComms=len(rankedCommunities)

        '''Jaccardians for lifespans which appear only once are discarded (outliers)'''
        flux=[]
        for lifeT in range(lifetime+1):
            if lifeT in fluctuation and len(fluctuation[lifeT])>1:
                flux.append(sum(fluctuation[lifeT])/len(fluctuation[lifeT]))
            else:
                flux.append(0)

        '''Constructing community size heatmap data'''
        commSizeHeatData = np.zeros([numTopComms, timeslots])
        for rCIdx, comms in enumerate(rankedCommunities[0:numTopComms]):
            for sizeIdx, timesteps in enumerate(uniCommIdsEvol[comms][0]):
                if commSizeHeatData[rCIdx, timesteps] != 0:
                    commSizeHeatData[rCIdx, timesteps] = max(np.log(uniCommIdsEvol[comms][2][sizeIdx]),commSizeHeatData[rCIdx, timesteps])
                else:
                    commSizeHeatData[rCIdx, timesteps] = np.log(uniCommIdsEvol[comms][2][sizeIdx])
        normedHeatdata = commSizeHeatData/commSizeHeatData.max()

        '''Writing ranked communities to json files + MongoDB'''
        dataset_name=self.dataset_path.split('/')
        dataset_name=dataset_name[-1]
        rankedCommunitiesFinal = {}
        twitterDataFile = open(self.dataset_path + '/data/nonadaptive/results/rankedCommunities.json', "w")#, encoding="utf-8-sig")
        jsondata = dict()
        jsondata["ranked_communities"] = []

        '''Create corpus and stopwords'''
        # stop = stopwords.words('english')
        stop = []
        # grstopwords=pickle.load(open("./greek_stopwords.pck", 'rb'))
        # stop.extend(grstopwords)
        definiteStop = ['gt','amp','rt','via']
        stop.extend(definiteStop)
        if not os.path.exists(self.dataset_path + "/data/nonadaptive/tmp/datasetCorpus.pck"):
            idf = self.corpusExtraction(rankedCommunities[:numTopComms])
        else:
            idf = pickle.load(open(self.dataset_path + "/data/nonadaptive/tmp/datasetCorpus.pck", 'rb'))
            print('loaded corpus from file')
        #-------------------------
        regex1 = re.compile("(?:\@|#|https?\://)\S+",re.UNICODE)
        regex2 = re.compile("\w+'?\w",re.UNICODE)

        width,height = 400,200
        blank_image = Image.new("RGB", (timeslots*width, (numTopComms*2+2)*height),(255,255,255)) #make blank for colage
        for tmptime in range(timeslots):
            timeimage = make_wordcloud([self.timeLimit[tmptime],'the date'],[10,2], width=width, height=height)
            blank_image.paste(timeimage, (tmptime*width,height))

        for rank, rcomms in enumerate(rankedCommunities[:numTopComms]):
            tmslUsrs, tmpTags, tmptweetids, commTwText, tmpUrls, topic, tmpkeywrds = [], [], [], [], [], [], []
            strRank = '{0}'.format(str(rank).zfill(2))
            rankedCommunitiesFinal[strRank] = [rcomms]
            rankedCommunitiesFinal[strRank].append(commRanking[rcomms])
            rankedCommunitiesFinal[strRank].append(uniCommIdsEvol[rcomms][3])
            timeSlotApp = [self.timeLimit[x] for x in uniCommIdsEvol[rcomms][0]]

            '''make and save wordclouds'''
            if not os.path.exists(self.dataset_path + "/data/nonadaptive/results/wordclouds/"+self.fileTitle+'/'+str(rank)):
                os.makedirs(self.dataset_path + "/data/nonadaptive/results/wordclouds/"+self.fileTitle+'/'+str(rank))

            for tmsl, users in enumerate(uniCommIdsEvol[rcomms][3]):
                uscentr, tmptweetText = [], []
                for us in users:
                    uscentr.append([us, self.userPgRnkBag[uniCommIdsEvol[rcomms][0][tmsl]][us]])
                    # uscentr = sorted(uscentr, key=itemgetter(1), reverse=True)
                    if us in self.tagBag[uniCommIdsEvol[rcomms][0][tmsl]]:
                        tmpTags.extend(self.tagBag[uniCommIdsEvol[rcomms][0][tmsl]][us])
                    if us in self.urlBag[uniCommIdsEvol[rcomms][0][tmsl]]:
                        tmpUrls.append(self.urlBag[uniCommIdsEvol[rcomms][0][tmsl]][us])
                    if us in self.tweetIdBag[uniCommIdsEvol[rcomms][0][tmsl]]:
                        tmptweetids.extend(self.tweetIdBag[uniCommIdsEvol[rcomms][0][tmsl]][us])
                    if us in self.tweetTextBag[uniCommIdsEvol[rcomms][0][tmsl]]:
                        tmptweetText.extend(self.tweetTextBag[uniCommIdsEvol[rcomms][0][tmsl]][us])
                uscentr = sorted(uscentr, key=itemgetter(1), reverse=True)
                tmslUsrs.append({str(uniCommIdsEvol[rcomms][0][tmsl]): uscentr})
                tmptweetText = [i.replace("\n", "").replace('\t',' ') for i in tmptweetText]
                seen = set()
                seen_add = seen.add
                tmptweetText2 = [x for x in tmptweetText if x not in seen and not seen_add(x)]
                commTwText.append({timeSlotApp[tmsl]: tmptweetText2})
                #topic extraction
                topicList = " ".join(tmptweetText2)
                topicList = topicList.lower()
                topicList = regex1.sub('', topicList)
                topicList = regex2.findall(topicList)
                topicList = collections.Counter(topicList)
                tmpkeys = topicList.keys()
                if len(topicList)>5:
                    for i in list(tmpkeys):
                            if not i or i in stop or i.startswith(('htt','(@','t.co')) or len(i)<=2:
                                del topicList[i]
                else:
                    for i in list(tmpkeys):
                        if i in definiteStop or not i:
                            del topicList[i]

                timeSlLen=len(uniCommIdsEvol[Id][0])
                tmpTopic=tfidf.comm_tfidf(topicList,idf,10)
                topic.append({timeSlotApp[tmsl]: tmpTopic})
                # tmpTopic = [x[0] for x in tmpTopic]
                '''wordcloud image'''
                popkeys = [x[0] for x in tmpTopic]
                popvals = [x[1] for x in tmpTopic]
                if len(popvals)<2:
                    try:
                        if popvals[0]<1:
                            popvals[0]=1
                    except:
                        pass
                '''Create intermediate image'''
                position = (rank+1)*2
                backgroundcolor = int((1-(normedHeatdata[rank,uniCommIdsEvol[rcomms][0][tmsl]]))*255)
                locimage = make_wordcloud(popkeys,popvals, width=width, height=height,backgroundweight=backgroundcolor)#, fname=self.dataset_path + '/data/nonadaptive/results/wordclouds/'+self.fileTitle+'/'+str(rank)+'/'+timeSlotApp[tmsl]+'.pdf'
                blank_image.paste(locimage, (uniCommIdsEvol[rcomms][0][tmsl]*width,position*height))
                popusers = [x[0] for x in uscentr[:10]]
                popcentr = [x[1]*100 for x in uscentr[:10]]
                locimage = make_wordcloud(popusers,popcentr, width=width, height=height,backgroundweight=backgroundcolor)#, fname=self.dataset_path + '/data/nonadaptive/results/wordclouds/'+self.fileTitle+'/'+str(rank)+'/'+timeSlotApp[tmsl]+'usrs.pdf'
                blank_image.paste(locimage, (uniCommIdsEvol[rcomms][0][tmsl]*width,(position+1)*height))
                # tmpkeywrds.extend(tmpTopic)

            if tmpTags:
                popTags = [x.lower() for x in list(itertools.chain.from_iterable(tmpTags))]
                popTags = collections.Counter(popTags)
                popTags = popTags.most_common(10)
            else:
                popTags=[]
            if tmpUrls:
                if tmpUrls[0]:
                    tmpUrls=[x.lower() for x in list(itertools.chain.from_iterable(tmpUrls)) if x]
                    popUrls = collections.Counter(tmpUrls)
                    popUrls = popUrls.most_common(10)
                else:
                    popUrls=[]
            else:
                    popUrls=[]
            commTweetIds = list(set(tmptweetids))
            # popKeywords = collections.Counter(tmpkeywrds)
            # popKeywords = popKeywords.most_common(10)
            # popkeys = [x[0] for x in popKeywords]
            # popvals = [x[1] for x in popKeywords]
            # make_wordcloud(popkeys,popvals,self.dataset_path + '/data/nonadaptive/results/wordclouds/'+self.fileTitle+'/'+str(rank)+'.pdf')
            dycco={'community label': rcomms, 'rank': rank, 'timeslot appearance': timeSlotApp,# 'text': commTwText,
                 'persistence:': tempcommRanking[rcomms][0],'total score':commRanking[rcomms],'topic': topic,
                 'stability': tempcommRanking[rcomms][1],'community centrality': tempcommRanking[rcomms][2],
                 'community size per slot': uniCommIdsEvol[rcomms][2], 'users:centrality per timeslot': tmslUsrs,
                 'popTags': popTags, 'popUrls': popUrls}
            jsondycco=dycco.copy()
            # dyccos.insert(dycco)
            jsondata["ranked_communities"].append(jsondycco)
        twitterDataFile.write(json.dumps(jsondata, sort_keys=True))#,ensure_ascii=False).replace("\u200f",""))
        twitterDataFile.close()

        for tmptime in range(timeslots):
            timeimage = make_wordcloud([self.timeLimit[tmptime],'the date'],[10,2])
            blank_image.paste(timeimage, (tmptime*width,(position+2)*height))
        imsize=blank_image.size
        blank_image = blank_image.resize((round(imsize[0]/2),round(imsize[1]/2)),Image.ANTIALIAS)
        blank_image.save(self.dataset_path + "/data/nonadaptive/results/wordclouds/"+self.fileTitle+'_collage.pdf', quality=50)

        makefigures(commSizeHeatData,flux,self.fileTitle,self.day_month,commRanking,numTopComms,timeslots,uniCommIdsEvol,rankedCommunities,self.commPerTmslt,self.uniCommIds,prevTimeslots,self.dataset_path,self.xLablNum)
        return rankedCommunitiesFinal
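
The ranked-communities file above is written with json.dumps(jsondata, sort_keys=True), and the commented-out tail hints at ensure_ascii handling for non-ASCII topic words. Below is a minimal sketch of that combination, with a made-up payload and file name.

# -*- coding: utf-8 -*-
import io
import json

# Made-up payload standing in for the jsondata dict assembled above.
jsondata = {"ranked_communities": [
    {"community label": 3, "rank": 0, "topic": [u"καλημέρα", u"twitter"]},
]}

# sort_keys gives a stable key order between runs; ensure_ascii=False keeps
# non-ASCII topic words readable instead of \uXXXX escapes, which is why the
# file is opened with an explicit UTF-8 encoding.
with io.open("rankedCommunities.json", "w", encoding="utf-8") as out_file:
    out_file.write(json.dumps(jsondata, sort_keys=True, ensure_ascii=False))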

Example 88

Project: inthe.am Source File: tasks.py
@shared_task(
    bind=True,
)
def process_email_message(self, message_id):
    from .models import TaskAttachment, TaskStore

    with open('/tmp/out.log', 'w') as out:
        import json
        out.write(
            json.dumps(settings.LOGGING, indent=4)
        )
        out.write(__name__)

    def get_secret_id_and_args(address):
        inbox_id = address[0:36]
        args = []

        arg_string = address[36:]
        for arg in re.split('__|\+', arg_string):
            if not arg:
                continue
            if '=' in arg:
                params = arg.split('=')
                if params[0] == 'priority':
                    params[1] = params[1].upper()
                args.append('%s:"%s"' % tuple(params))
            else:
                args.append('+%s' % arg)

        return inbox_id, args

    message = Message.objects.get(pk=message_id)
    message.read = now()
    message.save()

    store = None
    additional_args = []
    # Check for matching To: addresses.
    for address in message.to_addresses:
        try:
            inbox_id, additional_args = get_secret_id_and_args(
                address.split('@')[0]
            )

            store = TaskStore.objects.get(
                secret_id=inbox_id
            )
            break
        except (TaskStore.DoesNotExist, IndexError):
            pass

    # Check for 'Received' headers matching a known e-mail address.
    if store is None:
        email_regex = re.compile(r'([0-9a-fA-F-]{36}@inthe.am)')
        all_received_headers = message.get_email_object().get_all('Received')
        for header in all_received_headers:
            matched_email = email_regex.search(header)
            if matched_email:
                address = matched_email.group(1)
                try:
                    inbox_id, additional_args = get_secret_id_and_args(
                        address.split('@')[0]
                    )

                    store = TaskStore.objects.get(
                        secret_id=inbox_id
                    )
                    break
                except (TaskStore.DoesNotExist, IndexError):
                    pass

    if store is None:
        logger.error(
            "Could not find task store for e-mail message (ID %s) addressed "
            "to %s",
            message.pk,
            message.to_addresses
        )
        return

    allowed = False
    for address in store.email_whitelist.split('\n'):
        if glob(message.from_address[0], address):
            allowed = True

    if not allowed:
        log_args = (
            "Incoming task creation e-mail (ID: %s) from '%s' "
            "does not match email whitelist and was ignored." % (
                message.pk,
                message.from_address[0]
            ),
        )
        logger.info(*log_args)
        store.log_message(*log_args)
        return

    if (
        not message.subject
        or message.subject.lower() in ['add', 'create', 'new']
    ):
        with git_checkpoint(store, 'Incoming E-mail'):
            task_args = [
                'add',
                'intheamoriginalemailsubject:"%s"' % message.subject,
                'intheamoriginalemailid:%s' % message.pk,
            ] + additional_args + shlex.split(
                message.text.split('\n\n')[0]  # Only use text up to the first
                                               # blank line.
            )
            stdout, stderr = store.client._execute_safe(*task_args)
            task = store.client.get_task(intheamoriginalemailid=message.pk)[1]
            task_id = str(task['uuid'])

            attachment_urls_raw = task.get('intheamattachments')
            if not attachment_urls_raw:
                attachment_urls = []
            else:
                attachment_urls = attachment_urls_raw.split('|')

            for record in message.attachments.all():
                attachment = record.document
                if attachment.file.size > settings.FILE_UPLOAD_MAXIMUM_BYTES:
                    logger.info(
                        "File %s too large (%s bytes).",
                        attachment.file.name,
                        attachment.file.size,
                    )
                    store.log_message(
                        "Attachments must be smaller than %s "
                        "bytes to be saved to a task, but the "
                        "attachment %s received for task ID %s "
                        "is %s bytes in size and was not saved "
                        "as a result." % (
                            settings.FILE_UPLOAD_MAXIMUM_BYTES,
                            attachment.file.name,
                            task_id,
                            attachment.file.size,
                        )
                    )
                    attachment.delete()
                    continue

                document = TaskAttachment.objects.create(
                    store=store,
                    task_id=task_id,
                    name=record.get_filename(),
                    size=attachment.file.size,
                )
                document.document.save(
                    record.get_filename(),
                    attachment.file,
                )
                attachment_urls.append(
                    document.document.url
                )
                store.client.task_annotate(
                    task, 'Attached File: %s' % document.document.url
                )

            if attachment_urls:
                task['intheamattachments'] = ' '.join(attachment_urls)
                store.client.task_update(task)

        log_args = (
            "Added task %s via e-mail %s from %s." % (
                task_id,
                message.pk,
                message.from_address[0]
            ),
        )
        logger.info(*log_args)
        store.log_message(*log_args)
    else:
        log_args = (
            "Unable to process e-mail %s from %s; unknown subject '%s'" % (
                message.pk,
                message.from_address[0],
                message.subject,
            ),
        )
        logger.info(*log_args)
        store.log_message(*log_args)
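
The json.dumps call at the top of this task is a debugging aid: it dumps the nested LOGGING settings dictionary to a scratch file with indent=4 so it can be read by eye. A self-contained sketch of that idiom, assuming a stand-in LOGGING dict rather than Inthe.AM's real configuration:

import json

LOGGING = {
    "version": 1,
    "handlers": {"console": {"class": "logging.StreamHandler"}},
    "root": {"handlers": ["console"], "level": "INFO"},
}

# indent=4 pretty-prints the nested dict so the scratch file is easy to scan
with open("/tmp/out.log", "w") as out:
    out.write(json.dumps(LOGGING, indent=4))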

Example 89

Project: eden Source File: MapPlugin.py
    def render_plots(self,
                     specs,
                     width,
                     height
                     ):
        env = self.env
        DSL = env.DSL

        def generate_chart(file_path):
            time_serieses = []

            from scipy import stats
            regression_lines = []

            R = self.R
            c = R("c")
            spec_names = []
            starts = []
            ends = []
            yearly = []
            for label, spec in specs:
                query_expression = spec["query_expression"]
                expression = DSL.parse(query_expression)
                understood_expression_string = str(expression)
                spec_names.append(label)
                units = DSL.units(expression)
                unit_string = str(units)
                if units is None:
                    analysis_strings = []
                    def analysis_out(*things):
                        analysis_strings.append("".join(map(str, things)))
                    DSL.analysis(expression, analysis_out)
                    raise MeaninglessUnitsException(
                        "\n".join(analysis_strings)
                    )
                is_yearly_values = "Months(" in query_expression
                yearly.append(is_yearly_values)
                if is_yearly_values:
                    if "Prev" in query_expression:
                        # PreviousDecember handling:
                        grouping_key = "(time_period - ((time_period + 1000008 + %i +1) %% 12))" % start_month_0_indexed
                    else:
                        grouping_key = "(time_period - ((time_period + 1000008 + %i) %% 12))" % start_month_0_indexed
                else:
                    grouping_key = "time_period"
                code = DSL.R_Code_for_values(
                    expression,
                    grouping_key,
                    "place_id IN (%s)" % ",".join(map(str, spec["place_ids"]))
                )
                #print code
                values_by_time_period_data_frame = R(code)()
                data = {}
                if isinstance(
                    values_by_time_period_data_frame,
                    self.robjects.vectors.StrVector
                ):
                    raise Exception(str(values_by_time_period_data_frame))
                elif values_by_time_period_data_frame.ncol == 0:
                    pass
                else:
                    keys = values_by_time_period_data_frame.rx2("key")
                    values = values_by_time_period_data_frame.rx2("value")
                    try:
                        display_units = {
                            "Kelvin": "Celsius",
                        }[unit_string]
                    except KeyError:
                        converter = lambda x:x
                        display_units = unit_string
                    else:
                        converter = units_in_out[display_units]["out"]

                    linear_regression = R("{}")

                    previous_december_month_offset = [0,1][is_yearly_values and "Prev" in query_expression]

                    def month_number_to_float_year(month_number):
                        year, month = month_number_to_year_month(month_number+previous_december_month_offset)
                        return year + (float(month-1) / 12)

                    converted_keys = map(month_number_to_float_year, keys)
                    converted_values = map(converter, values)
                    regression_lines.append(
                        stats.linregress(converted_keys, converted_values)
                    )

                    add = data.__setitem__
                    for key, value in zip(keys, values):
                        #print key, value
                        add(key, value)
                    # assume monthly values and monthly time_period
                    start_month_number = min(data.iterkeys())
                    starts.append(start_month_number)
                    start_year, start_month = month_number_to_year_month(
                        start_month_number + previous_december_month_offset
                    )

                    end_month_number = max(data.iterkeys())
                    ends.append(end_month_number)
                    end_year, end_month = month_number_to_year_month(
                        end_month_number + previous_december_month_offset
                    )

                    values = []
                    for month_number in range(
                        start_month_number,
                        end_month_number+1,
                        [1,12][is_yearly_values]
                    ):
                        if not data.has_key(month_number):
                            values.append(None)
                        else:
                            values.append(converter(data[month_number]))

                    if is_yearly_values:
                        time_serieses.append(
                            R("ts")(
                                self.robjects.FloatVector(values),
                                start = c(start_year),
                                end = c(end_year),
                                frequency = 1
                            )
                        )
                    else:
                        time_serieses.append(
                            R("ts")(
                                self.robjects.FloatVector(values),
                                start = c(start_year, start_month),
                                end = c(end_year, end_month),
                                frequency = 12
                            )
                        )
            min_start = min(starts)
            max_end = max(ends)
            show_months = any(not is_yearly for is_yearly in yearly)
            if show_months:
                # step spaces out the x-axis marks sensibly based on
                # width by not marking all of them.
                ticks = (max_end - min_start) + 1
                # ticks should be made at 1,2,3,4,6,12 month intervals
                # or 1, 2, 5, 10, 20, 50 year intervals
                # depending on the usable width and the number of ticks
                # ticks should be at least 15 pixels apart
                usable_width = width - 100
                max_ticks = usable_width / 15.0
                Y = 12
                for step in [1,2,3,4,6,12,2*Y, 5*Y, 10*Y, 20*Y, 50*Y]:
                    if ticks/step <= max_ticks:
                        break

                axis_points = []
                axis_labels = []
                month_names = (
                    "Jan Feb Mar Apr May Jun "
                    "Jul Aug Sep Oct Nov Dec"
                ).split(" ")
                for month_number in range(min_start, max_end+1, step):
                    year, month = month_number_to_year_month(month_number)
                    month -= 1
                    axis_points.append(
                        year + (month / 12.0)
                    )
                    axis_labels.append(
                        "%s %i" % (month_names[month], year)
                    )
            else:
                # show only years
                axis_points = []
                axis_labels = []
                start_year, start_month = month_number_to_year_month(min_start)
                end_year, end_month = month_number_to_year_month(max_end)
                for year in range(start_year, end_year+1):
                    axis_points.append(year)
                    axis_labels.append(year)

            display_units = display_units.replace("Celsius", "\xc2\xb0Celsius")

            R.png(
                filename = file_path,
                width = width,
                height = height
            )

            plot_chart = R("""
function (
    xlab, ylab, n, names, axis_points,
    axis_labels, axis_orientation,
    plot_type,
    width, height,
    total_margin_height,
    line_interspacing,
    ...
) {
    split_names <- lapply(
        names,
        strwrap, width=(width - 100)/5
    )
    wrapped_names <- lapply(
        split_names,
        paste, collapse='\n'
    )
    legend_line_count = sum(sapply(split_names, length))
    legend_height_inches <- grconvertY(
        -(
            (legend_line_count * 11) +
            (length(wrapped_names) * 6) + 30
        ),
        "device",
        "inches"
    ) - grconvertY(0, "device", "inches")
    par(
        xpd = T,
        mai = (par()$mai + c(legend_height_inches , 0, 0, 0))
    )
    ts.plot(...,
        gpars = list(
            xlab = xlab,
            ylab = ylab,
            col = c(1:n),
            pch = c(21:25),
            type = plot_type,
            xaxt = 'n'
        )
    )
    axis(
        1,
        at = axis_points,
        labels = axis_labels,
        las = axis_orientation
    )
    legend(
        par()$usr[1],
        par()$usr[3] - (
            grconvertY(0, "device", "user") -
            grconvertY(70, "device", "user")
        ),
        wrapped_names,
        cex = 0.8,
        pt.bg = c(1:n),
        pch = c(21:25),
        bty = 'n',
        y.intersp = line_interspacing,
        text.width = 3
    )
}""" )
            for regression_line, i in zip(
                regression_lines,
                range(len(time_serieses))
            ):
                slope, intercept, r, p, stderr = regression_line
                if isnan(slope) or isnan(intercept):
                    spec_names[i] += "   {cannot calculate linear regression}"
                else:
                    if isnan(p):
                        p_str = "NaN"
                    else:
                        p_str = str(round_to_4_sd(p))
                    if isnan(stderr):
                        stderr_str = "NaN"
                    else:
                        stderr_str = str(round_to_4_sd(stderr))

                    slope_str, intercept_str, r_str = map(
                        str,
                        map(round_to_4_sd, (slope, intercept, r))
                    )

                    spec_names[i] += (
                        u"   {"
                            "y=%(slope_str)s x year %(add)s%(intercept_str)s, "
                            "r= %(r_str)s, "
                            "p= %(p_str)s, "
                            "S.E.= %(stderr_str)s"
                        "}"
                    ) % dict(
                        locals(),
                        add = [u"+ ",u""][intercept_str.startswith("-")]
                    )

            plot_chart(
                xlab = "",
                ylab = display_units,
                n = len(time_serieses),
                names = spec_names,
                axis_points = axis_points,
                axis_labels = axis_labels,
                axis_orientation = [0,2][show_months],
                plot_type= "lo"[is_yearly_values],
                width = width,
                height = height,
                # R uses Normalised Display coordinates.
                # these have been found by recursive improvement
                # they place the legend legibly. tested up to 8 lines
                total_margin_height = 150,
                line_interspacing = 1.8,
                *time_serieses
            )

            for regression_line, colour_number in zip(
                regression_lines,
                range(len(time_serieses))
            ):
                slope = regression_line[0]
                intercept = regression_line[1]
                if isnan(slope) or isnan(intercept):
                    pass
                else:
                    R.par(xpd = False)
                    R.abline(
                        intercept,
                        slope,
                        col = colour_number+1
                    )
            R("dev.off()")

            import Image, ImageEnhance

            RGBA = "RGBA"
            def reduce_opacity(image, opacity):
                """Returns an image with reduced opacity."""
                assert opacity >= 0 and opacity <= 1
                if image.mode != RGBA:
                    image = image.convert(RGBA)
                else:
                    image = image.copy()
                alpha = image.split()[3]
                alpha = ImageEnhance.Brightness(alpha).enhance(opacity)
                image.putalpha(alpha)
                return image

            def scale_preserving_aspect_ratio(image, ratio):
                return image.resize(
                    map(int, map(ratio.__mul__, image.size))
                )

            def watermark(image, mark, position, opacity=1):
                """Adds a watermark to an image."""
                if opacity < 1:
                    mark = reduce_opacity(mark, opacity)
                if image.mode != RGBA:
                    image = image.convert(RGBA)
                # create a transparent layer the size of the
                # image and draw the watermark in that layer.
                layer = Image.new(RGBA, image.size, (0,0,0,0))
                if position == 'tile':
                    for y in range(0, image.size[1], mark.size[1]):
                        for x in range(0, image.size[0], mark.size[0]):
                            layer.paste(mark, (x, y))
                elif position == 'scale':
                    # scale, but preserve the aspect ratio
                    ratio = min(
                        float(image.size[0]) / mark.size[0],
                        float(image.size[1]) / mark.size[1]
                    )
                    w = int(mark.size[0] * ratio)
                    h = int(mark.size[1] * ratio)
                    mark = mark.resize((w, h))
                    layer.paste(
                        mark,
                        (
                            (image.size[0] - w) / 2,
                            (image.size[1] - h) / 2
                        )
                    )
                else:
                    layer.paste(mark, position)
                # composite the watermark with the layer
                return Image.composite(layer, image, layer)

            image = Image.open(file_path)
            watermark_image_path = os.path.join(
                os.path.realpath("."),
                "applications",
                current.request.application,
                "static", "img",
                "Nepal-Government-Logo.png"
            )
            watermark_image = Image.open(watermark_image_path)
            #watermark_image = scale_preserving_aspect_ratio(watermark_image, 0.5)
            watermark(image, watermark_image, 'scale', 0.05).save(file_path)

        def serialiseDate(obj):
            if isinstance(obj, (datetime.date, datetime.datetime, datetime.time)):
                return obj.isoformat()[:19].replace("T"," ")
            else:
                raise TypeError("%r is not JSON serializable" % (obj,))

        return get_cached_or_generated_file(
            "".join((
                hashlib.md5(
                    json.dumps(
                        [specs, width, height],
                        sort_keys=True,
                        default=serialiseDate,
                    )
                ).hexdigest(),
                ".png"
            )),
            generate_chart
        )
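
Here json.dumps builds a stable cache key: the chart parameters are serialized with sort_keys=True and a default= hook for date objects, then hashed with MD5 to name the cached PNG. A small sketch of the same idea, with illustrative helper names not taken from the plugin:

import datetime
import hashlib
import json

def serialise_date(obj):
    # Fallback serializer for values json.dumps cannot handle natively.
    if isinstance(obj, (datetime.date, datetime.datetime, datetime.time)):
        return obj.isoformat()
    raise TypeError("%r is not JSON serializable" % (obj,))

def cache_key(*params):
    # sort_keys=True keeps the dump stable, so equal inputs hash identically.
    blob = json.dumps(params, sort_keys=True, default=serialise_date)
    return hashlib.md5(blob.encode("utf-8")).hexdigest() + ".png"

print(cache_key({"query": "rainfall"}, 800, 600, datetime.date(2024, 1, 1)))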

Example 90

Project: conda Source File: main_config.py
def execute_config(args, parser):
    json_warnings = []
    json_get = {}

    if args.show_sources:
        if context.json:
            print(json.dumps(context.collect_all(), sort_keys=True,
                             indent=2, separators=(',', ': ')))
        else:
            lines = []
            for source, reprs in iteritems(context.collect_all()):
                lines.append("==> %s <==" % source)
                lines.extend(format_dict(reprs))
                lines.append('')
            print('\n'.join(lines))
        return

    if args.show:
        from collections import OrderedDict
        d = OrderedDict((key, getattr(context, key))
                        for key in sorted(('add_anaconda_token',
                                           'add_pip_as_python_dependency',
                                           'allow_softlinks',
                                           'always_copy',
                                           'always_yes',
                                           'auto_update_conda',
                                           'binstar_upload',
                                           'changeps1',
                                           'channel_alias',
                                           'channel_priority',
                                           'channels',
                                           'client_ssl_cert',
                                           'client_ssl_cert_key',
                                           'create_default_packages',
                                           'debug',
                                           'default_channels',
                                           'disallow',
                                           'envs_dirs',
                                           'json',
                                           'offline',
                                           'proxy_servers',
                                           'quiet',
                                           'shortcuts',
                                           'show_channel_urls',
                                           'ssl_verify',
                                           'track_features',
                                           'update_dependencies',
                                           'use_pip',
                                           'verbosity',
                                           )))
        if context.json:
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': '),
                  cls=EntityEncoder))
        else:
            print('\n'.join(format_dict(d)))
        context.validate_configuration()
        return

    if args.validate:
        context.validate_all()
        return

    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        context.validate_all()
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not context.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if context.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              "  # lowest priority" if q == 0 else "  # highest priority")
                    else:
                        print("--add", key, repr(item))

    # prepend, append, add
    for arg, prepend in zip((args.prepend, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key))
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "top" if prepend else "bottom")
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not context.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        if key in set_bools:
            rc_config[key] = boolify(item)
        elif key in set_strings:
            assert isinstance(item, string_types)
            rc_config[key] = item
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key))

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError(key, "key %r is not in the config file" % key)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError(key, "%r is not in the %r key of the config file" %
                                (item, key))
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError(key, "key %r is not in the config file" %
                                key)
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if context.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
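
Both json.dumps calls above combine sort_keys=True, indent=2 and separators=(',', ': '), and the second adds a custom encoder class for conda's entity objects. A minimal sketch of that combination; SettingsEncoder and the sample data are invented for illustration:

import json

class SettingsEncoder(json.JSONEncoder):
    # Custom encoder: turn sets, which json cannot serialize, into sorted lists.
    def default(self, obj):
        if isinstance(obj, set):
            return sorted(obj)
        return super(SettingsEncoder, self).default(obj)

config = {"channels": {"defaults", "conda-forge"}, "always_yes": False}

# separators=(',', ': ') drops the space after item commas, so the indented
# dump has no trailing whitespace at line ends (the Python 2 default left it in)
print(json.dumps(config, sort_keys=True, indent=2,
                 separators=(',', ': '), cls=SettingsEncoder))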

Example 91

Project: maltrail Source File: httpd.py
def start_httpd(address=None, port=None, join=False, pem=None):
    """
    Starts HTTP server
    """

    class ThreadingServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
        def server_bind(self):
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            BaseHTTPServer.HTTPServer.server_bind(self)

        def finish_request(self, *args, **kwargs):
            try:
                BaseHTTPServer.HTTPServer.finish_request(self, *args, **kwargs)
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

    class SSLThreadingServer(ThreadingServer):
        def __init__(self, server_address, pem, HandlerClass):
            import OpenSSL  # python-openssl

            ThreadingServer.__init__(self, server_address, HandlerClass)
            ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
            ctx.use_privatekey_file(pem)
            ctx.use_certificate_file(pem)
            self.socket = OpenSSL.SSL.Connection(ctx, socket.socket(self.address_family, self.socket_type))
            self.server_bind()
            self.server_activate()

        def shutdown_request(self, request):
            try:
                request.shutdown()
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

    class ReqHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def do_GET(self):
            path, query = self.path.split('?', 1) if '?' in self.path else (self.path, "")
            params = {}
            content = None
            skip = False

            if hasattr(self, "data"):
                params.update(urlparse.parse_qs(self.data))

            if query:
                params.update(urlparse.parse_qs(query))

            for key in params:
                if params[key]:
                    params[key] = params[key][-1]

            if path == '/':
                path = "index.html"

            path = path.strip('/')
            extension = os.path.splitext(path)[-1].lower()

            if hasattr(self, "_%s" % path):
                content = getattr(self, "_%s" % path)(params)

            else:
                path = path.replace('/', os.path.sep)
                path = os.path.abspath(os.path.join(HTML_DIR, path)).strip()

                if not os.path.isfile(path) and os.path.isfile("%s.html" % path):
                    path = "%s.html" % path

                if ".." not in os.path.relpath(path, HTML_DIR) and os.path.isfile(path) and (extension not in DISABLED_CONTENT_EXTENSIONS or os.path.split(path)[-1] in CONTENT_EXTENSIONS_EXCLUSIONS):
                    mtime = time.gmtime(os.path.getmtime(path))
                    if_modified_since = self.headers.get(HTTP_HEADER.IF_MODIFIED_SINCE)

                    if if_modified_since and extension not in (".htm", ".html"):
                        if_modified_since = [_ for _ in if_modified_since.split(';') if _.upper().endswith("GMT")][0]
                        if time.mktime(mtime) <= time.mktime(time.strptime(if_modified_since, HTTP_TIME_FORMAT)):
                            self.send_response(httplib.NOT_MODIFIED)
                            self.send_header(HTTP_HEADER.CONNECTION, "close")
                            skip = True

                    if not skip:
                        content = open(path, "rb").read()
                        last_modified = time.strftime(HTTP_TIME_FORMAT, mtime)
                        self.send_response(httplib.OK)
                        self.send_header(HTTP_HEADER.CONNECTION, "close")
                        self.send_header(HTTP_HEADER.CONTENT_TYPE, mimetypes.guess_type(path)[0] or "application/octet-stream")
                        self.send_header(HTTP_HEADER.LAST_MODIFIED, last_modified)
                        if extension not in (".htm", ".html"):
                            self.send_header(HTTP_HEADER.EXPIRES, "Sun, 17-Jan-2038 19:14:07 GMT")        # Reference: http://blog.httpwatch.com/2007/12/10/two-simple-rules-for-http-caching/
                            self.send_header(HTTP_HEADER.CACHE_CONTROL, "max-age=3600, must-revalidate")  # Reference: http://stackoverflow.com/a/5084555
                        else:
                            self.send_header(HTTP_HEADER.CACHE_CONTROL, "no-cache")

                else:
                    self.send_response(httplib.NOT_FOUND)
                    self.send_header(HTTP_HEADER.CONNECTION, "close")
                    content = '<!DOCTYPE html><html lang="en"><head><title>404 Not Found</title></head><body><h1>Not Found</h1><p>The requested URL %s was not found on this server.</p></body></html>' % self.path.split('?')[0]

            if content is not None:
                for match in re.finditer(r"<\!(\w+)\!>", content):
                    name = match.group(1)
                    _ = getattr(self, "_%s" % name.lower(), None)
                    if _:
                        content = self._format(content, **{ name: _() })

                if "gzip" in self.headers.getheader(HTTP_HEADER.ACCEPT_ENCODING, ""):
                    self.send_header(HTTP_HEADER.CONTENT_ENCODING, "gzip")
                    _ = cStringIO.StringIO()
                    compress = gzip.GzipFile("", "w+b", 9, _)
                    compress._stream = _
                    compress.write(content)
                    compress.flush()
                    compress.close()
                    content = compress._stream.getvalue()

                self.send_header(HTTP_HEADER.CONTENT_LENGTH, str(len(content)))

            self.end_headers()

            if content:
                self.wfile.write(content)

            self.wfile.flush()
            self.wfile.close()

        def do_POST(self):
            length = self.headers.getheader(HTTP_HEADER.CONTENT_LENGTH)
            data = self.rfile.read(int(length))
            data = urllib.unquote_plus(data)
            self.data = data
            self.do_GET()

        def get_session(self):
            retval = None
            cookie = self.headers.get(HTTP_HEADER.COOKIE)

            if cookie:
                match = re.search(r"%s\s*=\s*([^;]+)" % SESSION_COOKIE_NAME, cookie)
                if match:
                    session = match.group(1)
                    if session in SESSIONS:
                        if SESSIONS[session].client_ip != self.client_address[0]:
                            pass
                        elif SESSIONS[session].expiration > time.time():
                            retval = SESSIONS[session]
                        else:
                            del SESSIONS[session]

            return retval

        def delete_session(self):
            cookie = self.headers.get(HTTP_HEADER.COOKIE)

            if cookie:
                match = re.search(r"%s=(.+)" % SESSION_COOKIE_NAME, cookie)
                if match:
                    session = match.group(1)
                    if session in SESSIONS:
                        del SESSIONS[session]

        def version_string(self):
            return SERVER_HEADER

        def end_headers(self):
            if not hasattr(self, "_headers_ended"):
                BaseHTTPServer.BaseHTTPRequestHandler.end_headers(self)
                self._headers_ended = True

        def log_message(self, format, *args):
            return

        def finish(self):
            try:
                BaseHTTPServer.BaseHTTPRequestHandler.finish(self)
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

        def _version(self):
            return VERSION

        def _format(self, content, **params):
            if content:
                for key, value in params.items():
                    content = content.replace("<!%s!>" % key, value)

            return content

        def _login(self, params):
            valid = False

            if params.get("username") and params.get("hash") and params.get("nonce"):
                if params.get("nonce") not in DISPOSED_NONCES:
                    DISPOSED_NONCES.add(params.get("nonce"))
                    for entry in (config.USERS or []):
                        entry = re.sub(r"\s", "", entry)
                        username, stored_hash, uid, netfilter = entry.split(':')
                        if username == params.get("username"):
                            try:
                                if params.get("hash") == hashlib.sha256(stored_hash.strip() + params.get("nonce")).hexdigest():
                                    valid = True
                                    break
                            except:
                                if config.SHOW_DEBUG:
                                    traceback.print_exc()

            if valid:
                session_id = os.urandom(SESSION_ID_LENGTH).encode("hex")
                expiration = time.time() + 3600 * SESSION_EXPIRATION_HOURS

                self.send_response(httplib.OK)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                self.send_header(HTTP_HEADER.SET_COOKIE, "%s=%s; expires=%s; path=/; HttpOnly" % (SESSION_COOKIE_NAME, session_id, time.strftime(HTTP_TIME_FORMAT, time.gmtime(expiration))))

                if netfilter in ("", "0.0.0.0/0"):
                    netfilters = None
                else:
                    addresses = set()
                    netmasks = set()

                    for item in set(re.split(r"[;,]", netfilter)):
                        item = item.strip()
                        if '/' in item:
                            _ = item.split('/')[-1]
                            if _.isdigit() and int(_) >= 16:
                                lower = addr_to_int(item.split('/')[0])
                                mask = make_mask(int(_))
                                upper = lower | (0xffffffff ^ mask)
                                while lower <= upper:
                                    addresses.add(int_to_addr(lower))
                                    lower += 1
                            else:
                                netmasks.add(item)
                        elif '-' in item:
                            _ = item.split('-')
                            lower, upper = addr_to_int(_[0]), addr_to_int(_[1])
                            while lower <= upper:
                                addresses.add(int_to_addr(lower))
                                lower += 1
                        elif re.search(r"\d+\.\d+\.\d+\.\d+", item):
                            addresses.add(item)

                    netfilters = netmasks
                    if addresses:
                        netfilters.add(get_regex(addresses))

                SESSIONS[session_id] = AttribDict({"username": username, "uid": uid, "netfilters": netfilters, "expiration": expiration, "client_ip": self.client_address[0]})
            else:
                time.sleep(UNAUTHORIZED_SLEEP_TIME)
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")

            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
            content = "Login %s" % ("success" if valid else "failed")

            if not subprocess.mswindows:
                try:
                    subprocess.check_output("logger -p auth.info -t \"%s[%d]\" \"%s password for %s from %s port %s\"" % (NAME.lower(), os.getpid(), "Accepted" if valid else "Failed", params.get("username"), self.client_address[0], self.client_address[1]), stderr=subprocess.STDOUT, shell=True)
                except Exception:
                    if config.SHOW_DEBUG:
                        traceback.print_exc()

            return content

        def _logout(self, params):
            self.delete_session()
            self.send_response(httplib.FOUND)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.LOCATION, "/")

        def _whoami(self, params):
            session = self.get_session()
            username = session.username if session else ""

            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            return username

        def _check_ip(self, params):
            session = self.get_session()

            if session is None:
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                return None

            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            try:
                result_worst = worst_asns(params.get("address"))
                if result_worst:
                    result_ipcat = result_worst
                else:
                    _ = (ipcat_lookup(params.get("address")) or "").lower().split(' ')
                    result_ipcat = _[1] if _[0] == 'the' else _[0]
                return ("%s" if not params.get("callback") else "%s(%%s)" % params.get("callback")) % json.dumps({"ipcat": result_ipcat, "worst_asns": str(result_worst is not None).lower()})
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

        def _trails(self, params):
            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            return open(TRAILS_FILE, "rb").read()

        def _ping(self, params):
            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            return PING_RESPONSE

        def _events(self, params):
            session = self.get_session()

            if session is None:
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                return None

            start, end, size, total = None, None, -1, None
            content = None
            log_exists = False
            dates = params.get("date", "")

            if ".." in dates:
                pass
            elif '_' not in dates:
                try:
                    date = datetime.datetime.strptime(dates, "%Y-%m-%d").strftime("%Y-%m-%d")
                    event_log_path = os.path.join(config.LOG_DIR, "%s.log" % date)
                    if os.path.exists(event_log_path):
                        range_handle = open(event_log_path, "rb")
                        log_exists = True
                except ValueError:
                    print "[!] invalid date format in request"
                    log_exists = False
            else:
                logs_data = ""
                date_interval = dates.split("_", 1)
                try:
                    start_date = datetime.datetime.strptime(date_interval[0], "%Y-%m-%d").date()
                    end_date = datetime.datetime.strptime(date_interval[1], "%Y-%m-%d").date()
                    for i in xrange(int((end_date - start_date).days) + 1):
                        date = start_date + datetime.timedelta(i)
                        event_log_path = os.path.join(config.LOG_DIR, "%s.log" % date.strftime("%Y-%m-%d"))
                        if os.path.exists(event_log_path):
                            log_handle = open(event_log_path, "rb")
                            logs_data += log_handle.read()
                            log_handle.close()

                    range_handle = io.BytesIO(logs_data)
                    log_exists = True
                except ValueError:
                    print "[!] invalid date format in request"
                    log_exists = False

            if log_exists:
                range_handle.seek(0, 2)
                total = range_handle.tell()
                range_handle.seek(0)

                if self.headers.get(HTTP_HEADER.RANGE):
                    match = re.search(r"bytes=(\d+)-(\d+)", self.headers[HTTP_HEADER.RANGE])
                    if match:
                        start, end = int(match.group(1)), int(match.group(2))
                        max_size = end - start + 1
                        end = min(total - 1, end)
                        size = end - start + 1

                        if start == 0 or not session.range_handle:
                            session.range_handle = range_handle

                        if session.netfilters is None:
                            session.range_handle.seek(start)
                            self.send_response(httplib.PARTIAL_CONTENT)
                            self.send_header(HTTP_HEADER.CONNECTION, "close")
                            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
                            self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes %d-%d/%d" % (start, end, total))
                            content = session.range_handle.read(size)
                        else:
                            self.send_response(httplib.OK)
                            self.send_header(HTTP_HEADER.CONNECTION, "close")
                            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

                            buffer, addresses, netmasks, regex = cStringIO.StringIO(), set(), [], ""
                            for netfilter in session.netfilters:
                                if not netfilter:
                                    continue
                                if '/' in netfilter:
                                    netmasks.append(netfilter)
                                elif re.search(r"\A[\d.]+\Z", netfilter):
                                    addresses.add(netfilter)
                                elif '\.' in netfilter:
                                    regex = r"\b(%s)\b" % netfilter
                                else:
                                    print "[!] invalid network filter '%s'" % netfilter
                                    return

                            for line in session.range_handle:
                                display = False
                                ip = None

                                if regex:
                                    match = re.search(regex, line)
                                    if match:
                                        ip = match.group(1)
                                        display = True

                                if not display and (addresses or netmasks):
                                    for match in re.finditer(r"\b(\d+\.\d+\.\d+\.\d+)\b", line):
                                        if not display:
                                            ip = match.group(1)
                                        else:
                                            break

                                        if ip in addresses:
                                            display = True
                                            break
                                        elif netmasks:
                                            for _ in netmasks:
                                                prefix, mask = _.split('/')
                                                if addr_to_int(ip) & make_mask(int(mask)) == addr_to_int(prefix):
                                                    addresses.add(ip)
                                                    display = True
                                                    break

                                if display:
                                    if ",%s" % ip in line or "%s," % ip in line:
                                        line = re.sub(r" ([\d.,]+,)?%s(,[\d.,]+)? " % re.escape(ip), " %s " % ip, line)
                                    buffer.write(line)
                                    if buffer.tell() >= max_size:
                                        break

                            content = buffer.getvalue()
                            end = start + len(content) - 1
                            self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes %d-%d/%d" % (start, end, end + 1 + max_size * (len(content) >= max_size)))

                        if len(content) < max_size:
                            session.range_handle.close()
                            session.range_handle = None

                if size == -1:
                    self.send_response(httplib.OK)
                    self.send_header(HTTP_HEADER.CONNECTION, "close")
                    self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
                    self.end_headers()

                    with range_handle as f:
                        while True:
                            data = f.read(io.DEFAULT_BUFFER_SIZE)
                            if not data:
                                break
                            else:
                                self.wfile.write(data)

            else:
                self.send_response(httplib.OK)  # instead of httplib.NO_CONTENT (compatibility reasons)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                if self.headers.get(HTTP_HEADER.RANGE):
                    self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes 0-0/0")

            return content

        def _counts(self, params):
            counts = {}

            session = self.get_session()

            if session is None:
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                return None

            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "application/json")

            match = re.search(r"\d+\-\d+\-\d+", params.get("from", ""))
            if match:
                min_ = datetime.datetime.strptime(match.group(0), DATE_FORMAT)
            else:
                min_ = datetime.datetime.fromtimestamp(0)

            match = re.search(r"\d+\-\d+\-\d+", params.get("to", ""))
            if match:
                max_ = datetime.datetime.strptime(match.group(0), DATE_FORMAT)
            else:
                max_ = datetime.datetime.now()

            min_ = min_.replace(hour=0, minute=0, second=0, microsecond=0)
            max_ = max_.replace(hour=23, minute=59, second=59, microsecond=999999)

            for filepath in sorted(glob.glob(os.path.join(config.LOG_DIR, "*.log"))):
                filename = os.path.basename(filepath)
                if not re.search(r"\A\d{4}-\d{2}-\d{2}\.log\Z", filename):
                    continue
                try:
                    current = datetime.datetime.strptime(os.path.splitext(filename)[0], DATE_FORMAT)
                except:
                    if config.SHOW_DEBUG:
                        traceback.print_exc()
                else:
                    if min_ <= current <= max_:
                        timestamp = int(time.mktime(current.timetuple()))
                        size = os.path.getsize(filepath)
                        with open(filepath, "rb") as f:
                            content = f.read(io.DEFAULT_BUFFER_SIZE)
                            if size >= io.DEFAULT_BUFFER_SIZE:
                                total = 1.0 * content.count('\n') * size / io.DEFAULT_BUFFER_SIZE
                                counts[timestamp] = int(round(total / 100) * 100)
                            else:
                                counts[timestamp] = content.count('\n')

            return json.dumps(counts)

    class SSLReqHandler(ReqHandler):
        def setup(self):
            self.connection = self.request
            self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
            self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)

    try:
        if pem:
            server = SSLThreadingServer((address or '', int(port) if str(port or "").isdigit() else 0), pem, SSLReqHandler)
        else:
            server = ThreadingServer((address or '', int(port) if str(port or "").isdigit() else 0), ReqHandler)
    except Exception as ex:
        if "Address already in use" in str(ex):
            exit("[!] another instance already running")
        elif "Name or service not known" in str(ex):
            exit("[!] invalid configuration value for 'HTTP_ADDRESS' ('%s')" % config.HTTP_ADDRESS)
        elif "Cannot assign requested address" in str(ex):
            exit("[!] can't use configuration value for 'HTTP_ADDRESS' ('%s')" % config.HTTP_ADDRESS)
        else:
            raise

    print "[i] starting HTTP%s server at 'http%s://%s:%d/'" % ('S' if pem else "", 's' if pem else "", server.server_address[0], server.server_address[1])

    print "[o] running..."

    if join:
        server.serve_forever()
    else:
        thread = threading.Thread(target=server.serve_forever)
        thread.daemon = True
        thread.start()
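
Two json.dumps uses in the handler above are worth isolating: _counts returns a bare json.dumps(counts) body served with an application/json content type, and _check_ip optionally wraps the dump in a JSONP callback supplied by the client. A standalone sketch of the JSONP wrapping idea (the function and parameter names are illustrative):

import json

def jsonp_or_json(payload, callback=None):
    # Plain JSON when no callback is supplied, JSONP ("callback({...})") otherwise.
    body = json.dumps(payload)
    if callback:
        return "%s(%s)" % (callback, body)
    return body

print(jsonp_or_json({"ipcat": "cloudflare", "worst_asns": "false"}))
print(jsonp_or_json({"ipcat": "cloudflare"}, callback="handleResult"))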

Example 92

Project: neo4j-rest-client Source File: query.py
    def _plot_graph(self, graph, title=None, width=None, height=None):
        """
        Return an HTML representation for a particular QuerySequence.
        Mainly for IPython Notebook.
        """
        if not self._elements_row and not self._elements_graph:
            raise ValueError('Unable to display the graph or the table')
        title = title or self.q
        width = width or json.dumps(None)
        height = height or 300
        d3_uuid = text_type(uuid.uuid1())
        d3_graph = self._transform_graph_to_d3(graph)
        d3_id = "d3_id_" + d3_uuid
        d3_title = title
        d3_container_id = d3_id + "_d3c"
        style = """
        #{d3_id} path.link {{
            fill: none;
            stroke-width: 1.5px;
        }}
        #{d3_id} .node {{
            /*fill: #ccc;*/
            stroke: #333;
            stroke-width: 1.5px;
        }}
        #{d3_id} text {{
            font: 10px sans-serif;
            pointer-events: none;
        }}
        #{d3_id} text.shadow {{
            stroke: #fff;
            stroke-width: 3px;
            stroke-opacity: .8;
        }}
        #{d3_id} .node.sticky {{
            /* stroke-width: 2px; */
        }}
        """.format(d3_id=d3_id)
        js = """
        var links = graph.links;
        var nodes = graph.nodes;

        // Compute the distinct nodes from the links.
        links.forEach(function(link) {
            link.source = (nodes[link.source] ||
                           (nodes[link.source] = {name: link.source}));
            link.target = (nodes[link.target] ||
                           (nodes[link.target] = {name: link.target}));
        });

        var w = width || $(container).width(), h = height;

        var force = d3.layout.force()
            .nodes(d3.values(nodes))
            .links(links)
            .size([w, h])
            .linkDistance(60)
            .charge(-300)
            .on("tick", tick)
            .start();

        var svg = d3.select(container).append("svg:svg")
            .attr("width", w)
            .attr("height", h);

        // Per-type markers, as they don't inherit styles.
        svg.append("svg:defs").selectAll("marker")
            .data(["arrow"])
            .enter().append("svg:marker")
            .attr("id", String)
            .attr("viewBox", "0 -5 10 10")
            .attr("refX", 15)
            .attr("refY", -1.5)
            .attr("markerWidth", 6)
            .attr("markerHeight", 6)
            .attr("orient", "auto")
            .append("svg:path")
            .attr("d", "M0,-5L10,0L0,5");

        var path = svg.append("svg:g").selectAll("path")
            .data(force.links())
            .enter().append("svg:path")
            .attr("class", function(d) { return "link " + d.stroke; })
            .attr("stroke", function(d) { return d.stroke; })
            .attr("marker-end", function(d) { return "url(#arrow)"; });

        var circle = svg.append("svg:g").selectAll("circle")
            .data(force.nodes())
            .enter().append("svg:circle")
            .attr("fill", function(d) { return d.fill; })
            .attr("r", 6)
            .attr("class", "node")
            .call(force.drag)
            .on("mousedown", function(d) {
                d.fixed = true;
                d3.select(this).classed("sticky", true);
            });

        var text = svg.append("svg:g").selectAll("g")
            .data(force.nodes())
            .enter().append("svg:g");

        // A copy of the text with a thick white stroke for legibility.
        text.append("svg:text")
            .attr("x", 8)
            .attr("y", ".31em")
            .attr("class", "shadow")
            .text(function(d) { return d.label; });

        text.append("svg:text")
            .attr("x", 8)
            .attr("y", ".31em")
            .attr("class", "front")
            .text(function(d) { return d.label; });

        // Use elliptical arc path segments to doubly-encode directionality.
        function tick() {
            path.attr("d", function(d) {
                var dx = d.target.x - d.source.x,
                dy = d.target.y - d.source.y,
                dr = Math.sqrt(dx * dx + dy * dy);
                return ("M" + d.source.x + "," + d.source.y + "A"
                        + dr + "," + dr + " 0 0,1 " + d.target.x + ","
                        + d.target.y);
            });

            circle.attr("transform", function(d) {
                return "translate(" + d.x + "," + d.y + ")";
            });

            text.attr("transform", function(d) {
                return "translate(" + d.x + "," + d.y + ")";
            });
        }

        // Display options
        var display = $(container + "_display");
        graph.properties.forEach(function (property) {
            var option = $("<OPTION/>");
            option.text(property);
            option.attr("value", property);
            display.append(option);
        });
        display.on("change", function () {
            var selected = $(this).find(":selected").val(),
                displayFunc;
            if (selected.length !== 0) {
                displayFunc = function(d) {
                    return d.properties[selected];
                }
            } else {
                displayFunc = function(d) {
                    return d.label;
                }
            }
            text.select("text.front").text(displayFunc);
            text.select("text.shadow").text(displayFunc);
        });
        """
        return ("""
        <style type="text/css">
        {style}
        </style>

        <div class="accordion">
            <div class="accordion-group">
                <div class="accordion-heading">
                    <a class="accordion-toggle collapsed"
                       data-toggle="collapse" data-parent=""
                       href="#{d3_id}">
                        {d3_title}
                    </a>
                </div>
                <div id="{d3_id}" class="accordion-body in collapse">
                    <div class="accordion-inner">
                        <div id="{d3_container_id}">
                            <select id="{d3_container_id}_display">
                                <option value="">ID</option>
                            </select>
                        </div>
                    </div>
                </div>
            </div>
        </div>

        <script>
            var neo4jrestclient = window.neo4jrestclient || {{}};
            neo4jrestclient['{d3_uuid}'] = {{}};
            neo4jrestclient['{d3_uuid}'].graph = {d3_graph};
            neo4jrestclient['{d3_uuid}'].container_id = "{d3_container_id}";
            neo4jrestclient['{d3_uuid}'].container = "#{d3_container_id}";
            neo4jrestclient['{d3_uuid}'].render = function () {{
                (function (graph, container, width, height) {{
                    {js}
                }})(
                    neo4jrestclient['{d3_uuid}'].graph,
                    neo4jrestclient['{d3_uuid}'].container,
                    {width},
                    {height}
                );
            }}
            if (!window.d3) {{
                $.getScript(
                    "//d3js.org/d3.v2.js?2.9.1",
                    neo4jrestclient['{d3_uuid}'].render
                );
            }} else {{
                neo4jrestclient['{d3_uuid}'].render();
            }}
        </script>
        """.format(
            style=style,
            js=js,
            d3_graph=json.dumps(d3_graph),
            d3_id=d3_id,
            d3_uuid=d3_uuid,
            d3_title=d3_title,
            d3_container_id=d3_container_id,
            width=width,
            height=height,
        ))
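
A minimal sketch, outside the project and with a hypothetical payload, of the json.dumps call above: the graph dict is serialized and substituted into the HTML/JavaScript template so the browser receives it as an object literal.

import json

d3_graph = {"nodes": {}, "links": [], "properties": []}  # hypothetical payload
template = "<script>var graph = {d3_graph};</script>"
snippet = template.format(d3_graph=json.dumps(d3_graph))
print(snippet)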

Example 93

Project: pyvac Source File: request.py
    def render(self):
        try:
            form_date_from = self.request.params.get('date_from')
            if ' - ' not in form_date_from:
                msg = 'Invalid format for period.'
                self.request.session.flash('error;%s' % msg)
                return HTTPFound(location=route_url('home', self.request))

            dates = self.request.params.get('date_from').split(' - ')
            date_from = datetime.strptime(dates[0], '%d/%m/%Y')
            date_to = datetime.strptime(dates[1], '%d/%m/%Y')
            breakdown = self.request.params.get('breakdown')

            # retrieve holidays for user so we can remove them from selection
            holidays = get_holiday(self.user, year=date_from.year,
                                   use_datetime=True)

            submitted = [d for d in daterange(date_from, date_to)
                         if d.isoweekday() not in [6, 7]
                         and d not in holidays]
            days = float(len(submitted))
            pool = None

            days_diff = (date_to - date_from).days
            if days_diff < 0:
                msg = 'Invalid format for period.'
                self.request.session.flash('error;%s' % msg)
                return HTTPFound(location=route_url('home', self.request))

            if (date_to == date_from) and days > 1:
                # same day requested, so only a one-day (or shorter) duration is allowed
                msg = 'Invalid value for days.'
                self.request.session.flash('error;%s' % msg)
                return HTTPFound(location=route_url('home', self.request))

            if days <= 0:
                msg = 'Invalid value for days.'
                self.request.session.flash('error;%s' % msg)
                return HTTPFound(location=route_url('home', self.request))

            # check if user is sudoed
            check_user = self.get_target_user(self.user)
            # retrieve future requests for user so we can check overlap
            futures = [d for req in
                       Request.by_user_future(self.session, check_user)
                       for d in daterange(req.date_from, req.date_to)]
            intersect = set(futures) & set(submitted)
            if intersect:
                err_intersect = True
                # must check for a false positive in case of half-day requests
                if len(intersect) == 1:
                    # only one date in conflict, check if it's for a half-day
                    dt = intersect.pop()
                    # retrieve the request for this date
                    req = [req for req in
                           Request.by_user_future(self.session, check_user)
                           for d in daterange(req.date_from, req.date_to)
                           if d == dt]
                    if len(req) < 2:
                        req = req.pop()
                        if req.label != breakdown:
                            # intersect is false, it's not the same halfday
                            err_intersect = False
                            log.debug('False positive on intersect '
                                      'for %s (%s): request: %d (%s)' %
                                      (date_from, breakdown, req.id,
                                       req.label))

                if err_intersect:
                    msg = 'Invalid period: days already requested.'
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))

            vac_type = VacationType.by_id(self.session,
                                          int(self.request.params.get('type')))

            if not self.user.is_admin:
                # check if vacation requires user role
                if (vac_type.visibility
                        and self.user.role not in vac_type.visibility):
                    msg = 'You are not allowed to use type: %s' % vac_type.name
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))

            # label field is used when requesting half day
            label = u''
            if breakdown != 'FULL':
                # handle half day
                if (days > 1):
                    msg = ('AM/PM option must be used only when requesting a '
                           'single day.')
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))
                else:
                    days = 0.5
                    label = unicode(breakdown)

            # check RTT usage
            if vac_type.name == u'RTT':
                pool = rtt_data = check_user.get_rtt_usage(self.session)
                if rtt_data is not None and rtt_data['left'] <= 0:
                    msg = 'No RTT left to take.'
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))
                # check that we have enough RTT to take
                if rtt_data is not None and days > rtt_data['left']:
                    msg = 'You only have %s RTT to use.' % rtt_data['left']
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))
                # check that we request vacations in the allowed year
                if rtt_data is not None and (
                        date_from.year != rtt_data['year'] or
                        date_to.year != rtt_data['year']):
                    msg = ('RTT can only be used for year %d.' %
                           rtt_data['year'])
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))

            message = None
            # check Exceptionnel mandatory field
            if vac_type.name == u'Exceptionnel':
                message = self.request.params.get('exception_text')
                message = message.strip() if message else message
                if not message:
                    msg = ('You must provide a reason for %s requests' %
                           vac_type.name)
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))
                # check size
                if len(message) > 140:
                    msg = ('%s reason must not exceed 140 characters' %
                           vac_type.name)
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))

            # check Récupération reason field
            if vac_type.name == u'Récupération':
                message = self.request.params.get('exception_text')
                message = message.strip() if message else message
                # check size
                if message and len(message) > 140:
                    msg = ('%s reason must not exceed 140 characters' %
                           vac_type.name)
                    self.request.session.flash('error;%s' % msg)
                    return HTTPFound(location=route_url('home', self.request))

            # check CP usage
            if vac_type.name == u'CP':
                cp_class = check_user.get_cp_class(self.session)
                pool = check_user.get_cp_usage(self.session)

                if cp_class:
                    # only FR and LU have a dedicated CP class to use

                    # convert days to hours for LU if needed
                    days = cp_class.convert_days(days)

                    error = cp_class.validate_request(check_user, pool, days,
                                                      date_from, date_to)
                    if error is not None:
                        self.request.session.flash('error;%s' % error)
                        return HTTPFound(location=route_url('home',
                                                            self.request))

                if pool:
                    # remove expire datetimes as they are not JSON serializable
                    if 'n_1' in pool:
                        pool['n_1'].pop('expire', None)
                    pool['acquis'].pop('expire', None)
                    pool['restant'].pop('expire', None)

            # create the request
            # default values
            target_status = u'PENDING'
            target_user = self.user
            target_notified = False

            sudo_use = False
            if self.user.is_admin:
                sudo_user_id = int(self.request.params.get('sudo_user'))
                if sudo_user_id != -1:
                    user = User.by_id(self.session, sudo_user_id)
                    if user:
                        sudo_use = True
                        target_user = user
                        target_status = u'APPROVED_ADMIN'
                        target_notified = True

            # save pool status when making the request
            if pool:
                pool_status = json.dumps(pool)
            else:
                pool_status = json.dumps({})

            request = Request(date_from=date_from,
                              date_to=date_to,
                              days=days,
                              vacation_type=vac_type,
                              status=target_status,
                              user=target_user,
                              notified=target_notified,
                              label=label,
                              message=message,
                              pool_status=pool_status,
                              )
            self.session.add(request)
            self.session.flush()

            if request and not sudo_use:
                msg = 'Request sent to your manager.'
                self.request.session.flash('info;%s' % msg)
                # call celery task directly, do not wait for polling
                from celery.registry import tasks
                from celery.task import subtask
                req_task = tasks['worker_pending']
                data = {'req_id': request.id}
                subtask(req_task).apply_async(kwargs={'data': data},
                                              countdown=5)
                log.info('scheduling task worker_pending for %s' % data)

            if request and sudo_use:
                settings = self.request.registry.settings
                if 'pyvac.celery.yaml' in settings:
                    with open(settings['pyvac.celery.yaml']) as fdesc:
                        Conf = yaml.load(fdesc, YAMLLoader)
                    caldav_url = Conf.get('caldav').get('url')
                    request.add_to_cal(caldav_url)
                    msg = 'Request added to calendar and DB.'
                    self.request.session.flash('info;%s' % msg)

        except Exception as exc:
            log.error(exc)
            msg = ('An error has occurred while processing this request: %r'
                   % exc)
            self.request.session.flash('error;%s' % msg)

        return HTTPFound(location=route_url('home', self.request))
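
A minimal sketch, with assumed field names, of how the example snapshots the CP pool before saving the request: values that json cannot serialize (the expire datetimes) are stripped, then the dict, or an empty object, is dumped into the pool_status string.

import json
from datetime import datetime

pool = {"acquis": {"left": 12.5, "expire": datetime(2016, 5, 31)},
        "restant": {"left": 3.0, "expire": datetime(2016, 5, 31)}}
for bucket in pool.values():
    bucket.pop("expire", None)  # datetime objects are not JSON serializable
pool_status = json.dumps(pool) if pool else json.dumps({})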

Example 94

Project: exocortex-halo Source File: server.py
Function: do_put
    def do_PUT(self):
        content = ""
        content_length = 0
        arguments = {}
        response = ""
        sentences = []
        all_keys_found = True

        # Variables that hold data from the database accesses.
        row = ""
        name = ""
        bots_api_key = ""
        learn = ""

        if self.path == "/learn":
            logger.info("A client has contacted the /learn API rail.")

            # For debugging purposes, dump the headers the server gets from
            # the client.
            logging.debug("List of headers in the HTTP request:")
            for key in self.headers:
                logging.debug("    " + key + " - " + self.headers[key])

            # Read any content sent from the client.  If there is no
            # "Content-Length" header, something screwy is happening, in which
            # case we fire an error.
            content = self._read_content()
            if not content:
                logger.debug("Client sent zero-lenth content.")
                return

            # Ensure that the client sent JSON and not something else.
            if not self._ensure_json():
                return

            # Try to deserialize the JSON sent from the client.  If we can't,
            # pitch a fit.
            arguments = self._deserialize_content(content)
            if not arguments:
                return

            # Normalize the keys in the JSON to lowercase.
            arguments = self._normalize_keys(arguments)

            # Ensure that all of the required keys are in the JSON document.
            if not self._ensure_all_keys(arguments):
                return

            # See if the bot is in the database.
            row = self._bot_in_database(arguments['botname'],
                arguments['apikey'])
            if not row:
                self._send_http_response(404, '{"response": "Bot not found.", "id": 404}')
                return

            # Take apart the response.  This is a little messy but necessary
            # to get at the very end of the tuple.
            (name, bots_api_key, learn) = row[0]

            # If the bot does not have permission to teach the Markov brain,
            # send back an error.  Again, this is a little messy but it's
            # easier to get to a Bool to work with than it is playing with
            # identifying single letters.  And I'm sick while I'm writing this.
            learn = learn.lower()
            if learn == 'n':
                learn = False
            if not learn :
                logger.info("Bot does not have permission to update the Markov brain.")
                self._send_http_response(401, '{"response": "Bot does not have permission to update the brain.", "id": 401}')
                return

            # Run the text through the Markov brain to update it and return a
            # success message.
            sentence_ends = re.compile('[.!?]')
            sentences = sentence_ends.split(arguments['stimulus'])

            # Get rid of the spurious entry at the end of the array...
            sentences.pop()
            logger.debug("List of sentences to learn from: " + str(sentences))

            # Run the sentences through the markov brain.
            if not len(sentences):
                logger.info("No sentences to update the Markov brain.")
                self._send_http_response(400, '{"response": "failed", "id": 400}')
                return
            for i in sentences:
                response = brain.learn(i)
            logger.info("Bot has updated the Markov brain.")

            temp = {}
            temp['response'] = response
            temp['id'] = 200
            logger.debug("Sending back to client: " + str(temp))

            self._send_http_response(200, json.dumps(temp))
            return

        if self.path == "/register":
            logger.info("A client has contacted the /register API rail.  This makes things somewhat interesting.")

            # For debugging purposes, dump the headers the server gets from
            # the client.
            logging.debug("List of headers in the HTTP request:")
            for key in self.headers:
                logging.debug("    " + key + " - " + self.headers[key])

            # Read any content sent from the client.  If there is no
            # "Content-Length" header, something screwy is happening, in which
            # case we fire an error.
            content = self._read_content()
            if not content:
                logger.debug("Client sent zero-lenth content.")
                return

            # Ensure that the client sent JSON and not something else.
            if not self._ensure_json():
                return

            # Try to deserialize the JSON sent from the client.  If we can't,
            # pitch a fit.
            arguments = self._deserialize_content(content)
            if not arguments:
                return

            # Ensure that the management API key was sent in an HTTP header.
            # If it wasn't, abort.
            if "x-api-key" not in self.headers.keys():
                logger.info("User tried to /register a bot but didn't include the management API key.")
                self._send_http_response(401, '{"result": null, "error": "Management API key not included.", "id": 401}')
                return

            # Check the included management API key against the one in the
            # server's config file.
            if self.headers['x-api-key'] != apikey:
                logger.info("User tried to /register a bot with an incorrect management API key.")
                self._send_http_response(401, '{"result": null, "error": "Incorrect management API key.", "id": 401}')
                return

            # Normalize the keys in the JSON to lowercase.
            arguments = self._normalize_keys(arguments)

            # Ensure that all of the required keys are in the JSON document.
            if not self._ensure_all_keys(arguments):
                return

            # There are additional JSON keys that have to be present for this
            # API rail.  This can probably be split out into a separate helper
            # method later.
            if "respond" not in arguments.keys():
                all_keys_found = False
            if "learn" not in arguments.keys():
                all_keys_found = False
            if not all_keys_found:
                logger.debug('{"result": null, "error": "All required keys were not found in the JSON docuement.  Look at the online help.", "id": 400}')
                self._send_http_response(400, '{"result": null, "error": "All required keys were not found in the JSON docuement.  Look at the online help.", "id": 400}')
                return

            # Ensure that the values of the respond and learn keys are either
            # Y or N.  Start by normalizing the values before testing them.
            valid_responses = ['Y', 'N']
            arguments['respond'] = arguments['respond'].upper()
            if arguments['respond'] not in valid_responses:
                self._send_http_response(400, '{"result": null, "error": "The only valid values for respond are Y or N.", "id": 400}')
                return

            arguments['learn'] = arguments['learn'].upper()
            if arguments['learn'] not in valid_responses:
                self._send_http_response(400, '{"result": null, "error": "The only valid values for learn are Y or N.", "id": 400}')
                return

            # See if the bot is in the database already.  Send back an error
            # 409 (Conflict) if it is.
            row = self._bot_in_database(arguments['botname'],
                arguments['apikey'])
            if row:
                logger.info("Bot already in database.")
                self._send_http_response(409, '{"response": "Bot already in database.", "id": 409}')
                return

            # Add the bot to the database.
            if self._add_bot_to_database(arguments['botname'],
                arguments['apikey'], arguments['respond'], arguments['learn']):
                self._send_http_response(200, '{"response": "success", "id": 200}')
            else:
                self._send_http_response(400, '{"response": "failure", "id": 400}')
            return

        if self.path == "/deregister":
            logger.info("A client has contacted the /deregister API rail.  This makes things somewhat interesting.")

            # For debugging purposes, dump the headers the server gets from
            # the client.
            logging.debug("List of headers in the HTTP request:")
            for key in self.headers:
                logging.debug("    " + key + " - " + self.headers[key])

            # Read any content sent from the client.  If there is no
            # "Content-Length" header, something screwy is happening, in which
            # case we fire an error.
            content = self._read_content()
            if not content:
                logger.debug("Client sent zero-lenth content.")
                return

            # Ensure that the client sent JSON and not something else.
            if not self._ensure_json():
                return

            # Try to deserialize the JSON sent from the client.  If we can't,
            # pitch a fit.
            arguments = self._deserialize_content(content)
            if not arguments:
                return

            # Ensure that the management API key was sent in an HTTP header.
            # If it wasn't, abort.
            if "x-api-key" not in self.headers.keys():
                logger.info("User tried to /deregister a bot but didn't include the management API key.")
                self._send_http_response(401, '{"result": null, "error": "No management API key.", "id": 401}')
                return

            # Check the included management API key against the one in the
            # server's config file.
            if self.headers['x-api-key'] != apikey:
                logger.info("User tried to /deregister a bot with an incorrect management API key.")
                self._send_http_response(401, '{"result": null, "error": "Incorrect management API key.", "id": 401}')
                return

            # Normalize the keys in the JSON to lowercase.
            arguments = self._normalize_keys(arguments)

            # Ensure that all of the required keys are in the JSON document.
            if not self._ensure_all_keys(arguments):
                logger.debug('{"result": null, "error": "All required keys were not found in the JSON docuement.  Look at the online help.", "id": 400}')
                self._send_http_response(400, '{"result": null, "error": "All required keys were not found in the JSON docuement.  Look at the online help.", "id": 400}')
                return

            # See if the bot is not in the database.  Send back an error 404
            # (Not Found) if it's not.
            row = self._bot_in_database(arguments['botname'],
                arguments['apikey'])
            if not row:
                logger.info("Bot does not exist in database.")
                self._send_http_response(404, '{"response": "failure", "id": 404}')
                return

            # Delete the bot from the database.
            if self._delete_bot_from_database(arguments['botname'],
                arguments['apikey']):
                self._send_http_response(200, '{"response": "success", "id": 200}')
            else:
                self._send_http_response(404, '{"response": "failure", "id": 404}')
            return

        # If we've fallen through to here, bounce.
        return
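
A minimal sketch, using a hypothetical helper rather than the project's classes, of the /learn success path above: the reply is a small dict turned into the HTTP response body with json.dumps.

import json

def build_learn_response(brain_reply):
    # Body passed to _send_http_response(200, ...) in the example above.
    return json.dumps({"response": brain_reply, "id": 200})

print(build_learn_response("Learned 3 sentences."))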

Example 95

Project: webrecorder Source File: logincontroller.py
    def init_routes(self):
        # Login/Logout
        # ============================================================================
        @self.app.get(LOGIN_PATH)
        @self.jinja2_view('login.html')
        def login():
            self.redirect_home_if_logged_in()
            resp = {}
            self.fill_anon_info(resp)
            return resp

        @self.app.get(LOGIN_MODAL_PATH)
        @self.jinja2_view('login_modal.html')
        def login_modal():
            #self.redirect_home_if_logged_in()
            resp = {}
            self.fill_anon_info(resp)
            return resp

        @self.app.post(LOGIN_PATH)
        def login_post():
            self.redirect_home_if_logged_in()

            """Authenticate users"""
            username = self.post_get('username')
            password = self.post_get('password')

            try:
                move_info = self.get_move_temp_info()
            except ValidationException as ve:
                self.flash_message('Login Failed: ' + str(ve))
                self.redirect('/')
                return

            # if a collection is being moved, auth user
            # and then check for available space
            # if not enough space, don't continue with login
            if move_info and (self.manager.cork.
                              is_authenticate(username, password)):

                if not self.manager.has_space_for_new_coll(username,
                                                           move_info['from_user'],
                                                           'temp'):
                    self.flash_message('Sorry, not enough space to import this Temporary Collection into your account.')
                    self.redirect('/')
                    return

            if not self.manager.cork.login(username, password):
                self.flash_message('Invalid Login. Please Try Again')
                redir_to = LOGIN_PATH
                self.redirect(redir_to)

            sesh = self.get_session()
            sesh.curr_user = username

            if move_info:
                try:
                    new_title = self.manager.move_temp_coll(username, move_info)
                    if new_title:
                        self.flash_message('Collection <b>{0}</b> created!'.format(new_title), 'success')
                except:
                    import traceback
                    traceback.print_exc()

            remember_me = (self.post_get('remember_me') == '1')
            sesh.logged_in(remember_me)

            temp_prefix = self.manager.temp_prefix

            redir_to = request.headers.get('Referer')
            host = self.get_host()

            if redir_to and redir_to.startswith(host):
                redir_to = redir_to[len(host):]

            if not redir_to or redir_to.startswith(('/' + temp_prefix,
                                                    '/_')):
                redir_to = self.get_path(username)

            if self.content_host:
                path = '/_clear_session?path=' + quote(redir_to)
                self.redir_host(self.content_host, path)
            else:
                self.redirect(redir_to)

        @self.app.get(LOGOUT_PATH)
        def logout():
            redir_to = '/'

            if self.content_host:
                path = '/_clear_session?path=' + quote(redir_to)
                url = request.environ['wsgi.url_scheme'] + '://' + self.content_host
                url += path
                redir_to = url

            self.manager.cork.logout(success_redirect=redir_to, fail_redirect=redir_to)


        # Register/Invite/Confirm
        # ============================================================================
        @self.app.get(REGISTER_PATH)
        @self.jinja2_view('register.html')
        def register():
            self.redirect_home_if_logged_in()

            if not self.invites_enabled:
                resp = {'email': '',
                        'skip_invite': True}

                self.fill_anon_info(resp)

                return resp

            invitecode = request.query.getunicode('invite', '')
            email = ''

            try:
                email = self.manager.is_valid_invite(invitecode)
            except ValidationException as ve:
                self.flash_message(str(ve))

            return { 'email': email,
                     'invite': invitecode}

        @self.app.post(INVITE_PATH)
        def invite_post():
            self.redirect_home_if_logged_in()

            email = self.post_get('email')
            name = self.post_get('name')
            desc = self.post_get('desc')
            if self.manager.save_invite(email, name, desc):
                self.flash_message('Thank you for your interest! We will send you an invite to try webrecorder.io soon!', 'success')
                self.redirect('/')
            else:
                self.flash_message('Oops, something went wrong, please try again')
                self.redirect(REGISTER_PATH)


        @self.app.post(REGISTER_PATH)
        def register_post():
            self.redirect_home_if_logged_in()

            email = self.post_get('email')
            username = self.post_get('username')
            password = self.post_get('password')
            name = self.post_get('name')
            confirm_password = self.post_get('confirmpassword')
            invitecode = self.post_get('invite')

            redir_to = REGISTER_PATH

            if username.startswith(self.manager.temp_prefix):
                self.flash_message('Sorry, this is not a valid username')
                self.redirect(redir_to)
                return

            try:
                move_info = self.get_move_temp_info()
            except ValidationException as ve:
                self.flash_message('Registration Failed: ' + str(ve))
                self.redirect('/')
                return

            if self.invites_enabled:
                try:
                    val_email = self.manager.is_valid_invite(invitecode)
                    if val_email != email:
                        raise ValidationException('Sorry, this invite can only be used with email: {0}'.format(val_email))
                except ValidationException as ve:
                    self.flash_message(str(ve))
                    self.redirect(redir_to)
                    return

                redir_to += '?invite=' + invitecode

            try:
                self.manager.validate_user(username, email)
                self.manager.validate_password(password, confirm_password)

                #TODO: set default host?
                host = self.get_host()

                desc = {'name': name}

                if move_info:
                    desc['move_info'] = move_info

                desc = json.dumps(desc)

                self.manager.cork.register(username, password, email, role='archivist',
                              max_level=50,
                              subject='webrecorder.io Account Creation',
                              email_template='templates/emailconfirm.html',
                              description=desc,
                              host=host)

                self.flash_message('A confirmation e-mail has been sent to <b>{0}</b>. \
    Please check your e-mail to complete the registration!'.format(username), 'warning')

                redir_to = '/'
                if self.invites_enabled:
                    self.manager.delete_invite(email)

            except ValidationException as ve:
                self.flash_message(str(ve))

            except Exception as ex:
                self.flash_message('Registration failed: ' + str(ex))

            self.redirect(redir_to)

        # Validate Registration
        @self.app.get(VAL_REG_PATH)
        def val_reg(reg):
            self.redirect_home_if_logged_in()

            try:
                username, first_coll = self.manager.create_user(reg)

                #self.flash_message('<b>{0}</b>, welcome to your new archive home page! \
    #Click the <b>Create New Collection</b> button to create your first collection. Happy Archiving!'.format(username), 'success')
                #redir_to = '/' + username

                msg = '<b>{0}</b>, you are now logged in!'

                if first_coll == 'Default Collection':
                    msg += ' The <b>{1}</b> collection has been created for you, and you can begin recording by entering a url below!'
                else:
                    msg += ' The <b>{1}</b> collection has been permanently saved for you, and you can continue recording by entering a url below!'

                self.flash_message(msg.format(username, first_coll), 'success')
                redir_to = '/'

            except ValidationException:
                self.flash_message('The user <b>{0}</b> is already registered. \
    If this is you, please login or click forgot password, \
    or register a new account.'.format(username))
                redir_to = LOGIN_PATH

            except Exception as e:
                import traceback
                traceback.print_exc()
                self.flash_message('Sorry, this is not a valid registration code. Please try again.')
                redir_to = REGISTER_PATH

            self.redirect(redir_to)


        # Forgot Password
        # ============================================================================
        @self.app.get(FORGOT_PATH)
        @self.jinja2_view('forgot.html')
        def forgot():
            self.redirect_home_if_logged_in()
            return {}


        @self.app.post(FORGOT_PATH)
        def forgot_submit():
            self.redirect_home_if_logged_in()

            email = self.post_get('email')
            username = self.post_get('username')
            host = self.get_host()

            try:
                self.manager.cork.send_password_reset_email(username=username,
                                          email_addr=email,
                                          subject='webrecorder.io password reset confirmation',
                                          email_template='templates/emailreset.html',
                                          host=host)

                self.flash_message('A password reset e-mail has been sent to your e-mail!', 'success')
                redir_to = '/'
            except Exception as e:
                self.flash_message(str(e))
                redir_to = FORGOT_PATH

            self.redirect(redir_to)


        # Reset Password
        # ============================================================================
        @self.app.get(RESET_PATH)
        @self.jinja2_view('reset.html')
        def resetpass(resetcode):
            self.redirect_home_if_logged_in()

            try:
                username = request.query['username']
                result = {'username': username,
                          'resetcode': resetcode}

            except Exception as e:
                print(e)
                self.flash_message('Invalid password reset attempt. Please try again')
                self.redirect(FORGOT_PATH)

            return result


        @self.app.post(RESET_POST)
        def do_reset():
            self.redirect_home_if_logged_in()

            username = self.post_get('username')
            resetcode = self.post_get('resetcode')
            password = self.post_get('password')
            confirm_password = self.post_get('confirmpassword')

            try:
                self.manager.validate_password(password, confirm_password)

                self.manager.cork.reset_password(resetcode, password)

                self.flash_message('Your password has been successfully reset! \
    You can now <b>login</b> with your new password!', 'success')

                redir_to = LOGIN_PATH

            except ValidationException as ve:
                self.flash_message(str(ve))
                redir_to = RESET_PATH_FILL.format(resetcode, username)

            except Exception as e:
                self.flash_message('Invalid password reset attempt. Please try again')
                redir_to = FORGOT_PATH

            self.redirect(redir_to)


        # Update Password
        @self.app.post(UPDATE_PASS_PATH)
        def update_password():
            self.redirect_home_if_logged_in()

            self.manager.cork.require(role='archivist', fail_redirect=LOGIN_PATH)

            curr_password = self.post_get('curr_password')
            password = self.post_get('password')
            confirm_password = self.post_get('confirmpassword')

            try:
                self.manager.update_password(curr_password, password, confirm_password)
                self.flash_message('Password Updated', 'success')
            except ValidationException as ve:
                self.flash_message(str(ve))

            user = self.manager.get_curr_user()
            self.redirect(self.get_path(user) + SETTINGS)
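
A minimal sketch, with assumed values, of the registration step in register_post() above: the description dict, optionally carrying move_info, is serialized with json.dumps before being handed to cork.register().

import json

desc = {"name": "Jane Doe"}
move_info = {"from_user": "temp-abc"}  # hypothetical temporary-collection info
if move_info:
    desc["move_info"] = move_info
description = json.dumps(desc)  # passed as description= to cork.register()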

Example 96

Project: pecan Source File: test_rest.py
    def test_basic_rest(self):

        class OthersController(object):

            @expose()
            def index(self):
                return 'OTHERS'

            @expose()
            def echo(self, value):
                return str(value)

        class ThingsController(RestController):
            data = ['zero', 'one', 'two', 'three']

            _custom_actions = {'count': ['GET'], 'length': ['GET', 'POST']}

            others = OthersController()

            @expose()
            def get_one(self, id):
                return self.data[int(id)]

            @expose('json')
            def get_all(self):
                return dict(items=self.data)

            @expose()
            def length(self, id, value=None):
                length = len(self.data[int(id)])
                if value:
                    length += len(value)
                return str(length)

            @expose()
            def get_count(self):
                return str(len(self.data))

            @expose()
            def new(self):
                return 'NEW'

            @expose()
            def post(self, value):
                self.data.append(value)
                response.status = 302
                return 'CREATED'

            @expose()
            def edit(self, id):
                return 'EDIT %s' % self.data[int(id)]

            @expose()
            def put(self, id, value):
                self.data[int(id)] = value
                return 'UPDATED'

            @expose()
            def get_delete(self, id):
                return 'DELETE %s' % self.data[int(id)]

            @expose()
            def delete(self, id):
                del self.data[int(id)]
                return 'DELETED'

            @expose()
            def reset(self):
                return 'RESET'

            @expose()
            def post_options(self):
                return 'OPTIONS'

            @expose()
            def options(self):
                abort(500)

            @expose()
            def other(self):
                abort(500)

        class RootController(object):
            things = ThingsController()

        # create the app
        app = TestApp(make_app(RootController()))

        # test get_all
        r = app.get('/things')
        assert r.status_int == 200
        assert r.body == b_(dumps(dict(items=ThingsController.data)))

        # test get_one
        for i, value in enumerate(ThingsController.data):
            r = app.get('/things/%d' % i)
            assert r.status_int == 200
            assert r.body == b_(value)

        # test post
        r = app.post('/things', {'value': 'four'})
        assert r.status_int == 302
        assert r.body == b_('CREATED')

        # make sure it works
        r = app.get('/things/4')
        assert r.status_int == 200
        assert r.body == b_('four')

        # test edit
        r = app.get('/things/3/edit')
        assert r.status_int == 200
        assert r.body == b_('EDIT three')

        # test put
        r = app.put('/things/4', {'value': 'FOUR'})
        assert r.status_int == 200
        assert r.body == b_('UPDATED')

        # make sure it works
        r = app.get('/things/4')
        assert r.status_int == 200
        assert r.body == b_('FOUR')

        # test put with _method parameter and GET
        r = app.get('/things/4?_method=put', {'value': 'FOUR!'}, status=405)
        assert r.status_int == 405

        # make sure it works
        r = app.get('/things/4')
        assert r.status_int == 200
        assert r.body == b_('FOUR')

        # test put with _method parameter and POST
        r = app.post('/things/4?_method=put', {'value': 'FOUR!'})
        assert r.status_int == 200
        assert r.body == b_('UPDATED')

        # make sure it works
        r = app.get('/things/4')
        assert r.status_int == 200
        assert r.body == b_('FOUR!')

        # test get delete
        r = app.get('/things/4/delete')
        assert r.status_int == 200
        assert r.body == b_('DELETE FOUR!')

        # test delete
        r = app.delete('/things/4')
        assert r.status_int == 200
        assert r.body == b_('DELETED')

        # make sure it works
        r = app.get('/things')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 4

        # test delete with _method parameter and GET
        r = app.get('/things/3?_method=DELETE', status=405)
        assert r.status_int == 405

        # make sure it works
        r = app.get('/things')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 4

        # test delete with _method parameter and POST
        r = app.post('/things/3?_method=DELETE')
        assert r.status_int == 200
        assert r.body == b_('DELETED')

        # make sure it works
        r = app.get('/things')
        assert r.status_int == 200
        assert len(loads(r.body.decode())['items']) == 3

        # test "RESET" custom action
        r = app.request('/things', method='RESET')
        assert r.status_int == 200
        assert r.body == b_('RESET')

        # test "RESET" custom action with _method parameter
        r = app.get('/things?_method=RESET')
        assert r.status_int == 200
        assert r.body == b_('RESET')

        # test the "OPTIONS" custom action
        r = app.request('/things', method='OPTIONS')
        assert r.status_int == 200
        assert r.body == b_('OPTIONS')

        # test the "OPTIONS" custom action with the _method parameter
        r = app.post('/things', {'_method': 'OPTIONS'})
        assert r.status_int == 200
        assert r.body == b_('OPTIONS')

        # test the "other" custom action
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            r = app.request('/things/other', method='MISC', status=405)
            assert r.status_int == 405

        # test the "other" custom action with the _method parameter
        r = app.post('/things/other', {'_method': 'MISC'}, status=405)
        assert r.status_int == 405

        # test the "others" custom action
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            r = app.request('/things/others/', method='MISC')
            assert r.status_int == 200
            assert r.body == b_('OTHERS')

        # test the "others" custom action missing trailing slash
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            r = app.request('/things/others', method='MISC', status=302)
            assert r.status_int == 302

        # test the "others" custom action with the _method parameter
        r = app.get('/things/others/?_method=MISC')
        assert r.status_int == 200
        assert r.body == b_('OTHERS')

        # test an invalid custom action
        r = app.get('/things?_method=BAD', status=405)
        assert r.status_int == 405

        # test custom "GET" request "count"
        r = app.get('/things/count')
        assert r.status_int == 200
        assert r.body == b_('3')

        # test custom "GET" request "length"
        r = app.get('/things/1/length')
        assert r.status_int == 200
        assert r.body == b_(str(len('one')))

        # test custom "GET" request through subcontroller
        r = app.get('/things/others/echo?value=test')
        assert r.status_int == 200
        assert r.body == b_('test')

        # test custom "POST" request "length"
        r = app.post('/things/1/length', {'value': 'test'})
        assert r.status_int == 200
        assert r.body == b_(str(len('onetest')))

        # test custom "POST" request through subcontroller
        r = app.post('/things/others/echo', {'value': 'test'})
        assert r.status_int == 200
        assert r.body == b_('test')
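
A minimal sketch of the round trip the assertions above rely on: the expected body is built with dumps and the actual response body is parsed back with loads.

from json import dumps, loads

data = ['zero', 'one', 'two', 'three']
body = dumps(dict(items=data))  # what the @expose('json') controller returns
assert loads(body)['items'] == data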

Example 97

Project: vulnsrv Source File: vulnsrv.py
    def do_GET(self):
        reqp = _urlparse(self.path)
        try:
            getParams = query2dict(reqp.query.encode('ascii'))
        except ValueError:
            _type, e, _traceback = sys.exc_info()
            self.send_error(400, 'Invalid query format: ' + str(e))
            return
        sessionID = self._getSessionID()

        if reqp.path == '/':
            self._writeHtmlDoc(_uc('''
<ol class="mainMenu">
<li><a href="clientauth/">Client-Side Authorization Check</a></li>
<li><a href="mac/">MAC Length Extension</a></li>
<li><a href="csrf/">Cross-Site Request Forgery (CSRF)</a></li>
<li><a href="reflected_xss/?username=Benutzer%21">Reflected Cross-Site Scripting (XSS)</a></li>
<li><a href="stored_xss/?username=Benutzer%21">Stored Cross-Site Scripting (XSS)</a></li>
<li><a href="sqlinjection/">SQL Injection</a></li>
<li><a href="pathtraversal/">Path Traversal</a></li>
</ol>'''), 'vulnsrv', sessionID)
        elif reqp.path == '/clientauth/':
            js_code = html.escape('if (\'you\' != \'admin\') {alert(\'Zugriff verweigert!\'); return false;} else return true;', True)
            self._writeHtmlDoc(
                _uc('''
    <p>Finden Sie das Geheimnis heraus!</p>

    <form action="secret" method="post">
    <input type="submit" value="Geheimnis herausfinden"
    onclick="%s" />
    %s
    </form>
    ''') % (js_code, self._getCsrfTokenField(sessionID)),
                'Client-Side Authorization Check', sessionID)
        elif reqp.path == '/csrf/':
            self._writeHtmlDoc(
                _uc('''
<p>Mit dem untenstehendem Formular k&ouml;nnen Sie Nachrichten schreiben.
Erstellen Sie eine HTML-Datei <code>evil-csrf.html</code>, bei deren Aufruf der arglose Benutzer hier unfreiwillig eine &uuml;belgesinnte Nachricht hinterl&auml;sst.
</p>

<form action="send" enctype="application/x-www-form-urlencoded" method="post">
<input type="text" name="message" autofocus="autofocus" required="required" placeholder="Eine freundliche Nachricht" size="50" />
<input type="submit" value="Senden" />
</form>
''') + msgsToHtml(self.vulnState.csrfMessages), 'CSRF', sessionID)
        elif reqp.path == '/reflected_xss/':
            username = getParams.get('username', 'Unbekannter')
            self._writeHtmlDoc(_uc(
                '''<div>Hallo %s</div>
<p>Das untenstehende Formular ist gegen Cross-Site Request Forgery gesch&uuml;tzt.
Erstellen Sie eine HTML-Datei <code>evil-reflected-xss.html</code>, bei deren Aufruf der arglose Benutzer hier trotzdem unfreiwillig eine &uuml;belgesinnte Nachricht hinterl&auml;sst.
</p>

<form action="send" enctype="application/x-www-form-urlencoded" method="post">
<input type="text" name="message" autofocus="autofocus" required="required" placeholder="Eine freundliche Nachricht" size="50" />
%s
<input type="submit" value="Senden" />
</form>
''') % (_uc(username), self._getCsrfTokenField(sessionID)) + msgsToHtml(self.vulnState.reflected_xss_messages), 'Reflected XSS', sessionID)
        elif reqp.path == '/stored_xss/':
            self._writeHtmlDoc(_uc(
                '''<div>Hallo <span class="userid">%s</span></div>
<p>Das untenstehende Formular ist gegen Cross-Site Request Forgery gesch&uuml;tzt.
Sorgen Sie daf&uuml;r, dass jeder Benutzer der diese Seite aufruft unfreiwillig eine Nachricht hinterl&auml;sst, die IP und Port des Benutzers beinhaltet.
</p>

<form action="send" enctype="application/x-www-form-urlencoded" method="post">
<input type="text" name="message" autocomplete="off" autofocus="autofocus" required="required" placeholder="Eine freundliche Nachricht" size="50" />
%s
<input type="submit" value="Senden" />
</form>
%s

<script>
function show(messages_json) {
    var messages = JSON.parse(messages_json);
    var list = document.querySelector('.messages');
    messages.forEach(function(m) {
        var li = document.createElement('li');
        li.appendChild(document.createTextNode(m));
        list.appendChild(li);
    });
}

function download() {
    var xhr = new XMLHttpRequest();
    xhr.dataType = 'text';
    xhr.onload = function(e) {
        show(xhr.responseText);
    };
    xhr.open('GET', 'json');
    xhr.send();
}

function send(msg) {
    var xhr = new XMLHttpRequest();
    var token = document.querySelector('input[name="csrfToken"]').value;
    var params = 'csrfToken=' + encodeURIComponent(token) + '&message=' +encodeURIComponent(msg);
    xhr.open('POST', 'send');
    xhr.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
    xhr.send(params);

}

function user() {
    return document.querySelector('.userid').textContent;
}
</script>

<script>
// JSON direkt einbinden
var messages_json = '%s';
show(messages_json);

// Vorheriger Code:
// download();

</script>

<form action="clear" enctype="application/x-www-form-urlencoded" method="post">
%s
<button role="submit">Alle Nachrichten l&ouml;schen</button
</form>

''') % (_uc(':').join(map(_uc, self.client_address)), self._getCsrfTokenField(sessionID), msgsToHtml([]), json.dumps(self.vulnState.stored_xss_messages), self._getCsrfTokenField(sessionID)), 'Stored XSS', sessionID)
        elif reqp.path == '/sqlinjection/':
            webMessages = self.vulnState.sqlQuery("SELECT id,msg FROM messages WHERE user='web'")
            self._writeHtmlDoc(_uc('''
<p>In der untenstehenden Tabelle sehen Sie die Nachrichten an den Benutzer <code>web</code>. Welche Nachrichten hat der Benutzer <code>admin</code> bekommen?</p>

<h2>Nachrichten an <code>web</code></h2>

<ul class="messages">
%s
</ul>''') % '\n'.join('<li><a href="/sqlinjection/msg?id=' + html.escape(str(row[0])) + '">' + html.escape(row[1]) + '</a></li>' for row in webMessages), 'SQL Injection', sessionID)
        elif reqp.path == '/sqlinjection/msg':
            msgNum = getParams.get('id', '')
            sql = "SELECT id,user,msg FROM messages WHERE user='web' AND id='" + msgNum + "'"
            try:
                msgs = self.vulnState.sqlQuery(sql)
                if len(msgs) == 0:
                    msg_html = '<td colspan="3">Keine web-Nachrichten gefunden</td>'
                else:
                    msg_html = '\n'.join('<tr>' + ''.join('<td>' + html.escape(str(cell)) + '</td>' for cell in row) + '</tr>' for row in msgs)
            except:
                _type, e, _traceback = sys.exc_info()
                msg_html = '<td colspan="3" class="error">' + html.escape(str(e)) + '</td>'
            self._writeHtmlDoc(('''
<table class="messages">
<thead><tr><th>ID</th><th>Benutzer</th><th>Nachricht</th></tr></thead>
%s
</table>
<p><a href="/sqlinjection/">Zur&uuml;ck zur &Uuml;bersicht</a></p>
''' % msg_html), 'Detailansicht: Nachricht ' + msgNum, sessionID)
        elif reqp.path == '/pathtraversal/':
            fileHtml = _uc('').join(
                _uc('<li><a href="get?') + html.escape(urlencode([('file', fn)])) + _uc('">') + html.escape(fn) + _uc('</a></li>\n')
                for fn in FILES['/var/www/img']['content'])
            self._writeHtmlDoc(_uc('''
<p>Welchen Unix-Account sollte ein Angreifer n&auml;her untersuchen?</p>

<p><em>Bonus-Aufgabe</em>: Was ist das Passwort des Accounts?</p>

<p>Dateien zum Download:</p>

<ul>
%s
</ul>''' % fileHtml), 'Path Traversal', sessionID)
        elif reqp.path == '/pathtraversal/get':
            fn = '/var/www/img/' + getParams.get('file', '')
            # Resolve the path.
            # If we were using a real filesystem, this would be done automatically by the OS filesystem functions, of course
            curPath = []
            for pel in fn.split('/'):
                if pel == '' or pel == '.':
                    continue
                if pel == '..':
                    if len(curPath) > 0:
                        curPath.pop()
                    # else: We're at the root, and /../ is /
                else:
                    curPath.append(pel)
            finalPath = '/' + '/'.join(curPath)
            if finalPath.endswith('/'):
                finalPath = finalPath[:-1]
            if finalPath in FILES:
                fdata = FILES[finalPath]
                if fdata['type'] == '__directory__':
                    self.send_error(404, 'Is a directory')
                else:
                    fileBlob = base64.b64decode(fdata['blob_b64'].encode('ascii'))
                    self.send_response(200)
                    self.send_header('Content-Type', fdata['type'])
                    self.send_header('Content-Length', str(len(fileBlob)))
                    self.end_headers()
                    self.wfile.write(fileBlob)
            else:
                self.send_error(404)
        elif reqp.path == '/mac/':
            cookies = self._readCookies()
            raw_cookie = cookies.get('mac_session')
            if raw_cookie is not None:
                if isinstance(raw_cookie, compat_bytes):  # Python 2.x
                    raw_cookie = raw_cookie.decode('latin1')
                mac, _, session_data_str = raw_cookie.rpartition(_uc('!'))
                session_data = session_data_str.encode('latin1')
                secret = self.vulnState.macSecret
                if hashlib.sha256(secret + session_data).hexdigest() == mac:
                    session = query2dict(session_data)
                    user = session['user']
                    timestamp = session['time']
                else:
                    user = timestamp = _uc('(Falscher MAC)')
            else:
                raw_cookie = _uc('')
                user = timestamp = _uc('(Nicht gesetzt)')

            assert isinstance(raw_cookie, _uc)
            raw_cookie_hex = binascii.b2a_hex(raw_cookie.encode('utf-8')).decode('ascii')
            assert isinstance(raw_cookie_hex, _uc)
            self._writeHtmlDoc(_uc('''
<p>Loggen Sie sich als Benutzer admin ein (ohne das Geheimnis aus dem Server-Prozess auszulesen).
Schreiben Sie daf&#x00fc;r ein Programm, das den korrekten Cookie-Wert berechnet.</p>

<form method="post" action="login">
%s
<input type="submit" value="Gast-Login" />
</form>

<h3>Aktuelle Session-Daten:</h3>

<p>Cookie (roh): <code>%s</code> (%s Bytes)</p>

<dl>
<dt>Benutzername:</dt><dd>%s</dd>
<dt>Login-Zeit:</dt><dd>%s</dd>
</dl>

<p>F&#x00fc;r den Angriff k&#x00f6;nnen Sie <a href="mac_attack.py">dieses Python-Skript</a> verwenden.
Das Skript erwartet, dass im lokalen Verzeichnis eine ausf&#x00fc;hrbare Datei ./mac_extension liegt, die mit den Argumenten <code>[Bekannter Hash]</code> <code>[Bekannte Eingabe]</code> <code>[Einzuf&#x00fc;gende Daten]</code> <code>[L&#x00e4;nge des secrets in Bytes (32)]</code> aufgerufen werden kann und das exploit zur&#x00fc;ckgibt.
</p>
      ''' % (
                self._getCsrfTokenField(sessionID),
                html.escape(raw_cookie),
                html.escape(_uc(len(raw_cookie))),
                html.escape(user),
                html.escape(timestamp)
            )), 'Length Extension-Angriffe gegen MAC', sessionID)
        elif reqp.path == '/mac/mac_attack.py':
            fdata = FILES['/mac/mac_attack.py']
            fileBlob = base64.b64decode(fdata['blob_b64'].encode('ascii'))
            self.send_response(200)
            self.send_header('Content-Type', fdata['type'])
            self.send_header('Content-Length', str(len(fileBlob)))
            self.end_headers()
            self.wfile.write(fileBlob)
        elif reqp.path == '/favicon.ico':
            self.send_response(200)
            self.send_header('Content-Type', 'image/png')
            self.send_header('Content-Length', str(len(FAVICON)))
            self.end_headers()
            self.wfile.write(FAVICON)
        elif reqp.path == '/stored_xss/json':
            self._write_json(self.vulnState.stored_xss_messages)
        else:
            self.send_error(404)
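
The json.dumps call in this handler is tucked away inside the _write_json helper used for the /stored_xss/json route above; the helper body is not part of the excerpt. A minimal, self-contained sketch of that pattern, assuming a plain Python 3 http.server handler (the payload and port below are placeholders, not values from the project):

import json
from http.server import BaseHTTPRequestHandler, HTTPServer

class JSONHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        # Serialize the payload and send it as a complete HTTP response,
        # mirroring the send_response/send_header/wfile.write pattern above.
        body = json.dumps({'messages': []}).encode('utf-8')
        self.send_response(200)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == '__main__':
    HTTPServer(('127.0.0.1', 8000), JSONHandler).serve_forever()

Everything else about the vulnerable-server demo (sessions, templates, the other routes) is omitted; only the JSON response step is shown.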

Example 98

Project: atomic-reactor Source File: test_tag_and_push.py
@pytest.mark.parametrize("use_secret", [
    True,
    False,
])
@pytest.mark.parametrize(("image_name", "logs", "should_raise", "has_config"), [
    (TEST_IMAGE, PUSH_LOGS_1_X, False, False),
    (TEST_IMAGE, PUSH_LOGS_1_9, False, False),
    (TEST_IMAGE, PUSH_LOGS_1_10, False, True),
    (TEST_IMAGE, PUSH_LOGS_1_10_NOT_IN_STATUS, False, False),
    (DOCKER0_REGISTRY + '/' + TEST_IMAGE, PUSH_LOGS_1_X, True, False),
    (DOCKER0_REGISTRY + '/' + TEST_IMAGE, PUSH_LOGS_1_9, True, False),
    (DOCKER0_REGISTRY + '/' + TEST_IMAGE, PUSH_LOGS_1_10, True, True),
    (DOCKER0_REGISTRY + '/' + TEST_IMAGE, PUSH_LOGS_1_10_NOT_IN_STATUS, True, True),
    (TEST_IMAGE, PUSH_ERROR_LOGS, True, False),
])
def test_tag_and_push_plugin(
        tmpdir, monkeypatch, image_name, logs, should_raise, has_config, use_secret):

    if MOCK:
        mock_docker()
        flexmock(docker.Client, push=lambda iid, **kwargs: iter(logs),
                 login=lambda username, registry, dockercfg_path: {'Status': 'Login Succeeded'})

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    workflow.tag_conf.add_primary_image(image_name)
    setattr(workflow, 'builder', X)

    secret_path = None
    if use_secret:
        temp_dir = mkdtemp()
        with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
            dockerconfig_contents = {
                LOCALHOST_REGISTRY: {
                    "username": "user", "email": "[email protected]", "password": "mypassword"}}
            dockerconfig.write(json.dumps(dockerconfig_contents))
            dockerconfig.flush()
            secret_path = temp_dir

    CONFIG_DIGEST = 'sha256:2c782e3a93d34d89ea4cf54052768be117caed54803263dd1f3798ce42aac14e'
    media_type = 'application/vnd.docker.distribution.manifest.v2+json'

    response_config_json = {
        'config': {
            'digest': CONFIG_DIGEST,
            'mediaType': 'application/octet-stream',
            'size': 4132
        },
        'layers': [
            {
                'digest': 'sha256:16dc1f96e3a1bb628be2e00518fec2bb97bd5933859de592a00e2eb7774b6ecf',
                'mediaType': 'application/vnd.docker.image.rootfs.diff.tar.gzip',
                'size': 71907148
            },
            {
                'digest': 'sha256:cebc0565e1f096016765f55fde87a6f60fdb1208c0b5017e35a856ff578f5ccb',
                'mediaType': 'application/vnd.docker.image.rootfs.diff.tar.gzip',
                'size': 3945724
            }
        ],
        'mediaType': media_type,
        'schemaVersion': 2
    }

    response_json = {
        'config': {
            'Size': 12509448,
            'architecture': 'amd64',
            'author': 'Red Hat, Inc.',
            'config': {
                'Cmd': ['/bin/rsyslog.sh'],
                'Entrypoint': None,
                'Image': 'c3fb36aafd5692d2a45115d32bb120edb6edf6c0c3c783ed6592a8dab969fb88',
                'Labels': {
                    'Architecture': 'x86_64',
                    'Authoritative_Registry': 'registry.access.redhat.com',
                    'BZComponent': 'rsyslog-docker',
                    'Name': 'rhel7/rsyslog',
                    'Release': '28.vrutkovs.31',
                    'Vendor': 'Red Hat, Inc.',
                    'Version': '7.2',
                },
            },
            'created': '2016-10-07T10:20:05.38595Z',
            'docker_version': '1.9.1',
            'id': '1ca220fbc2aed7c141b236c8729fe59db5771b32bf2da74e4a663407f32ec2a2',
            'os': 'linux',
            'parent': '47eed7a8ea3f131e9281ae09fcbfb0041872fd8b74a048f1c739302c8148505d'
        },
        'container_config': {
            'foo': 'bar',
            'spam': 'maps'
        },
        'id': '1ca220fbc2aed7c141b236c8729fe59db5771b32bf2da74e4a663407f32ec2a2',
        'parent_id': 'c3fb36aafd5692d2a45115d32bb120edb6edf6c0c3c783ed6592a8dab969fb88'
    }

    if not has_config:
        response_json = None

    config_latest_url = "https://{}/v2/{}/manifests/latest".format(LOCALHOST_REGISTRY, TEST_IMAGE,)
    config_url = "https://{}/v2/{}/manifests/{}".format(LOCALHOST_REGISTRY, TEST_IMAGE, DIGEST_V2)
    blob_url = "https://{}/v2/{}/blobs/{}".format(
        LOCALHOST_REGISTRY, TEST_IMAGE, CONFIG_DIGEST)

    config_response_config_v1 = requests.Response()
    (flexmock(config_response_config_v1,
              raise_for_status=lambda: None,
              json=response_config_json,
              headers={
                'Content-Type': 'application/vnd.docker.distribution.manifest.v1+json',
                'Docker-Content-Digest': DIGEST_V1
              }
    ))

    config_response_config_v2 = requests.Response()
    (flexmock(config_response_config_v2,
              raise_for_status=lambda: None,
              json=response_config_json,
              headers={
                'Content-Type': 'application/vnd.docker.distribution.manifest.v2+json',
                'Docker-Content-Digest': DIGEST_V2
              }
    ))

    blob_config = requests.Response()
    (flexmock(blob_config, raise_for_status=lambda: None, json=response_json))

    def custom_get(url, headers, **kwargs):
        if url == config_latest_url:
            if headers['Accept'] == 'application/vnd.docker.distribution.manifest.v1+json':
                return config_response_config_v1

            if headers['Accept'] == 'application/vnd.docker.distribution.manifest.v2+json':
                return config_response_config_v2

        if url == config_url:
            return config_response_config_v2

        if url == blob_url:
            return blob_config

    (flexmock(requests)
        .should_receive('get')
        .replace_with(custom_get)
    )

    runner = PostBuildPluginsRunner(
        tasker,
        workflow,
        [{
            'name': TagAndPushPlugin.key,
            'args': {
                'registries': {
                    LOCALHOST_REGISTRY: {
                        'insecure': True,
                        'secret': secret_path
                    }
                }
            },
        }]
    )

    if should_raise:
        with pytest.raises(Exception):
            runner.run()
    else:
        output = runner.run()
        image = output[TagAndPushPlugin.key][0]
        tasker.remove_image(image)
        assert len(workflow.push_conf.docker_registries) > 0

        if MOCK:
            # we only test this when mocking docker because we don't expect
            # to run actual docker against a v2 registry
            expected_digest = ManifestDigest(v1=DIGEST_V1, v2=DIGEST_V2)
            assert workflow.push_conf.docker_registries[0].digests[image_name].v1 == expected_digest.v1
            assert workflow.push_conf.docker_registries[0].digests[image_name].v2 == expected_digest.v2

            if has_config:
                assert isinstance(workflow.push_conf.docker_registries[0].config, dict)
            else:
                assert workflow.push_conf.docker_registries[0].config is None
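
Here json.dumps produces the .dockercfg registry secret that the plugin later reads from secret_path. A standalone sketch of just that serialization step; the registry host and credentials below are placeholders, not values from the project:

import json
import os
from tempfile import mkdtemp

registry = 'registry.example.com:5000'   # placeholder registry host
secret_path = mkdtemp()
dockercfg_contents = {
    registry: {'username': 'user',
               'email': 'user@example.com',
               'password': 'mypassword'},
}
with open(os.path.join(secret_path, '.dockercfg'), 'w') as dockercfg:
    # json.dumps turns the credentials dict into the JSON text docker expects.
    dockercfg.write(json.dumps(dockercfg_contents))

The test above writes the same kind of file into a temporary directory and then passes that directory to the plugin as its 'secret' argument.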

Example 99

Project: pybossa Source File: test_project_api.py
    @with_context
    def test_project_post(self):
        """Test API project creation and auth"""
        users = UserFactory.create_batch(2)
        CategoryFactory.create()
        name = u'XXXX Project'
        data = dict(
            name=name,
            short_name='xxxx-project',
            description='description',
            owner_id=1,
            long_description=u'Long Description\n================')
        data = json.dumps(data)
        # no api-key
        res = self.app.post('/api/project', data=data)
        assert_equal(res.status, '401 UNAUTHORIZED',
                     'Should not be allowed to create')
        # now a real user
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        out = project_repo.get_by(name=name)
        assert out, out
        assert_equal(out.short_name, 'xxxx-project'), out
        assert_equal(out.owner.name, 'user2')
        id_ = out.id

        # now a real user with headers auth
        headers = [('Authorization', users[1].api_key)]
        new_project = dict(
            name=name + '2',
            short_name='xxxx-project2',
            description='description2',
            owner_id=1,
            long_description=u'Long Description\n================')
        new_project = json.dumps(new_project)
        res = self.app.post('/api/project', headers=headers,
                            data=new_project)
        out = project_repo.get_by(name=name + '2')
        assert out, out
        assert_equal(out.short_name, 'xxxx-project2'), out
        assert_equal(out.owner.name, 'user2')
        ## Test that a default category is assigned to the project
        assert out.category_id, "No category assigned to project"
        id_ = out.id

        # test re-create should fail
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'POST', err
        assert err['exception_cls'] == "DBIntegrityError", err

        # test create with non-allowed fields should fail
        data = dict(name='fail', short_name='fail', link='hateoas', wrong=15)
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = "ValueError exception should be raised"
        assert res.status_code == 415, err
        assert err['action'] == 'POST', err
        assert err['status'] == 'failed', err
        assert err['exception_cls'] == "ValueError", err_msg
        # Now with a JSON object but not valid
        data = json.dumps(data)
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = "TypeError exception should be raised"
        assert err['action'] == 'POST', err_msg
        assert err['status'] == 'failed', err_msg
        assert err['exception_cls'] == "TypeError", err_msg
        assert res.status_code == 415, err_msg

        # test update
        data = {'name': 'My New Title', 'links': 'hateoas'}
        datajson = json.dumps(data)
        ## anonymous
        res = self.app.put('/api/project/%s' % id_, data=data)
        error_msg = 'Anonymous should not be allowed to update'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'Unauthorized', error

        ### real user but not allowed as not owner!
        non_owner = UserFactory.create()
        url = '/api/project/%s?api_key=%s' % (id_, non_owner.api_key)
        res = self.app.put(url, data=datajson)
        error_msg = 'Should not be able to update projects of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'Forbidden', error

        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)

        # with hateoas links
        assert_equal(res.status, '200 OK', res.data)
        out2 = project_repo.get(id_)
        assert_equal(out2.name, data['name'])
        out = json.loads(res.data)
        assert out.get('status') is None, error
        assert out.get('id') == id_, error

        # without hateoas links
        del data['links']
        newdata = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=newdata)

        assert_equal(res.status, '200 OK', res.data)
        out2 = project_repo.get(id_)
        assert_equal(out2.name, data['name'])
        out = json.loads(res.data)
        assert out.get('status') is None, error
        assert out.get('id') == id_, error

        # With wrong id
        res = self.app.put('/api/project/5000?api_key=%s' % users[1].api_key,
                           data=datajson)
        assert_equal(res.status, '404 NOT FOUND', res.data)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'NotFound', error

        # With fake data
        data['algo'] = 13
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'TypeError', err

        # With empty fields
        data.pop('algo')
        data['name'] = None
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'DBIntegrityError', err

        data['name'] = ''
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'DBIntegrityError', err

        data['name'] = 'something'
        data['short_name'] = ''
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'DBIntegrityError', err

        # With not JSON data
        datajson = data
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'ValueError', err

        # With wrong args in the URL
        data = dict(
            name=name,
            short_name='xxxx-project',
            long_description=u'Long Description\n================')

        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s&search=select1' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'AttributeError', err

        # test delete
        ## anonymous
        res = self.app.delete('/api/project/%s' % id_, data=data)
        error_msg = 'Anonymous should not be allowed to delete'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'project', error
        ### real user but not allowed as not owner!
        url = '/api/project/%s?api_key=%s' % (id_, non_owner.api_key)
        res = self.app.delete(url, data=datajson)
        error_msg = 'Should not be able to delete projects of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'project', error

        url = '/api/project/%s?api_key=%s' % (id_, users[1].api_key)
        res = self.app.delete(url, data=datajson)

        assert_equal(res.status, '204 NO CONTENT', res.data)

        # delete a project that does not exist
        url = '/api/project/5000?api_key=%s' % users[1].api_key
        res = self.app.delete(url, data=datajson)
        error = json.loads(res.data)
        assert res.status_code == 404, error
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'project', error
        assert error['exception_cls'] == 'NotFound', error

        # delete a project that does not exist
        url = '/api/project/?api_key=%s' % users[1].api_key
        res = self.app.delete(url, data=datajson)
        assert res.status_code == 404, error
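
In this test json.dumps is used repeatedly to turn plain dicts into the request bodies sent to /api/project, and json.loads decodes the JSON responses. A minimal round-trip sketch of that pattern, with a placeholder payload:

import json

# Placeholder payload with the same shape the test posts to /api/project.
payload = {
    'name': 'My Project',
    'short_name': 'my-project',
    'description': 'description',
    'long_description': 'Long Description\n================',
}
body = json.dumps(payload)      # what the test sends as the request body
echoed = json.loads(body)       # what the API would decode on the other end
assert echoed == payload

Sending the raw dict instead of the json.dumps output is exactly what the "With not JSON data" case above exercises, and it is expected to fail with a ValueError.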

Example 100

Project: yunohost Source File: backup.py
def backup_create(name=None, description=None, output_directory=None,
                  no_compress=False, ignore_hooks=False, hooks=[],
                  ignore_apps=False, apps=[]):
    """
    Create a backup local archive

    Keyword arguments:
        name -- Name of the backup archive
        description -- Short description of the backup
        output_directory -- Output directory for the backup
        no_compress -- Do not create an archive file
        hooks -- List of backup hooks names to execute
        ignore_hooks -- Do not execute backup hooks
        apps -- List of application names to backup
        ignore_apps -- Do not backup apps

    """
    # TODO: Add a 'clean' argument to clean output directory
    tmp_dir = None
    env_var = {}

    # Validate what to backup
    if ignore_hooks and ignore_apps:
        raise MoulinetteError(errno.EINVAL,
            m18n.n('backup_action_required'))

    # Validate and define backup name
    timestamp = int(time.time())
    if not name:
        name = time.strftime('%Y%m%d-%H%M%S')
    if name in backup_list()['archives']:
        raise MoulinetteError(errno.EINVAL,
            m18n.n('backup_archive_name_exists'))

    # Validate additional arguments
    if no_compress and not output_directory:
        raise MoulinetteError(errno.EINVAL,
            m18n.n('backup_output_directory_required'))
    if output_directory:
        output_directory = os.path.abspath(output_directory)

        # Check for forbidden folders
        if output_directory.startswith(archives_path) or \
           re.match(r'^/(|(bin|boot|dev|etc|lib|root|run|sbin|sys|usr|var)(|/.*))$',
                    output_directory):
            raise MoulinetteError(errno.EINVAL,
                m18n.n('backup_output_directory_forbidden'))

        # Create the output directory
        if not os.path.isdir(output_directory):
            logger.debug("creating output directory '%s'", output_directory)
            os.makedirs(output_directory, 0750)
        # Check that output directory is empty
        elif no_compress and os.listdir(output_directory):
            raise MoulinetteError(errno.EIO,
                m18n.n('backup_output_directory_not_empty'))

        # Do not compress, so set temporary directory to output one and
        # disable bind mounting to prevent data loss in case of a rm
        # See: https://dev.yunohost.org/issues/298
        if no_compress:
            logger.debug('bind mounting will be disabled')
            tmp_dir = output_directory
            env_var['CAN_BIND'] = 0
    else:
        output_directory = archives_path
        if not os.path.isdir(archives_path):
            os.mkdir(archives_path, 0750)

    def _clean_tmp_dir(retcode=0):
        ret = hook_callback('post_backup_create', args=[tmp_dir, retcode])
        if not ret['failed']:
            filesystem.rm(tmp_dir, True, True)
            return True
        else:
            logger.warning(m18n.n('backup_cleaning_failed'))
            return False

    # Create temporary directory
    if not tmp_dir:
        tmp_dir = "%s/tmp/%s" % (backup_path, name)
        if os.path.isdir(tmp_dir):
            logger.debug("temporary directory for backup '%s' already exists",
                tmp_dir)
            if not _clean_tmp_dir():
                raise MoulinetteError(
                    errno.EIO, m18n.n('backup_output_directory_not_empty'))
        filesystem.mkdir(tmp_dir, 0750, parents=True, uid='admin')

    # Initialize backup info
    info = {
        'description': description or '',
        'created_at': timestamp,
        'apps': {},
        'hooks': {},
    }

    # Run system hooks
    if not ignore_hooks:
        # Check hooks availability
        hooks_filtered = set()
        if hooks:
            for hook in hooks:
                try:
                    hook_info('backup', hook)
                except:
                    logger.error(m18n.n('backup_hook_unknown', hook=hook))
                else:
                    hooks_filtered.add(hook)

        if not hooks or hooks_filtered:
            logger.info(m18n.n('backup_running_hooks'))
            ret = hook_callback('backup', hooks_filtered, args=[tmp_dir],
                                env=env_var)
            if ret['succeed']:
                info['hooks'] = ret['succeed']

                # Save relevant restoration hooks
                tmp_hooks_dir = tmp_dir + '/hooks/restore'
                filesystem.mkdir(tmp_hooks_dir, 0750, True, uid='admin')
                for h in ret['succeed'].keys():
                    try:
                        i = hook_info('restore', h)
                    except:
                        logger.warning(m18n.n('restore_hook_unavailable',
                                hook=h), exc_info=1)
                    else:
                        for f in i['hooks']:
                            shutil.copy(f['path'], tmp_hooks_dir)

    # Backup apps
    if not ignore_apps:
        # Filter applications to backup
        apps_list = set(os.listdir('/etc/yunohost/apps'))
        apps_filtered = set()
        if apps:
            for a in apps:
                if a not in apps_list:
                    logger.warning(m18n.n('unbackup_app', app=a))
                else:
                    apps_filtered.add(a)
        else:
            apps_filtered = apps_list

        # Run apps backup scripts
        tmp_script = '/tmp/backup_' + str(timestamp)
        for app_instance_name in apps_filtered:
            app_setting_path = '/etc/yunohost/apps/' + app_instance_name

            # Check if the app has a backup and restore script
            app_script = app_setting_path + '/scripts/backup'
            app_restore_script = app_setting_path + '/scripts/restore'
            if not os.path.isfile(app_script):
                logger.warning(m18n.n('unbackup_app', app=app_instance_name))
                continue
            elif not os.path.isfile(app_restore_script):
                logger.warning(m18n.n('unrestore_app', app=app_instance_name))

            tmp_app_dir = '{:s}/apps/{:s}'.format(tmp_dir, app_instance_name)
            tmp_app_bkp_dir = tmp_app_dir + '/backup'
            logger.info(m18n.n('backup_running_app_script', app=app_instance_name))
            try:
                # Prepare backup directory for the app
                filesystem.mkdir(tmp_app_bkp_dir, 0750, True, uid='admin')
                shutil.copytree(app_setting_path, tmp_app_dir + '/settings')

                # Copy app backup script in a temporary folder and execute it
                subprocess.call(['install', '-Dm555', app_script, tmp_script])

                # Prepare env. var. to pass to script
                app_id, app_instance_nb = _parse_app_instance_name(
                    app_instance_name)
                env_dict = env_var.copy()
                env_dict["YNH_APP_ID"] = app_id
                env_dict["YNH_APP_INSTANCE_NAME"] = app_instance_name
                env_dict["YNH_APP_INSTANCE_NUMBER"] = str(app_instance_nb)
                env_dict["YNH_APP_BACKUP_DIR"] = tmp_app_bkp_dir

                hook_exec(tmp_script, args=[tmp_app_bkp_dir, app_instance_name],
                          raise_on_error=True, chdir=tmp_app_bkp_dir, env=env_dict)
            except:
                logger.exception(m18n.n('backup_app_failed', app=app_instance_name))
                # Cleaning app backup directory
                shutil.rmtree(tmp_app_dir, ignore_errors=True)
            else:
                # Add app info
                i = app_info(app_instance_name)
                info['apps'][app_instance_name] = {
                    'version': i['version'],
                    'name': i['name'],
                    'description': i['description'],
                }
            finally:
                filesystem.rm(tmp_script, force=True)

    # Check if something has been saved
    if not info['hooks'] and not info['apps']:
        _clean_tmp_dir(1)
        raise MoulinetteError(errno.EINVAL, m18n.n('backup_nothings_done'))

    # Calculate total size
    backup_size = int(subprocess.check_output(
        ['du', '-sb', tmp_dir]).split()[0].decode('utf-8'))
    info['size'] = backup_size

    # Create backup info file
    with open("%s/info.json" % tmp_dir, 'w') as f:
        f.write(json.dumps(info))

    # Create the archive
    if not no_compress:
        logger.info(m18n.n('backup_creating_archive'))

        # Check free space in output directory at first
        avail_output = subprocess.check_output(
            ['df', '--block-size=1', '--output=avail', tmp_dir]).split()
        if len(avail_output) < 2 or int(avail_output[1]) < backup_size:
            logger.debug('not enough space at %s (free: %s / needed: %d)',
                         output_directory, avail_output[1], backup_size)
            _clean_tmp_dir(3)
            raise MoulinetteError(errno.EIO, m18n.n(
                'not_enough_disk_space', path=output_directory))

        # Open archive file for writing
        archive_file = "%s/%s.tar.gz" % (output_directory, name)
        try:
            tar = tarfile.open(archive_file, "w:gz")
        except:
            logger.debug("unable to open '%s' for writing",
                         archive_file, exc_info=1)
            _clean_tmp_dir(2)
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_archive_open_failed'))

        # Add files to the archive
        try:
            tar.add(tmp_dir, arcname='')
            tar.close()
        except IOError as e:
            logger.error(m18n.n('backup_archive_writing_error'), exc_info=1)
            _clean_tmp_dir(3)
            raise MoulinetteError(errno.EIO,
                                  m18n.n('backup_creation_failed'))

        # Move info file
        os.rename(tmp_dir + '/info.json',
                  '{:s}/{:s}.info.json'.format(archives_path, name))

    # Clean temporary directory
    if tmp_dir != output_directory:
        _clean_tmp_dir()

    logger.success(m18n.n('backup_created'))

    # Return backup info
    info['name'] = name
    return { 'archive': info }
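
json.dumps appears near the end of backup_create, where it writes the info.json metadata file that later travels alongside the archive. A small sketch of just that step, using placeholder metadata and a temporary directory instead of the real backup paths:

import json
import os
import tempfile
import time

# Placeholder metadata in the same shape backup_create() builds.
info = {
    'description': 'nightly backup',
    'created_at': int(time.time()),
    'apps': {},
    'hooks': {},
}
tmp_dir = tempfile.mkdtemp()
with open(os.path.join(tmp_dir, 'info.json'), 'w') as f:
    # json.dumps serializes the metadata that describes the backup contents.
    f.write(json.dumps(info))

The hook execution, app backup scripts, size accounting, and tar handling above are omitted; only the metadata serialization is shown.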