cStringIO.StringIO

Here are examples of the Python API cStringIO.StringIO taken from open source projects. cStringIO is the C-accelerated in-memory file module of Python 2; in Python 3 it was removed, and io.BytesIO (or io.StringIO for text) covers the same ground.
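
Before the project examples, a minimal sketch of the two usual ways to create one of these objects: wrap an existing string for reading, or start empty and write into the buffer.

import cStringIO

# Read side: wrap an existing string so it can be consumed via the file API.
infile = cStringIO.StringIO("line one\nline two\n")
print infile.readline()      # -> "line one\n"

# Write side: start empty, write incrementally, then pull the result out.
outfile = cStringIO.StringIO()
outfile.write("Status: 200\n")
outfile.write("\n")
print repr(outfile.getvalue())

# Note: an object created from a string is read-only; only the empty
# constructor returns a writable buffer.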

200 Examples

Example 1

def CreateUploadDispatcher(get_blob_storage=GetBlobStorage):
  """Function to create upload dispatcher.

  Returns:
    New dispatcher capable of handling large blob uploads.
  """


  from google.appengine.tools import dev_appserver

  class UploadDispatcher(dev_appserver.URLDispatcher):
    """Dispatcher that handles uploads."""

    def __init__(self):
      """Constructor.

      Args:
        blob_storage: A BlobStorage instance.
      """
      self.__cgi_handler = dev_appserver_upload.UploadCGIHandler(
          get_blob_storage())



    def Dispatch(self,
                 request,
                 outfile,
                 base_env_dict=None):
      """Handle post dispatch.

      This dispatcher will handle all uploaded files in the POST request, store
      the results in the blob-storage, close the upload session and transform
      the original request into one where the uploaded files have external
      bodies.

      Returns:
        New AppServerRequest indicating request forward to upload success
        handler.
      """

      if base_env_dict['REQUEST_METHOD'] != 'POST':
        outfile.write('Status: 400\n\n')
        return


      upload_key = re.match(UPLOAD_URL_PATTERN, request.relative_url).group(1)
      try:
        upload_session = datastore.Get(upload_key)
      except datastore_errors.EntityNotFoundError:
        upload_session = None

      if upload_session:
        success_path = upload_session['success_path']
        max_bytes_per_blob = upload_session['max_bytes_per_blob']
        max_bytes_total = upload_session['max_bytes_total']

        upload_form = cgi.FieldStorage(fp=request.infile,
                                       headers=request.headers,
                                       environ=base_env_dict)

        try:


          mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
              upload_form,
              max_bytes_per_blob=max_bytes_per_blob,
              max_bytes_total=max_bytes_total)

          datastore.Delete(upload_session)
          self.current_session = upload_session


          header_end = mime_message_string.find('\n\n') + 1
          content_start = header_end + 1
          header_text = mime_message_string[:header_end].replace('\n', '\r\n')
          content_text = mime_message_string[content_start:].replace('\n',
                                                                     '\r\n')


          complete_headers = ('%s'
                              'Content-Length: %d\r\n'
                              '\r\n') % (header_text, len(content_text))

          return dev_appserver.AppServerRequest(
              success_path,
              None,
              mimetools.Message(cStringIO.StringIO(complete_headers)),
              cStringIO.StringIO(content_text),
              force_admin=True)
        except dev_appserver_upload.InvalidMIMETypeFormatError:
          outfile.write('Status: 400\n\n')
        except dev_appserver_upload.UploadEntityTooLargeError:
          outfile.write('Status: 413\n\n')
          response = ERROR_RESPONSE_TEMPLATE % {
              'response_code': 413,
              'response_string': 'Request Entity Too Large',
              'response_text': 'Your client issued a request that was too '
              'large.'}
          outfile.write(response)
      else:
        logging.error('Could not find session for %s', upload_key)
        outfile.write('Status: 404\n\n')


    def EndRedirect(self, dispatched_output, original_output):
      """Handle the end of upload complete notification.

      Makes sure the application upload handler returned an appropriate status
      code.
      """
      response = dev_appserver.RewriteResponse(dispatched_output)
      logging.info('Upload handler returned %d', response.status_code)
      outfile = cStringIO.StringIO()
      outfile.write('Status: %s\n' % response.status_code)

      if response.body and len(response.body.read()) > 0:
        response.body.seek(0)
        outfile.write(response.body.read())
      else:
        outfile.write(''.join(response.headers.headers))

      outfile.seek(0)
      dev_appserver.URLDispatcher.EndRedirect(self,
                                              outfile,
                                              original_output)

  return UploadDispatcher()
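
The detail worth calling out here is the last step: mimetools.Message parses RFC 822 headers from any file-like object, so the generated header string is wrapped in cStringIO.StringIO rather than written to disk. A reduced sketch of that parse, with made-up header values:

import cStringIO
import mimetools

raw = 'Content-Type: message/external-body\r\nContent-Length: 5\r\n\r\n'
message = mimetools.Message(cStringIO.StringIO(raw))
print message.getheader('Content-Length')   # -> '5'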

Example 2

def CreateUploadDispatcher(get_blob_storage=GetBlobStorage):
  """Function to create upload dispatcher.

  Returns:
    New dispatcher capable of handling large blob uploads.
  """
  from google.appengine.tools import dev_appserver

  class UploadDispatcher(dev_appserver.URLDispatcher):
    """Dispatcher that handles uploads."""

    def __init__(self):
      """Constructor.

      Args:
        blob_storage: A BlobStorage instance.
      """
      self.__cgi_handler = dev_appserver_upload.UploadCGIHandler(
          get_blob_storage())

    def Dispatch(self,
                 request,
                 outfile,
                 base_env_dict=None):
      """Handle post dispatch.

      This dispatcher will handle all uploaded files in the POST request, store
      the results in the blob-storage, close the upload session and transform
      the original request into one where the uploaded files have external
      bodies.

      Returns:
        New AppServerRequest indicating request forward to upload success
        handler.
      """
      if base_env_dict['REQUEST_METHOD'] != 'POST':
        outfile.write('Status: 400\n\n')
        return

      upload_key = re.match(UPLOAD_URL_PATTERN, request.relative_url).group(1)
      try:
        upload_session = datastore.Get(upload_key)
      except datastore_errors.EntityNotFoundError:
        upload_session = None

      if upload_session:
        success_path = upload_session['success_path']

        upload_form = cgi.FieldStorage(fp=request.infile,
                                       headers=request.headers,
                                       environ=base_env_dict)

        try:
          mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
              upload_form)
          datastore.Delete(upload_session)
          self.current_session = upload_session

          header_end = mime_message_string.find('\n\n') + 1
          content_start = header_end + 1
          header_text = mime_message_string[:header_end]
          content_text = mime_message_string[content_start:]

          complete_headers = ('%s'
                              'Content-Length: %d\n'
                              '\n') % (header_text, len(content_text))

          return dev_appserver.AppServerRequest(
              success_path,
              None,
              mimetools.Message(cStringIO.StringIO(complete_headers)),
              cStringIO.StringIO(content_text),
              force_admin=True)
        except dev_appserver_upload.InvalidMIMETypeFormatError:
          outfile.write('Status: 400\n\n')
      else:
        logging.error('Could not find session for %s', upload_key)
        outfile.write('Status: 404\n\n')

    def EndRedirect(self, redirected_outfile, original_outfile):
      """Handle the end of upload complete notification.

      Makes sure the application upload handler returned an appropriate status
      code.
      """
      response = dev_appserver.RewriteResponse(redirected_outfile)
      logging.info('Upload handler returned %d', response.status_code)

      if (response.status_code in (301, 302, 303) and
          (not response.body or len(response.body.read()) == 0)):
        contentless_outfile = cStringIO.StringIO()
        contentless_outfile.write('Status: %s\n' % response.status_code)
        contentless_outfile.write(''.join(response.headers.headers))
        contentless_outfile.seek(0)
        dev_appserver.URLDispatcher.EndRedirect(self,
                                                contentless_outfile,
                                                original_outfile)
      else:
        logging.error(
            'Invalid upload handler response. Only 301, 302 and 303 '
            'statuses are permitted and it may not have a content body.')
        original_outfile.write('Status: 500\n\n')

  return UploadDispatcher()
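
Compared with Example 1, this version also validates that the upload handler answered with a redirect, and rebuilds a header-only response in a fresh buffer before passing it on. That rebuild step in isolation (the status and location values are made up):

import cStringIO

contentless = cStringIO.StringIO()
contentless.write('Status: 302\n')
contentless.write('Location: /upload-done\n')   # hypothetical redirect target
contentless.seek(0)   # rewind so the next consumer reads from the start
print contentless.read()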

Example 3

Project: ssf
Source File: importer.py
def import_spreadsheet():
    spreadsheet = request.body.read()
    import cStringIO
    spreadsheet_json = cStringIO.StringIO(spreadsheet)
    spreadsheet_json.seek(0)
    #j = json.loads(spreadsheet)
    exec("j = " + spreadsheet)
    similar_rows = []
    importable_rows = []
    if j.has_key("header_row"):
         j["spreadsheet"].pop(j["header_row"])
         j["rows"] -= 1
    if j.has_key("re_import"):
        for x in range(0, len(j["map"])):
	    j["map"][x][2] = j["map"][x][2].replace(">", ">")
    if not j.has_key("re_import"):# and j["re_import"] is not True:
    	for x in range(0, len(j["spreadsheet"])):
	    for y in range(x + 1, len(j["spreadsheet"])):
 	        k = jaro_winkler_distance_row(j["spreadsheet"][x], j["spreadsheet"][y])
	        if k is True:
	           similar_rows.append(j["spreadsheet"][x])
	           similar_rows.append(j["spreadsheet"][y])
	        else:
	           pass
	for k in similar_rows:
	    for l in k:
	       l = l.encode("ascii")
        session.similar_rows = similar_rows
    for k in j["spreadsheet"]:
        if k in similar_rows:
	    j["spreadsheet"].remove(k)
	    j["rows"] -= 1
    i = k = 0
    send_dict = {}
    resource = j["resource"].encode("ascii")
    send_dict[resource] = []
    while (i < j["rows"]):
	res = {}
	k = 0
	while (k < j["columns"]):
	    if " --> " in j["map"][k][2]:
		field, nested_resource, nested_field = j["map"][k][2].split(" --> ")
		field = "$k_" + field
		nr = nested_resource
		nested_resource= "$_" + nested_resource
		if res.has_key(field) is False:
		   res[field] = {}
		   res[field]["@resource"] = nr
		   res[field][nested_resource] = [{}]
		res[field][nested_resource][0][nested_field] = j["spreadsheet"][i][k].encode("ascii")
		k += 1
		continue
	    if "opt_" in j["map"][k][2]:
		res[j["map"][k][2].encode("ascii")]["@value"] = j["spreadsheet"][i][k].encode("ascii")
	    	res[j["map"][k][2].encode("ascii")]["$"] = j["spreadsheet"][i][k].encode("ascii")
	    else:
		res[j["map"][k][2].encode("ascii")] = j["spreadsheet"][i][k].encode("ascii")
		if j["map"][k][2].encode("ascii") == "comments":
			res[j["map"][k][2].encode("ascii")] = j["map"][k][1] + "-->" + j["spreadsheet"][i][k].encode("ascii")
	    res["@modified_at"] = j["modtime"]
	    k += 1
	send_dict[resource].append(res)
	i += 1

    word = json.dumps(send_dict[resource])
    # Remove all white spaces and newlines in the JSON
    new_word = ""
    cntr = 1
    for i in range(0, len(word)):
        if word[i] == "\"":
            cntr = cntr + 1
            cntr = cntr % 2
        if cntr == 0:
            new_word += word[i]
        if cntr == 1:
            if word[i] == " " or word[i] == "\n":
                continue
            else:
                new_word += word[i]
    # new_word is without newlines and whitespaces
    new_word  = "{\"$_" + resource + "\":"+ new_word + "}"
    # added resource name
    send = cStringIO.StringIO(new_word)
    tree = s3mgr.xml.json2tree(send)
    prefix, name = resource.split("_")
    res = s3mgr.define_resource(prefix, name)
    res.import_xml(source = tree, ignore_errors = True)
    returned_json = s3mgr.xml.tree2json(tree)
    if "@error" not in repr(returned_json):
	    return dict(module_name = module_name)
    invalid_rows = []
    exec("returned_json = " + returned_json)
    for record in returned_json["$_"+resource]:
	 del record["@modified_at"]
    for record in returned_json["$_"+resource]:
	wrong_dict = {}
        for field in record:
	    if field[0:3] == "$k_":
		nest_res = "$_" + record[field]["@resource"]
		for nested_fields in record[field][nest_res][0]:
		    if "@error" in record[field][nest_res][0][nested_fields]:
		       	wrong_dict[field + " --> " + nest_res + " --> " + nested_fields] = "*_error_*" + record[field][nest_res][0][nested_fields.encode("ascii")]["@error"] + " You entered " + record[field][nest_res][0][nested_fields.encode("ascii")]["@value"]
		    else:
			    try:
			        wrong_dict[field + " --> " + nest_res + " --> " + nested_fields] = record[field][nest_res][0][nested_fields.encode("ascii")]["@value"]
			    except:
				    wrong_dict[field + " --> " + nest_res + " --> " + nested_fields] = record[field][nest_res][0][nested_fields.encode("ascii")]
	    else:
	        if "@error" in record[field]:
		    wrong_dict[field] = "*_error_*" + record[field]["@error"] + ". You entered " + record[field]["@value"]
		else:
		    wrong_dict[field]  = record[field]["@value"]
	if "Data Import Error" not in wrong_dict.values():
	    invalid_rows.append(wrong_dict)
    temp = []
    for k in invalid_rows:
        for l in k.values():
	    if "*_error_*" in l:
	        temp.append(k)
    invalid_rows = temp
    '''f.write("\n\nInvalid rows " + repr(invalid_rows))
    session.import_success = len(invalid_rows)
    incorrect_rows = []
    correct_rows = []
    returned_tree = tree
    returned_json = s3mgr.xml.tree2json(returned_tree)
    returned_json = returned_json.encode("ascii")
    f.write("THIS IS ASCII returned_json " + returned_json + "\n\n\n")
    returned_json = json.loads(returned_json,encoding = "ascii")
    f.write("-->\n\n\n<--" + repr(type(returned_json)))
    for resource_name in returned_json:
	for record in returned_json[resource_name]:
	     if "@error" in repr(record):
		incorrect_rows.append(record)
		f.write("\n\n\n" + repr(record))
	     else:
		correct_rows.append(record)
    correct_rows_send = {}
    correct_rows_send[("$_"+prefix+"_"+name)] = correct_rows
    f.write("The correct rows are " + repr(correct_rows_send))
    for k in incorrect_rows:
	 del k["@modified_at"]
    send_json = json.dumps(correct_rows_send)
    send_json = cStringIO.StringIO(send_json)
    tree = s3mgr.xml.json2tree(send_json)
    k = res.import_xml(tree)
    if k:
       f.write("RE_IMPORT SUCCEEDED\n\n\n")
    else:
       f.write("DID NOT SUCCEED AGAIN\n\n")
    session.fields = []
    '''
    '''
    for k in invalid_rows:
        for l in k.keys():
 	    if l[0:3] == "$k_":
	    	nest_res = k[l]["@resource"]
 	        for m in k[l]["$_"+nest_res][0].keys():
	            session.fields.append(l[3:].encode("ascii") + " --> " + nest_res.encode("ascii") + " --> " + m.encode("ascii"))
    '''
    '''
	for i in range(0,len(session.fields)):
       	    session.fields[i]=session.fields[i].encode("ascii")
    	f.write("Session fields \n" + repr(session.fields)+"\n\n\n")
    	session.import_rows = len(incorrect_rows)
    	for record in incorrect_rows:
       	   for field in record:
               if "@error" in record[field]:
                  f.write(repr(record[field]))
		  record[field] = "*_error_*" + record[field]["@error"] + ". You entered " + record[field]["@value"]
	       else:
	          f.write("in else " + repr(record[field]))
	          record[field] = record[field]["@value"]
    	f.write("These rows are incorrect "+repr(incorrect_rows))

	incorrect_rows = json.dumps(incorrect_rows,ensure_ascii=True)
    	'''
    session.invalid_rows = invalid_rows
    session.import_columns = j["columns"]
    session.import_map = json.dumps(j["map"], ensure_ascii=True)
    session.import_resource = resource.encode("ascii")
    return dict()
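
The cStringIO usage to take away from this example is the round trip: json.dumps yields a plain string, and cStringIO.StringIO turns it back into a file-like source for an API that only reads from files (here s3mgr.xml.json2tree). A reduced sketch of the hand-off, with a print standing in for the tree parser and a made-up resource name:

import json
import cStringIO

payload = json.dumps({"$_org_office": [{"name": "HQ"}]})   # hypothetical data
source = cStringIO.StringIO(payload)

# Any consumer that expects .read()/.readline() works unchanged:
print source.read()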

Example 4

def DownloadRewriter(response, request_headers):
  """Intercepts blob download key and rewrites response with large download.

  Checks for the X-AppEngine-BlobKey header in the response.  If found, it will
  discard the body of the response and replace it with the blob content
  indicated.

  If a valid blob is not found, it will send a 404 to the client.

  If the application itself provides a content-type header, it will override
  the content-type stored in the action blob.

  If Content-Range header is provided, blob will be partially served.  The
  application can set blobstore.BLOB_RANGE_HEADER if the size of the blob is
  not known.  If Range is present and blobstore.BLOB_RANGE_HEADER is not, the
  Range header is used instead.

  Args:
    response: Response object to be rewritten.
    request_headers: Original request headers.  Looks for 'Range' header to copy
      to response.
  """
  blob_key = response.headers.getheader(blobstore.BLOB_KEY_HEADER)
  if blob_key:
    del response.headers[blobstore.BLOB_KEY_HEADER]


    try:
      blob_info = datastore.Get(
          datastore.Key.from_path(blobstore.BLOB_INFO_KIND,
                                  blob_key,
                                  namespace=''))

      content_range_header = response.headers.getheader('Content-Range')
      blob_size = blob_info['size']
      range_header = response.headers.getheader(blobstore.BLOB_RANGE_HEADER)
      if range_header is not None:
        del response.headers[blobstore.BLOB_RANGE_HEADER]
      else:
        range_header = request_headers.getheader('Range')

      def not_satisfiable():
        """Short circuit response and return 416 error."""
        response.status_code = 416
        response.status_message = 'Requested Range Not Satisfiable'
        response.body = cStringIO.StringIO('')
        response.headers['Content-Length'] = '0'
        del response.headers['Content-Type']
        del response.headers['Content-Range']

      if range_header:
        start, end = ParseRangeHeader(range_header)
        if start is not None:
          if end is None:
            if start >= 0:
              content_range_start = start
            else:
              content_range_start = blob_size + start
            content_range = MakeContentRange(
                content_range_start, blob_size, blob_size)
            content_range_header = str(content_range)
          else:
            content_range = MakeContentRange(start, min(end, blob_size),
                                             blob_size)
            content_range_header = str(content_range)
          response.headers['Content-Range'] = content_range_header
        else:
          not_satisfiable()
          return

      content_range_header = response.headers.getheader('Content-Range')
      content_length = blob_size
      start = 0
      end = content_length
      if content_range_header is not None:
        content_range = ParseContentRange(content_range_header)
        if content_range:
          start = content_range.start
          stop = GetContentRangeStop(content_range)
          content_length = min(stop, blob_size) - start
          stop = start + content_length
          content_range = MakeContentRange(start, stop, blob_size)
          response.headers['Content-Range'] = str(content_range)
        else:
          not_satisfiable()
          return

      blob_stream = GetBlobStorage().OpenBlob(blob_key)
      blob_stream.seek(start)
      response.body = cStringIO.StringIO(blob_stream.read(content_length))
      response.headers['Content-Length'] = str(content_length)

      content_type = response.headers.getheader('Content-Type')
      if not content_type or content_type == AUTO_MIME_TYPE:
        response.headers['Content-Type'] = blob_info['content_type']
      response.large_response = True



    except datastore_errors.EntityNotFoundError:

      response.status_code = 500
      response.status_message = 'Internal Error'
      response.body = cStringIO.StringIO()

      if response.headers.getheader('status'):
        del response.headers['status']
      if response.headers.getheader('location'):
        del response.headers['location']
      if response.headers.getheader('content-type'):
        del response.headers['content-type']

      logging.error('Could not find blob with key %s.', blob_key)

Example 5

Project: onomatopoeia
Source File: map.py
    def getBlock(self, x, y, z):
        cur = self.conn.cursor()
        cur.execute("SELECT `data` FROM `blocks` WHERE `pos`==? LIMIT 1", (getBlockAsInteger(x, y, z), ))
        r = cur.fetchone()
        if not r:
            return DummyMapBlock()
        f = cStringIO.StringIO(r[0])
        version = readU8(f)
        flags = f.read(1)

        # Check flags
        is_underground = ((ord(flags) & 1) != 0)
        day_night_differs = ((ord(flags) & 2) != 0)
        lighting_expired = ((ord(flags) & 4) != 0)
        generated = ((ord(flags) & 8) != 0)

        if version >= 22:
            content_width = readU8(f)
            params_width = readU8(f)

        # Node data
        dec_o = zlib.decompressobj()
        try:
            mapdata = array.array("B", dec_o.decompress(f.read()))
        except:
            mapdata = []

        # Reuse the unused tail of the file
        f.close()
        f = cStringIO.StringIO(dec_o.unused_data)

        # zlib-compressed node metadata list
        dec_o = zlib.decompressobj()
        try:
            metaliststr = array.array("B", dec_o.decompress(f.read()))
            # And do nothing with it
        except:
            metaliststr = []

        # Reuse the unused tail of the file
        f.close()
        f = cStringIO.StringIO(dec_o.unused_data)
        data_after_node_metadata = dec_o.unused_data

        if version <= 21:
            # mapblockobject_count
            readU16(f)

        if version == 23:
            readU8(f)  # Unused node timer version (always 0)
        if version == 24:
            ver = readU8(f)
            if ver == 1:
                num = readU16(f)
                for i in range(0, num):
                    readU16(f)
                    readS32(f)
                    readS32(f)

        static_object_version = readU8(f)
        static_object_count = readU16(f)
        for i in range(0, static_object_count):
            # u8 type (object type-id)
            object_type = readU8(f)
            # s32 pos_x_nodes * 10000
            pos_x_nodes = readS32(f) / 10000
            # s32 pos_y_nodes * 10000
            pos_y_nodes = readS32(f) / 10000
            # s32 pos_z_nodes * 10000
            pos_z_nodes = readS32(f) / 10000
            # u16 data_size
            data_size = readU16(f)
            # u8[data_size] data
            data = f.read(data_size)

        timestamp = readU32(f)

        id_to_name = {}
        if version >= 22:
            name_id_mapping_version = readU8(f)
            num_name_id_mappings = readU16(f)
            for i in range(0, num_name_id_mappings):
                node_id = readU16(f)
                name_len = readU16(f)
                name = f.read(name_len)
                id_to_name[node_id] = name

        # Node timers
        if version >= 25:
            timer_size = readU8(f)
            num = readU16(f)
            for i in range(0, num):
                readU16(f)
                readS32(f)
                readS32(f)

        #print(id_to_name)
        #print(mapdata)
        return MapBlock(id_to_name, mapdata)
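
A detail worth noting here: a map block packs several zlib streams back to back, and after each decompress the leftover bytes sit in dec_o.unused_data, so wrapping that tail in a fresh cStringIO.StringIO restarts the file interface exactly where the previous stream ended. A stripped-down sketch of the same chaining, with two made-up blocks glued together:

import zlib
import cStringIO

blob = zlib.compress("first") + zlib.compress("second")
f = cStringIO.StringIO(blob)

dec = zlib.decompressobj()
print dec.decompress(f.read())        # -> "first"

# Re-wrap whatever trailed the first stream and decompress again.
f = cStringIO.StringIO(dec.unused_data)
dec = zlib.decompressobj()
print dec.decompress(f.read())        # -> "second"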

Example 6

Project: grr
Source File: build.py
  def MakeSelfExtractingZip(self, payload_data, output_path):
    """Repack the installer into the payload.

    Args:
      payload_data: data payload for zip file
      output_path: filename for the zip output
    Raises:
      RuntimeError: if the ClientBuilder.unzipsfx_stub doesn't require admin.
    Returns:
      output_path: filename string of zip output file
    """
    context = self.context + ["Client Context"]

    src_zip = zipfile.ZipFile(cStringIO.StringIO(payload_data), mode="r")
    zip_data = cStringIO.StringIO()
    output_zip = zipfile.ZipFile(
        zip_data, mode="w", compression=zipfile.ZIP_DEFLATED)

    config_file_name = config_lib.CONFIG.Get("ClientBuilder.config_filename",
                                             context=context)
    # Copy the rest of the files from the package to the new zip.
    for template_file in src_zip.namelist():
      if template_file != config_file_name:
        # Avoid writing the config file twice if we're repacking a binary that
        # has already been run through deployment. We write it in the next step,
        # so no need to copy over from the original here.
        CopyFileInZip(src_zip, template_file, output_zip)

    client_config_content = self.GetClientConfig(context)

    output_zip.writestr(
        config_file_name,
        client_config_content,
        compress_type=zipfile.ZIP_STORED)

    # The zip file comment is used by the self extractor to run
    # the installation script
    output_zip.comment = "$AUTORUN$>%s" % config_lib.CONFIG.Get(
        "ClientBuilder.autorun_command_line", context=context)

    output_zip.close()

    utils.EnsureDirExists(os.path.dirname(output_path))
    with open(output_path, "wb") as fd:
      # First write the installer stub
      stub_data = cStringIO.StringIO()
      unzipsfx_stub = config_lib.CONFIG.Get("ClientBuilder.unzipsfx_stub",
                                            context=context)
      stub_raw = open(unzipsfx_stub, "rb").read()

      # Check stub has been compiled with the requireAdministrator manifest.
      if "level=\"requireAdministrator" not in stub_raw:
        raise RuntimeError("Bad unzip binary in use. Not compiled with the"
                           "requireAdministrator manifest option.")

      stub_data.write(stub_raw)

      # If in verbose mode, modify the unzip bins PE header to run in console
      # mode for easier debugging.
      SetPeSubsystem(
          stub_data,
          console=config_lib.CONFIG.Get("ClientBuilder.console",
                                        context=context))

      # Now patch up the .rsrc section to contain the payload.
      end_of_file = zip_data.tell() + stub_data.tell()

      # This is the IMAGE_SECTION_HEADER.Name which is also the start of
      # IMAGE_SECTION_HEADER.
      offset_to_rsrc = stub_data.getvalue().find(".rsrc")

      # IMAGE_SECTION_HEADER.PointerToRawData is a 32 bit int.
      stub_data.seek(offset_to_rsrc + 20)
      start_of_rsrc_section = struct.unpack("<I", stub_data.read(4))[0]

      # Adjust IMAGE_SECTION_HEADER.SizeOfRawData to span from the old start to
      # the end of file.
      stub_data.seek(offset_to_rsrc + 16)
      stub_data.write(struct.pack("<I", end_of_file - start_of_rsrc_section))

      # Concatenate stub and zip file.
      out_data = cStringIO.StringIO()
      out_data.write(stub_data.getvalue())
      out_data.write(zip_data.getvalue())

      # Then write the actual output file.
      fd.write(self.Sign(out_data.getvalue()))

    logging.info("Deployable binary generated at %s", output_path)

    return output_path
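
The move worth copying from this example is the fully in-memory zip: zipfile.ZipFile writes into a cStringIO.StringIO buffer just as it would into a real file, and getvalue() then hands back the raw archive bytes for concatenation with the stub. A minimal sketch (the member name and contents are made up):

import zipfile
import cStringIO

zip_data = cStringIO.StringIO()
archive = zipfile.ZipFile(zip_data, mode="w", compression=zipfile.ZIP_DEFLATED)
archive.writestr("config.yaml", "verbose: true\n")   # hypothetical member
archive.close()

raw = zip_data.getvalue()   # the finished archive; never touched disk
print len(raw)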

Example 7

Project: django-static
Source File: django_static.py
def _static_file(filename,
                 optimize_if_possible=False,
                 symlink_if_possible=False,
                 warn_no_file=True):
    """
    """
    if not settings.DJANGO_STATIC:
        return file_proxy(filename, disabled=True)

    def wrap_up(filename):
        if settings.DJANGO_STATIC_MEDIA_URL_ALWAYS:
            return settings.DJANGO_STATIC_MEDIA_URL + filename
        elif settings.DJANGO_STATIC_MEDIA_URL:
            return settings.DJANGO_STATIC_MEDIA_URL + filename
        return filename

    is_combined_files = isinstance(filename, list)
    if is_combined_files and len(filename) == 1:
        # e.g. passed a list of files but only one so treat it like a
        # single file
        filename = filename[0]
        is_combined_files = False

    if is_combined_files:
        map_key = ';'.join(filename)
    else:
        map_key = filename

    if settings.DJANGO_STATIC_USE_MANIFEST_FILE:
        new_filename, m_time = _get(_MANIFEST_PATH, map_key)
    else:
        new_filename, m_time = _FILE_MAP.get(map_key, (None, None))


    # we might already have done a conversion but the question is
    # whether the file has changed. We only bother to check this in
    # DEBUG mode because it adds one more unnecessary operation.
    if new_filename:
        if settings.DEBUG:
            # need to check if the original has changed
            old_new_filename = new_filename
            new_filename = None
        else:
            # This is really fast and only happens when NOT in DEBUG mode
            # since it doesn't do any comparison
            return file_proxy(wrap_up(new_filename), **fp_default_kwargs)
    else:
        # This is important so that we can know that there wasn't an
        # old file which will help us know we don't need to delete
        # the old one
        old_new_filename = None


    if not new_filename:
        if is_combined_files:
            # It's a list! We have to combine it into one file
            new_file_content = StringIO()
            each_m_times = []
            extension = None
            for each in filename:
                filepath, path = _find_filepath_in_roots(each)
                if not filepath:
                    raise OSError("Failed to find %s in %s" % (each,
                        ",".join(settings.DJANGO_STATIC_MEDIA_ROOTS)))

                if extension:
                    if os.path.splitext(filepath)[1] != extension:
                        raise ValueError("Mismatching file extension in combo %r" % \
                          each)
                else:
                    extension = os.path.splitext(filepath)[1]
                each_m_times.append(os.stat(filepath)[stat.ST_MTIME])
                new_file_content.write(open(filepath, 'r').read().strip())
                new_file_content.write('\n')

            filename = _combine_filenames(filename, settings.DJANGO_STATIC_NAME_MAX_LENGTH)
            # Set the root path of the combined files to the first entry
            # in the MEDIA_ROOTS list. This way django-static behaves a
            # little more predictable.
            path = settings.DJANGO_STATIC_MEDIA_ROOTS[0]
            new_m_time = max(each_m_times)

        else:
            filepath, path = _find_filepath_in_roots(filename)
            if not filepath:
                if warn_no_file:
                    msg = "Can't find file %s in %s" % \
                      (filename, ",".join(settings.DJANGO_STATIC_MEDIA_ROOTS))
                    warnings.warn(msg)
                return file_proxy(wrap_up(filename),
                                  **dict(fp_default_kwargs,
                                         filepath=filepath,
                                         notfound=True))

            new_m_time = os.stat(filepath)[stat.ST_MTIME]

        if m_time:
            # we had the filename in the map
            if m_time != new_m_time:
                # ...but it has changed!
                m_time = None
            else:
                # ...and it hasn't changed!
                return file_proxy(wrap_up(old_new_filename))

        if not m_time:
            # We did not have the filename in the map OR it has changed
            apart = os.path.splitext(filename)
            new_filename = _generate_filename(apart, new_m_time)
            fileinfo = (settings.DJANGO_STATIC_NAME_PREFIX + new_filename,
                        new_m_time)


            if settings.DJANGO_STATIC_USE_MANIFEST_FILE:
                _set(_MANIFEST_PATH, map_key, fileinfo)
            else:
                _FILE_MAP[map_key] = fileinfo


            if old_new_filename:
                old_new_filename = old_new_filename.replace(
                                      settings.DJANGO_STATIC_NAME_PREFIX, '')
                old_new_filepath = _filename2filepath(old_new_filename,
                        settings.DJANGO_STATIC_SAVE_PREFIX or path)
                if not os.path.isdir(os.path.dirname(old_new_filepath)):
                    _mkdir(os.path.dirname(old_new_filepath))

                if os.path.isfile(old_new_filepath):
                    os.remove(old_new_filepath)
    new_filepath = _filename2filepath(new_filename,
            settings.DJANGO_STATIC_SAVE_PREFIX or path)

    if not os.path.isdir(os.path.dirname(new_filepath)):
        _mkdir(os.path.dirname(new_filepath))


    # Files are either slimmered or symlinked or just copied. Basically, only
    # .css and .js can be slimmered but not all are. For example, an already
    # minified say jquery.min.js doesn't need to be slimmered nor does it need
    # to be copied.
    # If you're on windows, it will always have to do a copy.
    # When symlinking, the achievement is that it gives the file a
    # unique name, different from the original.
    #
    # The caller of this method is responsible for dictating whether we
    # should slimmer and whether we can symlink.
    if optimize_if_possible:
        # Then we expect to be able to modify the content and we will
        # definitely need to write a new file.
        if is_combined_files:
            content = new_file_content.getvalue().decode('utf-8')
        else:
            #content = open(filepath).read()
            content = codecs.open(filepath, 'r', 'utf-8').read()
        if new_filename.endswith('.js') and has_optimizer(JS):
            content = optimize(content, JS)
        elif new_filename.endswith('.css') and has_optimizer(CSS):
            content = optimize(content, CSS)

            # and _static_file() all images referred to in the CSS file itself
            def replacer(match):
                this_filename = match.groups()[0]

                if (this_filename.startswith('"') and this_filename.endswith('"')) or \
                  (this_filename.startswith("'") and this_filename.endswith("'")):
                    this_filename = this_filename[1:-1]
                # It's quite common for a CSS file to refer to a file that
                # doesn't exist: if you reference an image in a selector you
                # never use, nothing breaks. That's why we only warn about
                # nonexistent files in DEBUG mode here.

                replace_with = this_filename

                if not (this_filename.startswith('/') or \
                  (this_filename.startswith('http') and '://' in this_filename)):
                    # if the referenced filename is something like
                    # 'images/foo.jpg' or 'sub/module.css' then we need to copy the
                    # current relative directory
                    replace_with = this_filename
                    this_filename = os.path.join(os.path.dirname(filename), this_filename)
                optimize_again = optimize_if_possible and \
                                 this_filename.lower().endswith('.css') or False
                new_filename = _static_file(this_filename,
                                            symlink_if_possible=symlink_if_possible,
                                            optimize_if_possible=optimize_again,
                                            warn_no_file=settings.DEBUG and True or False)
                return match.group().replace(replace_with, new_filename)

            content = REFERRED_CSS_URLS_REGEX.sub(replacer, content)
            content = REFERRED_CSS_URLLESS_IMPORTS_REGEX.sub(replacer, content)

        elif slimmer or cssmin:
            raise ValueError(
              "Unable to slimmer file %s. Unrecognized extension" % new_filename)
        #print "** STORING:", new_filepath
        codecs.open(new_filepath, 'w', 'utf-8').write(content)
    elif symlink_if_possible and not is_combined_files:
        #print "** SYMLINK:", filepath, '-->', new_filepath

        # The reason we have to do this strange while loop is that it can
        # happen that in between the time it takes to destroy symlink till you
        # can create it, another thread or process might be trying to do the
        # exact same thing with just a fraction of a second difference, so
        # that by the time this thread creates the symlink it may already
        # exist, which raises an OSError.
        #
        # This is quite possible when Django for example starts multiple fcgi
        # threads roughly all at the same time. An alternative approach would
        # be to store the global variable _FILE_MAP in a cache or something
        # which would effectively make it thread safe but that has the annoying
        # disadvantage that it remains in the cache between server restarts and
        # for a production environment, server restarts very often happen
        # because you have upgraded the code (and the static files). So, an
        # alternative is to use a cache so that thread number 2, number 3 etc
        # gets the file mappings of the first thread and then let this cache
        # only last for a brief amount of time. That amount of time would
        # basically be equivalent of the time the sys admin or developer would
        # have to wait between new code deployment and refreshed symlinks for
        # the static files. That feels convoluted and complex so I've decided
        # to instead use this rather obtuse while loop which is basically built
        # to try X number of times. If it still fails after X number of attempts
        # there's something else wrong with the IO which needs to bubble up.
        _max_attempts = 10
        while True:
            try:
                if os.path.lexists(new_filepath):
                    # since in the other cases we write a new file, it doesn't matter
                    # that the file existed before.
                    # That's not the case with symlinks
                    os.unlink(new_filepath)

                os.symlink(filepath, new_filepath)
                break
            except OSError:
                _max_attempts -= 1
                if _max_attempts <= 0:
                    raise
    elif is_combined_files:
        #print "** STORING COMBO:", new_filepath
        open(new_filepath, 'w').write(new_file_content.getvalue())
    else:
        # straight copy
        #print "** STORING COPY:", new_filepath
        shutil.copyfile(filepath, new_filepath)

    return file_proxy(wrap_up(settings.DJANGO_STATIC_NAME_PREFIX + new_filename),
                      **dict(fp_default_kwargs, new=True,
                             filepath=new_filepath, checked=True))
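
Judging from the bare StringIO() calls, this module does from cStringIO import StringIO and uses the buffer as a string builder when combining files: write each piece, then getvalue() for the concatenated result. In reduced form, with inline strings standing in for file contents:

from cStringIO import StringIO

combined = StringIO()
for chunk in ["body { margin: 0 }", "p { color: red }"]:   # stand-in contents
    combined.write(chunk.strip())
    combined.write("\n")

print combined.getvalue()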

Example 8

Project: woffTools
Source File: __init__.py
    def save(self, file, compressionLevel=9, recompressTables=False, reorderTables=True, recalculateHeadChecksum=True):
        """
        Save a WOFF into a file object specified by the
        file argument. Optionally, file can be a path and a
        new file will be created at that location.

        compressionLevel is the compression level to be
        used with zlib. This must be an int between 1 and 9.
        The default is 9, the highest compression, but slowest
        compression time.

        Set recompressTables to True if you want any already
        compressed tables to be decompressed and then recompressed
        using the level specified by compressionLevel.

        If you want the tables in the WOFF reordered following
        the suggested optimal table orderings described in the
        OTF/OFF specification, set reorderTables to True.
        Tables cannot be reordered if a DSIG table is in the font.

        If you change any of the SFNT data or reorder the tables,
        the head table checkSumAdjustment must be recalculated.
        If you are not changing any of the SFNT data, you can set
        recalculateHeadChecksum to False to prevent the recalculation.
        This must be set to False if the font contains a DSIG table.
        """
        # if DSIG is to be written, the table order
        # must be completely specified. otherwise the
        # DSIG may not be valid after decoding the WOFF.
        tags = self.keys()
        if "GlyphOrder" in tags:
            tags.remove("GlyphOrder")
        if "DSIG" in tags:
            if self._tableOrder is None or (set(self._tableOrder) != set(tags)):
                raise WOFFLibError("A complete table order must be supplied when saving a font with a 'DSIG' table.")
            elif reorderTables:
                raise WOFFLibError("Tables can not be reordered when a 'DSIG' table is in the font. Set reorderTables to False.")
            elif recalculateHeadChecksum:
                raise WOFFLibError("The 'head' table checkSumAdjustment can not be recalculated when a 'DSIG' table is in the font.")
        # sort the tags if necessary
        if reorderTables:
            tags = sortedTagList(tags)
        # open a file if necessary
        closeStream = False
        if not hasattr(file, "write"):
            closeStream = True
            file = open(file, "wb")
        # write the table data
        if "GlyphOrder" in tags:
            tags.remove("GlyphOrder")
        numTables = len(tags)
        writer = WOFFWriter(file, numTables, flavor=self.flavor,
            majorVersion=self.majorVersion, minorVersion=self.minorVersion,
            compressionLevel=compressionLevel, recalculateHeadChecksum=recalculateHeadChecksum,
            verbose=self.verbose)
        for tag in tags:
            origData = None
            origLength = None
            origChecksum = None
            compLength = None
            # table is loaded
            if self.isLoaded(tag):
                origData = self.getTableData(tag)
            # table is in reader
            elif self.reader is not None:
                if recompressTables:
                    origData = self.getTableData(tag)
                else:
                    if self.verbose:
                        debugmsg("Reading '%s' table from disk" % tag)
                    origData, origLength, origChecksum, compLength = self.reader.getCompressedTableData(tag)
            # add to writer
            writer.setTable(tag, origData, origLength=origLength, origChecksum=origChecksum, compLength=compLength)
        # write the metadata
        metadata = None
        metaOrigLength = None
        metaLength = None
        if hasattr(self, "metadata"):
            declaration = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            tree = ElementTree.ElementTree(self.metadata)
            f = StringIO()
            tree.write(f, encoding="utf-8")
            metadata = f.getvalue()
            # make sure the metadata starts with the declaration
            if not metadata.startswith(declaration):
                metadata = declaration + metadata
            del f
        elif self.reader is not None:
            if recompressTables:
                metadata = self.reader.metadata
            else:
                metadata, metaOrigLength, metaLength = self.reader.getCompressedMetadata()
        if metadata:
            writer.setMetadata(metadata, metaOrigLength=metaOrigLength, metaLength=metaLength)
        # write the private data
        privData = self.privateData
        if privData:
            writer.setPrivateData(privData)
        # close the writer
        writer.close()
        # close the file
        if closeStream:
            file.close()

Example 9

Project: eden
Source File: budget.py
def kit_export_pdf():
    """
        Export a list of Kits in Adobe PDF format
        Uses Geraldo SubReport
        @ToDo: Use S3PDF Method
    """
    try:
        from reportlab.lib.units import cm
        from reportlab.lib.pagesizes import A4
        from reportlab.lib.enums import TA_CENTER, TA_RIGHT
    except ImportError:
        session.error = "Python needs the ReportLab module installed for PDF export"
        redirect(URL(c="kit"))
    try:
        from geraldo import Report, ReportBand, SubReport, Label, ObjectValue, SystemField, landscape, BAND_WIDTH
        from geraldo.generators import PDFGenerator
    except ImportError:
        session.error = "Python needs the Geraldo module installed for PDF export"
        redirect(URL(c="kit"))

    table = db.budget_kit
    objects_list = db(table.id > 0).select()
    if not objects_list:
        session.warning = T("No data in this table - cannot create PDF!")
        redirect(URL(r=request))

    import cStringIO
    output = cStringIO.StringIO()

    #class MySubReport(SubReport):
    #    def __init__(self, db=None, **kwargs):
    #        " Initialise parent class & make any necessary modifications "
    #        self.db = db
    #        SubReport.__init__(self, **kwargs)

    class MyReport(Report):
        def __init__(self, queryset=None, db=None):
            " Initialise parent class & make any necessary modifications "
            Report.__init__(self, queryset)
            self.db = db
        # can't use T() here!
        title = "Kits"
        page_size = landscape(A4)
        class band_page_header(ReportBand):
            height = 1.3*cm
            elements = [
                SystemField(expression="%(report_title)s", top=0.1*cm,
                    left=0, width=BAND_WIDTH, style={"fontName": "Helvetica-Bold",
                    "fontSize": 14, "alignment": TA_CENTER}
                    ),
                Label(text="Code", top=0.8*cm, left=0.2*cm),
                Label(text="Description", top=0.8*cm, left=2*cm),
                Label(text="Cost", top=0.8*cm, left=10*cm),
                Label(text="Monthly", top=0.8*cm, left=12*cm),
                Label(text="per Minute", top=0.8*cm, left=14*cm),
                Label(text="per Megabyte", top=0.8*cm, left=16*cm),
                Label(text="Comments", top=0.8*cm, left=18*cm),
            ]
            borders = {"bottom": True}
        class band_page_footer(ReportBand):
            height = 0.5*cm
            elements = [
                Label(text="%s" % request.utcnow.date(), top=0.1*cm, left=0),
                SystemField(expression="Page # %(page_number)d of %(page_count)d", top=0.1*cm,
                    width=BAND_WIDTH, style={"alignment": TA_RIGHT}),
            ]
            borders = {"top": True}
        class band_detail(ReportBand):
            height = 0.5*cm
            auto_expand_height = True
            elements = (
                    ObjectValue(attribute_name="code", left=0.2*cm, width=1.8*cm),
                    ObjectValue(attribute_name="description", left=2*cm, width=8*cm),
                    ObjectValue(attribute_name="total_unit_cost", left=10*cm, width=2*cm),
                    ObjectValue(attribute_name="total_monthly_cost", left=12*cm, width=2*cm),
                    ObjectValue(attribute_name="total_minute_cost", left=14*cm, width=2*cm),
                    ObjectValue(attribute_name="total_megabyte_cost", left=16*cm, width=2*cm),
                    ObjectValue(attribute_name="comments", left=18*cm, width=6*cm),
                    )
        subreports = [
            SubReport(
                #queryset_string = "db((db.budget_kit_item.kit_id == %(object)s.id) & (db.budget_item.id == db.budget_kit_item.item_id)).select(db.budget_item.code, db.budget_item.description, db.budget_item.unit_cost)",
                #queryset_string = "db(db.budget_kit_item.kit_id == %(object)s.id).select()",
                band_header = ReportBand(
                        height=0.5*cm,
                        elements=[
                            Label(text="Item ID", top=0, left=0.2*cm, style={"fontName": "Helvetica-Bold"}),
                            Label(text="Quantity", top=0, left=2*cm, style={"fontName": "Helvetica-Bold"}),
                            #Label(text="Unit Cost", top=0, left=4*cm, style={"fontName": "Helvetica-Bold"}),
                            ],
                        borders={"top": True, "left": True, "right": True},
                        ),
                detail_band = ReportBand(
                        height=0.5*cm,
                        elements=[
                            ObjectValue(attribute_name="item_id", top=0, left=0.2*cm),
                            ObjectValue(attribute_name="quantity", top=0, left=2*cm),
                            #ObjectValue(attribute_name="unit_cost", top=0, left=4*cm),
                            ]
                        ),
                ),
            ]


    #report = MyReport(queryset=objects_list)
    report = MyReport(queryset=objects_list, db=db)
    report.generate_by(PDFGenerator, filename=output)

    output.seek(0)
    import gluon.contenttype
    response.headers["Content-Type"] = gluon.contenttype.contenttype(".pdf")
    filename = "%s_kits.pdf" % (request.env.server_name)
    response.headers["Content-disposition"] = "attachment; filename=\"%s\"" % filename
    return output.read()
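
The closing lines are the common serve-from-memory idiom: generate into a cStringIO buffer, seek(0) to rewind, then read() the whole buffer back as the response body. In reduced form:

import cStringIO

output = cStringIO.StringIO()
output.write("%PDF-1.4 ...")   # stand-in for the generated report bytes
output.seek(0)                 # rewind; without this, read() returns ""
print repr(output.read())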

Example 10

Project: oleviewdotnet
Source File: config.py
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        #Apply new configuration.

                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error, e:
                if not isinstance(e.args, tuple):
                    raise
                else:
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
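
In the fallback branch above, the configuration arrives as one string off the socket, and cStringIO.StringIO adapts it for logging.config.fileConfig, which accepts a file name or a file-like object. A minimal self-contained sketch of that adaptation (the config text is made up):

import cStringIO
import logging.config

conf_text = (
    "[loggers]\nkeys=root\n"
    "[handlers]\nkeys=console\n"
    "[formatters]\nkeys=plain\n"
    "[logger_root]\nlevel=INFO\nhandlers=console\n"
    "[handler_console]\nclass=StreamHandler\nlevel=INFO\n"
    "formatter=plain\nargs=(sys.stderr,)\n"
    "[formatter_plain]\nformat=%(message)s\n"
)
logging.config.fileConfig(cStringIO.StringIO(conf_text))
logging.info("configured from an in-memory file")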

Example 12

Project: WAPT
Source File: config.py
View license
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # Apply new configuration.

                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error as e:
                if e.errno != RESET_ERROR:
                    raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
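
Example 12 differs from the previous copy mainly in its socket.error handling (it checks e.errno instead of unpacking e.args), but it also makes the JSON branch clear: json.loads is tried before falling back to fileConfig via cStringIO.StringIO. A sketch of a dictConfig payload travelling over the same wire protocol (illustrative; assumes the default listener port):

import json
import socket
import struct

# Hypothetical dictConfig payload; handle() tries json.loads(chunk) first
# and only falls back to fileConfig on failure.
payload = json.dumps({'version': 1, 'root': {'level': 'INFO'}})

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('localhost', 9030))  # assumes the default listener port
s.sendall(struct.pack('>L', len(payload)) + payload)
s.close()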

Example 13

Project: Herd
Source File: track.py
View license
    def get_infopage(self):
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')

            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
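
Example 13 uses StringIO purely as a string builder: repeated write() calls accumulate the page, and a single getvalue() materializes it, avoiding repeated string concatenation. A stripped-down sketch of the same pattern (illustrative, not the tracker's code):

from cStringIO import StringIO

def render_rows(rows):
    # Accumulate HTML with write() instead of building it by concatenation,
    # then materialize the result once with getvalue().
    s = StringIO()
    s.write('<table>\n')
    for name, count in rows:
        s.write('<tr><td>%s</td><td align="right">%i</td></tr>\n' % (name, count))
    s.write('</table>\n')
    return s.getvalue()

print render_rows([('alpha', 3), ('beta', 7)])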

Example 14

Project: bh2014
Source File: config.py
View license
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # Apply new configuration.

                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error, e:
                if not isinstance(e.args, tuple):
                    raise
                else:
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
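
The docstring above says to stop the server with stopListening(), which none of these excerpts include. A sketch of how that counterpart typically looks (a hedged reconstruction along the lines of the stock logging.config, using the module-level _listener assigned in run()):

def stopListening():
    # Flip the abort flag under the logging lock; serve_until_stopped()
    # notices it on the next select() timeout and closes the socket.
    global _listener
    logging._acquireLock()
    try:
        if _listener:
            _listener.abort = 1
            _listener = None
    finally:
        logging._releaseLock()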

Example 15

Project: PySPICE
Source File: mkwrapper.py
View license
def main(cspice_toolkit):
    global cspice_src

    parsing_prototype = False
    curr_prototype = ''
    module_methods = StringIO()
    buffer = StringIO()

    cspice_header = os.path.join(cspice_toolkit, 'include', 'SpiceUsr.h')
    cspice_src = os.path.join(cspice_toolkit, 'src', 'cspice')

    if not os.path.exists(cspice_header):
        sys.exit('Error: Unable to find %s' % cspice_header)

    if not os.path.exists(cspice_src):
        sys.exit('Error: Unable to find %s' % cspice_header)

    # preprocess the header file
    output = StringIO()
    run_command('gcc -E %s' % cspice_header, output)
    output.reset()

    used_prototypes = 0
    total_prototypes = 0
    line_no = 0
    while 1:
        input = output.readline()
        line_no += 1
        # #debug('line: {1}, input: {0}'.format(input,line_no))
        if input == "":
            break
        input = input.strip()

        if input == "":
            continue
        # if parsing still false and line does not have opening bracket
        elif not parsing_prototype and "(" not in input:
            continue
        if parsing_prototype:
            # #debug("parse_adding: %s" % input)

            curr_prototype += input

            # #debug("curr_prototype now: %s" % curr_prototype)

            # if the last character is a semi-colon, the prototype is
            # complete, pass it along to the wrapper generator and clear
            # out the curr_prototype variable
            if input.endswith(";"):
                parsing_prototype = False

                # gen_wrapper can return False, then don't count it as used
                #debug('')
                #debug('%s\n' % curr_prototype)
                if gen_wrapper(curr_prototype, buffer):
                    used_prototypes += 1
                total_prototypes += 1

                curr_prototype = ""
            pass
        else:
            first_word = input[0:input.index(" ")]
            # #debug('first_word: {0}'.format(first_word))
            if first_word in function_types:
                # #debug("adding %s" % input)

                curr_prototype += input

                # #debug("curr_prototype now: %s" % curr_prototype)
            else:
                continue
            if input.endswith(";"):
                # #debug("prototype to be wrapped: {0}".format(curr_prototype))
                gen_wrapper(curr_prototype, buffer)
                curr_prototype = ""
            else:
                parsing_prototype = True

    sys.stderr.write("prototypes used: %d, total: %d\n" % (used_prototypes, total_prototypes))
    # put together the methods array
    for module_def in module_defs:
        module_methods.write("\n  %s" % module_def)

    # print out necessary boilerplate stuff
    return """\
/*
THIS IS AUTOMATICALLY GENERATED CODE.  IF THERE IS AN ERROR, PLEASE
MAKE ANY NECESSARY CHANGES IN THE PYTHON SCRIPT NAMED mkwrapper.py.

THIS CODE HAS NOT BEEN THOROUGHLY TESTED, USE AT YOUR OWN RISK, THE
AUTHOR(S) IS/ARE NOT RESPONSIBLE IF YOUR CRAFT GOES DOWN, BLAH BLAH
BLAH.  SEE FILE "LICENSE" FOR MORE INFO.
*/

#include "pyspice.h"

PyObject *SpiceException;

%s
PyMethodDef methods[] = {
%s
  {NULL, NULL},
};

void init_spice(PyObject *self)
{
  PyObject *m = NULL;

  m = Py_InitModule("_spice", methods);

  /* Don't allow an exception to stop execution */
  erract_c("SET", 0, "RETURN");
  errdev_c("SET", 0, "NULL");

  SpiceException = \
    PyErr_NewException("_spice.SpiceException", PyExc_Exception, NULL);
  Py_INCREF(SpiceException);

  PyModule_AddObject(m, "SpiceException", SpiceException);
}""" % (buffer.getvalue(), module_methods.getvalue())

Example 16

Project: code-vilya
Source File: git_http_backend.py
View license
    def __call__(self, environ, start_response):
        """
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
        Reads commands and data from HTTP POST's body.
        returns an iterator obj with contents of git command's response to stdout
        """
        # 1. Determine git_command, repo_path
        # 2. Determine IN content (encoding)
        # 3. prepare OUT content (encoding, header)

        dataObj = {}
        answer = self.basic_checks(dataObj, environ, start_response)
        if answer:
            # This is a WSGI "trick": basic_checks has already prepared the headers
            # and a response body (the 'answer' returned here). The presence of
            # anything truthy in 'answer' means some error path has already
            # prepared a response, and all we need to do is return it.
            return answer

        git_command = dataObj['git_command']
        repo_path = dataObj['repo_path']

        try:
            _l = int(environ.get('CONTENT_LENGTH',''))
        except:
            _l = None

        # Note: depending on the WSGI server, the following ways of handling
        # chunked request bodies are possible:
        # 1. A WSGI 1.0-only compliant server. wsgi.input.read() is bottomless
        #    and Content-Length is absent.
        #    If the WSGI app assumes that a missing size header means zero, it
        #    will respond with wrong data. (This code does not assume None =
        #    zero data. We look deeper.)
        #    If the WSGI app is chunked-aware but respects WSGI 1.0 only,
        #    it will reply with "501 Not Implemented".
        # 2. A WSGI 1.0-compliant server that tries to accommodate Transfer-Encoding:
        #    chunked requests by caching the body and presenting it as a wsgi.input
        #    file-like. The Content-Length header is set to the captured size and the
        #    Transfer-Encoding header is removed. This is not per the WSGI 1.0 spec,
        #    but it is a good thing to do, and all WSGI 1.x apps are happy.
        # 3. A WSGI 1.1-compliant server that presents Transfer-Encoding: chunked
        #    requests as a file-like that yields an EOF at the end.
        #    The Content-Length header is NOT set.
        #    Only WSGI 1.1 apps are happy. WSGI 1.0 apps are confused by the lack
        #    of a Content-Length header and blow up. (We are a WSGI 1.1 app.)

        # Any WSGI server that claims to be HTTP/1.1 compliant must deal with chunked
        # requests: if not #3 above, a self-respecting HTTP/1.1 server would do #2.

        wsgi_version = environ.get('wsgi.version',(1,0))
        if wsgi_version[0] >= 1 and wsgi_version[1] >= 1: # if it's 1.1 or higher.
            wsgi_input_has_EOF = True
        else:
            wsgi_input_has_EOF = False

        _i = environ.get('wsgi.input')
        if wsgi_input_has_EOF: # this is approximately equal "if server is WSGI 1.1 and above"
            stdin = _i
        else:
            if _l > self.bufsize: # too large to be a string in memory
                bs = self.bufsize
                btr = _l
                stdin = tempfile.TemporaryFile()
                while btr >= bs:
                    stdin.write(_i.read(bs))
                    btr -= bs
                stdin.write(_i.read(btr))
                stdin.flush()
                stdin.seek(0)
            else: # between zero and max memory buffer size = string or bytes
                stdin = _i.read(_l)

        cmd = r'git %s --stateless-rpc "%s"' % (git_command[4:], repo_path)
        _p = subprocess.Popen(cmd,
            bufsize = -1,
            shell = True,
            stdin = subprocess.PIPE,
            stdout = subprocess.PIPE,
            env = dict(CODE_REMOTE_USER=environ.get('REMOTE_USER', '')),
            )
        if isinstance(stdin, file):
            out, _ = _p.communicate(stdin.read())
        else:
            try:
                fp = gzip.GzipFile(fileobj=StringIO(stdin))
                out, _ = _p.communicate(fp.read())
            except IOError:
                out, _ = _p.communicate(stdin)
        out = StringIO(out)

        if git_command == u'git-receive-pack':
            # updating refs manually after each push. Needed for pre-1.7.0.4 git clients using regular HTTP mode.
            subprocess.call(u'git --git-dir "%s" update-server-info' % repo_path, shell=True)

        headers = [('Content-type', 'application/x-%s-result' % git_command.encode('utf8'))]
        return self.package_response(
            out,
            environ,
            start_response,
            headers)
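
Example 16 probes the POST body as gzip: GzipFile(fileobj=StringIO(stdin)) validates nothing at construction time, so the IOError only surfaces on read(), at which point the raw bytes are used instead. The same probe in isolation (a sketch, not the project's code):

import gzip
from cStringIO import StringIO

def maybe_gunzip(data):
    # Probe 'data' as gzip; a bad header surfaces as IOError on read(),
    # in which case the raw bytes are used as-is.
    try:
        return gzip.GzipFile(fileobj=StringIO(data)).read()
    except IOError:
        return data

out = StringIO()
g = gzip.GzipFile(fileobj=out, mode='wb')
g.write('hello')
g.close()
assert maybe_gunzip(out.getvalue()) == 'hello'
assert maybe_gunzip('plain text') == 'plain text'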

Example 17

Project: termite-data-server
Source File: widget.py
View license
def console():
    """ Defines the behavior of the console web2py execution """
    import optparse
    import textwrap

    usage = "python web2py.py"

    description = """\
    web2py Web Framework startup script.
    ATTENTION: unless a password is specified (-a 'passwd') web2py will
    attempt to run a GUI. In this case command line options are ignored."""

    description = textwrap.dedent(description)

    parser = optparse.OptionParser(
        usage, None, optparse.Option, ProgramVersion)

    parser.description = description

    msg = ('IP address of the server (e.g., 127.0.0.1 or ::1); '
           'Note: This value is ignored when using the \'interfaces\' option.')
    parser.add_option('-i',
                      '--ip',
                      default='127.0.0.1',
                      dest='ip',
                      help=msg)

    parser.add_option('-p',
                      '--port',
                      default='8000',
                      dest='port',
                      type='int',
                      help='port of server (8000)')

    msg = ('password to be used for administration '
           '(use -a "<recycle>" to reuse the last password)')
    parser.add_option('-a',
                      '--password',
                      default='<ask>',
                      dest='password',
                      help=msg)

    parser.add_option('-c',
                      '--ssl_certificate',
                      default='',
                      dest='ssl_certificate',
                      help='file that contains ssl certificate')

    parser.add_option('-k',
                      '--ssl_private_key',
                      default='',
                      dest='ssl_private_key',
                      help='file that contains ssl private key')

    msg = ('Use this file containing the CA certificate to validate X509 '
           'certificates from clients')
    parser.add_option('--ca-cert',
                      action='store',
                      dest='ssl_ca_certificate',
                      default=None,
                      help=msg)

    parser.add_option('-d',
                      '--pid_filename',
                      default='httpserver.pid',
                      dest='pid_filename',
                      help='file to store the pid of the server')

    parser.add_option('-l',
                      '--log_filename',
                      default='httpserver.log',
                      dest='log_filename',
                      help='file to log connections')

    parser.add_option('-n',
                      '--numthreads',
                      default=None,
                      type='int',
                      dest='numthreads',
                      help='number of threads (deprecated)')

    parser.add_option('--minthreads',
                      default=None,
                      type='int',
                      dest='minthreads',
                      help='minimum number of server threads')

    parser.add_option('--maxthreads',
                      default=None,
                      type='int',
                      dest='maxthreads',
                      help='maximum number of server threads')

    parser.add_option('-s',
                      '--server_name',
                      default=socket.gethostname(),
                      dest='server_name',
                      help='server name for the web server')

    msg = 'max number of queued requests when server unavailable'
    parser.add_option('-q',
                      '--request_queue_size',
                      default='5',
                      type='int',
                      dest='request_queue_size',
                      help=msg)

    parser.add_option('-o',
                      '--timeout',
                      default='10',
                      type='int',
                      dest='timeout',
                      help='timeout for individual request (10 seconds)')

    parser.add_option('-z',
                      '--shutdown_timeout',
                      default='5',
                      type='int',
                      dest='shutdown_timeout',
                      help='timeout on shutdown of server (5 seconds)')

    parser.add_option('--socket-timeout',
                      default=5,
                      type='int',
                      dest='socket_timeout',
                      help='timeout for socket (5 second)')

    parser.add_option('-f',
                      '--folder',
                      default=os.getcwd(),
                      dest='folder',
                      help='folder from which to run web2py')

    parser.add_option('-v',
                      '--verbose',
                      action='store_true',
                      dest='verbose',
                      default=False,
                      help='increase --test verbosity')

    parser.add_option('-Q',
                      '--quiet',
                      action='store_true',
                      dest='quiet',
                      default=False,
                      help='disable all output')

    msg = ('set debug output level (0-100, 0 means all, 100 means none; '
           'default is 30)')
    parser.add_option('-D',
                      '--debug',
                      dest='debuglevel',
                      default=30,
                      type='int',
                      help=msg)

    msg = ('run web2py in interactive shell or IPython (if installed) with '
           'specified appname (if app does not exist it will be created). '
           'APPNAME like a/c/f (c,f optional)')
    parser.add_option('-S',
                      '--shell',
                      dest='shell',
                      metavar='APPNAME',
                      help=msg)

    msg = ('run web2py in interactive shell or bpython (if installed) with '
           'specified appname (if app does not exist it will be created).\n'
           'Use combined with --shell')
    parser.add_option('-B',
                      '--bpython',
                      action='store_true',
                      default=False,
                      dest='bpython',
                      help=msg)

    msg = 'only use plain python shell; should be used with --shell option'
    parser.add_option('-P',
                      '--plain',
                      action='store_true',
                      default=False,
                      dest='plain',
                      help=msg)

    msg = ('auto import model files; default is False; should be used '
           'with --shell option')
    parser.add_option('-M',
                      '--import_models',
                      action='store_true',
                      default=False,
                      dest='import_models',
                      help=msg)

    msg = ('run PYTHON_FILE in web2py environment; '
           'should be used with --shell option')
    parser.add_option('-R',
                      '--run',
                      dest='run',
                      metavar='PYTHON_FILE',
                      default='',
                      help=msg)

    msg = ('run scheduled tasks for the specified apps: expects a list of '
           'app names as -K app1,app2,app3 '
           'or a list of app:groups as -K app1:group1:group2,app2:group1 '
           'to override specific group_names. (only strings, no spaces '
           'allowed). Requires a scheduler defined in the models.')
    parser.add_option('-K',
                      '--scheduler',
                      dest='scheduler',
                      default=None,
                      help=msg)

    msg = 'run schedulers alongside webserver, needs -K app1 and -a too'
    parser.add_option('-X',
                      '--with-scheduler',
                      action='store_true',
                      default=False,
                      dest='with_scheduler',
                      help=msg)

    msg = ('run doctests in web2py environment; '
           'TEST_PATH like a/c/f (c,f optional)')
    parser.add_option('-T',
                      '--test',
                      dest='test',
                      metavar='TEST_PATH',
                      default=None,
                      help=msg)

    msg = 'trigger a cron run manually; usually invoked from a system crontab'
    parser.add_option('-C',
                      '--cron',
                      action='store_true',
                      dest='extcron',
                      default=False,
                      help=msg)

    msg = 'triggers the use of softcron'
    parser.add_option('--softcron',
                      action='store_true',
                      dest='softcron',
                      default=False,
                      help=msg)

    parser.add_option('-Y',
                      '--run-cron',
                      action='store_true',
                      dest='runcron',
                      default=False,
                      help='start the background cron process')

    parser.add_option('-J',
                      '--cronjob',
                      action='store_true',
                      dest='cronjob',
                      default=False,
                      help='identify cron-initiated command')

    parser.add_option('-L',
                      '--config',
                      dest='config',
                      default='',
                      help='config file')

    parser.add_option('-F',
                      '--profiler',
                      dest='profiler_dir',
                      default=None,
                      help='profiler dir')

    parser.add_option('-t',
                      '--taskbar',
                      action='store_true',
                      dest='taskbar',
                      default=False,
                      help='use web2py gui and run in taskbar (system tray)')

    parser.add_option('',
                      '--nogui',
                      action='store_true',
                      default=False,
                      dest='nogui',
                      help='text-only, no GUI')

    msg = ('should be followed by a list of arguments to be passed to script, '
           'to be used with -S, -A must be the last option')
    parser.add_option('-A',
                      '--args',
                      action='store',
                      dest='args',
                      default=None,
                      help=msg)

    parser.add_option('--no-banner',
                      action='store_true',
                      default=False,
                      dest='nobanner',
                      help='Do not print header banner')

    msg = ('listen on multiple addresses: '
           '"ip1:port1:key1:cert1:ca_cert1;ip2:port2:key2:cert2:ca_cert2;..." '
           '(:key:cert:ca_cert optional; no spaces; IPv6 addresses must be in '
           'square [] brackets)')
    parser.add_option('--interfaces',
                      action='store',
                      dest='interfaces',
                      default=None,
                      help=msg)

    msg = 'runs web2py tests'
    parser.add_option('--run_system_tests',
                      action='store_true',
                      dest='run_system_tests',
                      default=False,
                      help=msg)

    msg = ('adds coverage reporting (needs --run_system_tests), '
           'python 2.7 and the coverage module installed. '
           'You can alter the default path setting the environmental '
           'var "COVERAGE_PROCESS_START". '
           'By default it takes gluon/tests/coverage.ini')
    parser.add_option('--with_coverage',
                      action='store_true',
                      dest='with_coverage',
                      default=False,
                      help=msg)

    if '-A' in sys.argv:
        k = sys.argv.index('-A')
    elif '--args' in sys.argv:
        k = sys.argv.index('--args')
    else:
        k = len(sys.argv)
    sys.argv, other_args = sys.argv[:k], sys.argv[k + 1:]
    (options, args) = parser.parse_args()
    options.args = [options.run] + other_args
    global_settings.cmd_options = options
    global_settings.cmd_args = args

    try:
        options.ips = list(set( # no duplicates
            [addrinfo[4][0] for addrinfo in getipaddrinfo(socket.getfqdn())
             if not is_loopback_ip_address(addrinfo=addrinfo)]))
    except socket.gaierror:
        options.ips = []

    if options.run_system_tests:
        run_system_tests(options)

    if options.quiet:
        capture = cStringIO.StringIO()
        sys.stdout = capture
        logger.setLevel(logging.CRITICAL + 1)
    else:
        logger.setLevel(options.debuglevel)

    if options.config[-3:] == '.py':
        options.config = options.config[:-3]

    if options.cronjob:
        global_settings.cronjob = True  # tell the world
        options.plain = True    # cronjobs use a plain shell
        options.nobanner = True
        options.nogui = True

    options.folder = os.path.abspath(options.folder)

    #  accept --interfaces in the form
    #  "ip1:port1:key1:cert1:ca_cert1;[ip2]:port2;ip3:port3:key3:cert3"
    #  (no spaces; optional key:cert indicate SSL)
    if isinstance(options.interfaces, str):
        interfaces = options.interfaces.split(';')
        options.interfaces = []
        for interface in interfaces:
            if interface.startswith('['):  # IPv6
                ip, if_remainder = interface.split(']', 1)
                ip = ip[1:]
                if_remainder = if_remainder[1:].split(':')
                if_remainder[0] = int(if_remainder[0])  # numeric port
                options.interfaces.append(tuple([ip] + if_remainder))
            else:  # IPv4
                interface = interface.split(':')
                interface[1] = int(interface[1])  # numeric port
                options.interfaces.append(tuple(interface))

    #  accepts --scheduler in the form
    #  "app:group1,group2,app2:group1"
    scheduler = []
    options.scheduler_groups = None
    if isinstance(options.scheduler, str):
        if ':' in options.scheduler:
            for opt in options.scheduler.split(','):
                scheduler.append(opt.split(':'))
            options.scheduler = ','.join([app[0] for app in scheduler])
            options.scheduler_groups = scheduler

    if options.numthreads is not None and options.minthreads is None:
        options.minthreads = options.numthreads  # legacy

    create_welcome_w2p()

    if not options.cronjob:
        # Create the applications package if it is missing
        if not os.path.exists('applications/__init__.py'):
            write_file('applications/__init__.py', '')

    return options, args
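
The --quiet branch swaps sys.stdout for an in-memory cStringIO buffer so nothing reaches the terminal; the example never swaps it back. A minimal, reversible version of that capture pattern (illustrative):

import sys
import cStringIO

capture = cStringIO.StringIO()
saved_stdout = sys.stdout
sys.stdout = capture            # everything printed now lands in the buffer
try:
    print 'this goes into the StringIO, not the terminal'
finally:
    sys.stdout = saved_stdout   # always restore the real stdout
saved_stdout.write('captured %d bytes\n' % len(capture.getvalue()))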

Example 18

Project: termite-visualizations
Source File: widget.py
View license
def console():
    """ Defines the behavior of the console web2py execution """
    import optparse
    import textwrap

    usage = "python web2py.py"

    description = """\
    web2py Web Framework startup script.
    ATTENTION: unless a password is specified (-a 'passwd') web2py will
    attempt to run a GUI. In this case command line options are ignored."""

    description = textwrap.dedent(description)

    parser = optparse.OptionParser(
        usage, None, optparse.Option, ProgramVersion)

    parser.description = description

    msg = ('IP address of the server (e.g., 127.0.0.1 or ::1); '
           'Note: This value is ignored when using the \'interfaces\' option.')
    parser.add_option('-i',
                      '--ip',
                      default='127.0.0.1',
                      dest='ip',
                      help=msg)

    parser.add_option('-p',
                      '--port',
                      default='8000',
                      dest='port',
                      type='int',
                      help='port of server (8000)')

    msg = ('password to be used for administration '
           '(use -a "<recycle>" to reuse the last password)')
    parser.add_option('-a',
                      '--password',
                      default='<ask>',
                      dest='password',
                      help=msg)

    parser.add_option('-c',
                      '--ssl_certificate',
                      default='',
                      dest='ssl_certificate',
                      help='file that contains ssl certificate')

    parser.add_option('-k',
                      '--ssl_private_key',
                      default='',
                      dest='ssl_private_key',
                      help='file that contains ssl private key')

    msg = ('Use this file containing the CA certificate to validate X509 '
           'certificates from clients')
    parser.add_option('--ca-cert',
                      action='store',
                      dest='ssl_ca_certificate',
                      default=None,
                      help=msg)

    parser.add_option('-d',
                      '--pid_filename',
                      default='httpserver.pid',
                      dest='pid_filename',
                      help='file to store the pid of the server')

    parser.add_option('-l',
                      '--log_filename',
                      default='httpserver.log',
                      dest='log_filename',
                      help='file to log connections')

    parser.add_option('-n',
                      '--numthreads',
                      default=None,
                      type='int',
                      dest='numthreads',
                      help='number of threads (deprecated)')

    parser.add_option('--minthreads',
                      default=None,
                      type='int',
                      dest='minthreads',
                      help='minimum number of server threads')

    parser.add_option('--maxthreads',
                      default=None,
                      type='int',
                      dest='maxthreads',
                      help='maximum number of server threads')

    parser.add_option('-s',
                      '--server_name',
                      default=socket.gethostname(),
                      dest='server_name',
                      help='server name for the web server')

    msg = 'max number of queued requests when server unavailable'
    parser.add_option('-q',
                      '--request_queue_size',
                      default='5',
                      type='int',
                      dest='request_queue_size',
                      help=msg)

    parser.add_option('-o',
                      '--timeout',
                      default='10',
                      type='int',
                      dest='timeout',
                      help='timeout for individual request (10 seconds)')

    parser.add_option('-z',
                      '--shutdown_timeout',
                      default='5',
                      type='int',
                      dest='shutdown_timeout',
                      help='timeout on shutdown of server (5 seconds)')

    parser.add_option('--socket-timeout',
                      default=5,
                      type='int',
                      dest='socket_timeout',
                      help='timeout for socket (5 second)')

    parser.add_option('-f',
                      '--folder',
                      default=os.getcwd(),
                      dest='folder',
                      help='folder from which to run web2py')

    parser.add_option('-v',
                      '--verbose',
                      action='store_true',
                      dest='verbose',
                      default=False,
                      help='increase --test verbosity')

    parser.add_option('-Q',
                      '--quiet',
                      action='store_true',
                      dest='quiet',
                      default=False,
                      help='disable all output')

    msg = ('set debug output level (0-100, 0 means all, 100 means none; '
           'default is 30)')
    parser.add_option('-D',
                      '--debug',
                      dest='debuglevel',
                      default=30,
                      type='int',
                      help=msg)

    msg = ('run web2py in interactive shell or IPython (if installed) with '
           'specified appname (if app does not exist it will be created). '
           'APPNAME like a/c/f (c,f optional)')
    parser.add_option('-S',
                      '--shell',
                      dest='shell',
                      metavar='APPNAME',
                      help=msg)

    msg = ('run web2py in interactive shell or bpython (if installed) with '
           'specified appname (if app does not exist it will be created).\n'
           'Use combined with --shell')
    parser.add_option('-B',
                      '--bpython',
                      action='store_true',
                      default=False,
                      dest='bpython',
                      help=msg)

    msg = 'only use plain python shell; should be used with --shell option'
    parser.add_option('-P',
                      '--plain',
                      action='store_true',
                      default=False,
                      dest='plain',
                      help=msg)

    msg = ('auto import model files; default is False; should be used '
           'with --shell option')
    parser.add_option('-M',
                      '--import_models',
                      action='store_true',
                      default=False,
                      dest='import_models',
                      help=msg)

    msg = ('run PYTHON_FILE in web2py environment; '
           'should be used with --shell option')
    parser.add_option('-R',
                      '--run',
                      dest='run',
                      metavar='PYTHON_FILE',
                      default='',
                      help=msg)

    msg = ('run scheduled tasks for the specified apps: expects a list of '
           'app names as -K app1,app2,app3 '
           'or a list of app:groups as -K app1:group1:group2,app2:group1 '
           'to override specific group_names. (only strings, no spaces '
           'allowed). Requires a scheduler defined in the models.')
    parser.add_option('-K',
                      '--scheduler',
                      dest='scheduler',
                      default=None,
                      help=msg)

    msg = 'run schedulers alongside webserver, needs -K app1 and -a too'
    parser.add_option('-X',
                      '--with-scheduler',
                      action='store_true',
                      default=False,
                      dest='with_scheduler',
                      help=msg)

    msg = ('run doctests in web2py environment; '
           'TEST_PATH like a/c/f (c,f optional)')
    parser.add_option('-T',
                      '--test',
                      dest='test',
                      metavar='TEST_PATH',
                      default=None,
                      help=msg)

    msg = 'trigger a cron run manually; usually invoked from a system crontab'
    parser.add_option('-C',
                      '--cron',
                      action='store_true',
                      dest='extcron',
                      default=False,
                      help=msg)

    msg = 'triggers the use of softcron'
    parser.add_option('--softcron',
                      action='store_true',
                      dest='softcron',
                      default=False,
                      help=msg)

    parser.add_option('-Y',
                      '--run-cron',
                      action='store_true',
                      dest='runcron',
                      default=False,
                      help='start the background cron process')

    parser.add_option('-J',
                      '--cronjob',
                      action='store_true',
                      dest='cronjob',
                      default=False,
                      help='identify cron-initiated command')

    parser.add_option('-L',
                      '--config',
                      dest='config',
                      default='',
                      help='config file')

    parser.add_option('-F',
                      '--profiler',
                      dest='profiler_dir',
                      default=None,
                      help='profiler dir')

    parser.add_option('-t',
                      '--taskbar',
                      action='store_true',
                      dest='taskbar',
                      default=False,
                      help='use web2py gui and run in taskbar (system tray)')

    parser.add_option('',
                      '--nogui',
                      action='store_true',
                      default=False,
                      dest='nogui',
                      help='text-only, no GUI')

    msg = ('should be followed by a list of arguments to be passed to script, '
           'to be used with -S, -A must be the last option')
    parser.add_option('-A',
                      '--args',
                      action='store',
                      dest='args',
                      default=None,
                      help=msg)

    parser.add_option('--no-banner',
                      action='store_true',
                      default=False,
                      dest='nobanner',
                      help='Do not print header banner')

    msg = ('listen on multiple addresses: '
           '"ip1:port1:key1:cert1:ca_cert1;ip2:port2:key2:cert2:ca_cert2;..." '
           '(:key:cert:ca_cert optional; no spaces; IPv6 addresses must be in '
           'square [] brackets)')
    parser.add_option('--interfaces',
                      action='store',
                      dest='interfaces',
                      default=None,
                      help=msg)

    msg = 'runs web2py tests'
    parser.add_option('--run_system_tests',
                      action='store_true',
                      dest='run_system_tests',
                      default=False,
                      help=msg)

    msg = ('adds coverage reporting (needs --run_system_tests), '
           'python 2.7 and the coverage module installed. '
           'You can alter the default path setting the environmental '
           'var "COVERAGE_PROCESS_START". '
           'By default it takes gluon/tests/coverage.ini')
    parser.add_option('--with_coverage',
                      action='store_true',
                      dest='with_coverage',
                      default=False,
                      help=msg)

    if '-A' in sys.argv:
        k = sys.argv.index('-A')
    elif '--args' in sys.argv:
        k = sys.argv.index('--args')
    else:
        k = len(sys.argv)
    sys.argv, other_args = sys.argv[:k], sys.argv[k + 1:]
    (options, args) = parser.parse_args()
    options.args = [options.run] + other_args
    global_settings.cmd_options = options
    global_settings.cmd_args = args

    try:
        options.ips = list(set( # no duplicates
            [addrinfo[4][0] for addrinfo in getipaddrinfo(socket.getfqdn())
             if not is_loopback_ip_address(addrinfo=addrinfo)]))
    except socket.gaierror:
        options.ips = []

    if options.run_system_tests:
        run_system_tests(options)

    if options.quiet:
        capture = cStringIO.StringIO()
        sys.stdout = capture
        logger.setLevel(logging.CRITICAL + 1)
    else:
        logger.setLevel(options.debuglevel)

    if options.config[-3:] == '.py':
        options.config = options.config[:-3]

    if options.cronjob:
        global_settings.cronjob = True  # tell the world
        options.plain = True    # cronjobs use a plain shell
        options.nobanner = True
        options.nogui = True

    options.folder = os.path.abspath(options.folder)

    #  accept --interfaces in the form
    #  "ip1:port1:key1:cert1:ca_cert1;[ip2]:port2;ip3:port3:key3:cert3"
    #  (no spaces; optional key:cert indicate SSL)
    if isinstance(options.interfaces, str):
        interfaces = options.interfaces.split(';')
        options.interfaces = []
        for interface in interfaces:
            if interface.startswith('['):  # IPv6
                ip, if_remainder = interface.split(']', 1)
                ip = ip[1:]
                if_remainder = if_remainder[1:].split(':')
                if_remainder[0] = int(if_remainder[0])  # numeric port
                options.interfaces.append(tuple([ip] + if_remainder))
            else:  # IPv4
                interface = interface.split(':')
                interface[1] = int(interface[1])  # numeric port
                options.interfaces.append(tuple(interface))

    #  accepts --scheduler in the form
    #  "app:group1,group2,app2:group1"
    scheduler = []
    options.scheduler_groups = None
    if isinstance(options.scheduler, str):
        if ':' in options.scheduler:
            for opt in options.scheduler.split(','):
                scheduler.append(opt.split(':'))
            options.scheduler = ','.join([app[0] for app in scheduler])
            options.scheduler_groups = scheduler

    if options.numthreads is not None and options.minthreads is None:
        options.minthreads = options.numthreads  # legacy

    create_welcome_w2p()

    if not options.cronjob:
        # Create the applications package if it is missing
        if not os.path.exists('applications/__init__.py'):
            write_file('applications/__init__.py', '')

    return options, args
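
Example 18 is the same console() as Example 17, from a different project. For reference, this is what its --interfaces parsing produces, extracted into a self-contained function (a sketch mirroring the loop above, with hypothetical input):

def parse_interfaces(spec):
    # Mirrors the loop above: split on ';', strip IPv6 brackets, coerce the
    # port to int; any trailing fields (key, cert, ca_cert) ride along.
    result = []
    for interface in spec.split(';'):
        if interface.startswith('['):   # IPv6 literal in square brackets
            ip, rest = interface.split(']', 1)
            parts = [ip[1:]] + rest[1:].split(':')
        else:                           # IPv4
            parts = interface.split(':')
        parts[1] = int(parts[1])        # numeric port
        result.append(tuple(parts))
    return result

print parse_interfaces('127.0.0.1:8000;[::1]:8001:key.pem:cert.pem')
# [('127.0.0.1', 8000), ('::1', 8001, 'key.pem', 'cert.pem')]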

Example 19

Project: viewfinder
Source File: analyze_merged_logs.py
View license
@gen.engine
def ProcessFiles(merged_store, logs_paths, filenames, callback):
  """Fetch and process each file contained in 'filenames'."""

  def _ProcessOneFile(contents, day_stats, device_entries, trace_entries):
    """Iterate over the contents of a processed file: one entry per line. Increment stats for specific entries."""
    buf = cStringIO.StringIO(contents)
    buf.seek(0)
    # Max len is +1 since we include the current line. It allows us to call 'continue' in the middle of the loop.
    context_before = deque(maxlen=options.options.trace_context_num_lines + 1)
    # Traces that still need "after" context.
    pending_traces = []
    def _AddTrace(trace_type, timestamp, module, message):
      # context_before also has the current line, so grab only :-1.
      trace = {'type': trace_type,
               'timestamp': timestamp,
               'module': module,
               'trace': message,
               'context_before': list(context_before)[:-1],
               'context_after': []}
      if options.options.trace_context_num_lines == 0:
        trace_entries.append(trace)
      else:
        pending_traces.append(trace)

    def _CheckPendingTraces(line):
      for t in pending_traces:
        t['context_after'].append(line)
      while pending_traces and len(pending_traces[0]['context_after']) >= options.options.trace_context_num_lines:
        trace_entries.append(pending_traces.pop(0))

    while True:
      line = buf.readline()
      if not line:
        break
      line = line.rstrip('\n')
      # The deque automatically pops elements from the front when maxlen is reached.
      context_before.append(line)
      _CheckPendingTraces(line)

      parsed = logs_util.ParseLogLine(line)
      if not parsed:
        continue
      day, time, module, msg = parsed
      timestamp = logs_util.DayTimeStringsToUTCTimestamp(day, time)

      if options.options.process_traceback and re.search(kTracebackRE, line):
        _AddTrace('traceback', timestamp, module, msg)

      if module.startswith('user_op_manager:') or module.startswith('operation:'):
        # Found op status line.
        if msg.startswith('SUCCESS'):
          # Success message. eg: SUCCESS: user: xx, device: xx, op: xx, method: xx.yy in xxs
          parsed = logs_util.ParseSuccessMsg(msg)
          if not parsed:
            continue
          user, device, op, class_name, method_name = parsed
          method = '%s.%s' % (class_name, method_name)
          day_stats.ActiveAll(user)
          if method in ('Follower.UpdateOperation', 'UpdateFollowerOperation.Execute'):
            day_stats.ActiveView(user)
          elif method in ('Comment.PostOperation', 'PostCommentOperation.Execute'):
            day_stats.ActivePost(user)
          elif method in ('Episode.ShareExistingOperation', 'Episode.ShareNewOperation',
                          'ShareExistingOperation.Execute', 'ShareNewOperation.Execute'):
            day_stats.ActiveShare(user)
        elif msg.startswith('EXECUTE'):
          # Exec message. eg: EXECUTE: user: xx, device: xx, op: xx, method: xx.yy: <req>
          parsed = logs_util.ParseExecuteMsg(msg)
          if not parsed:
            continue
          user, device, op, class_name, method_name, request = parsed
          method = '%s.%s' % (class_name, method_name)
          if method in ('Device.UpdateOperation', 'User.RegisterOperation', 'RegisterUserOperation.Execute'):
            try:
              req_dict = eval(request)
              device_entries.append({'method': method, 'timestamp': timestamp, 'request': req_dict})
            except Exception as e:
              continue
        elif msg.startswith('ABORT'):
          if options.options.process_op_abort:
            # Abort message, save the entire line as well as context.
            _AddTrace('abort', timestamp, module, msg)
        # FAILURE status is already handled by Traceback processing.
      elif module.startswith('base:') and msg.startswith('/ping OK:'):
        # Ping message. Extract full request dict.
        req_str = logs_util.ParsePingMsg(msg)
        if not req_str:
          continue
        try:
          req_dict = json.loads(req_str)
          device_entries.append({'method': 'ping', 'timestamp': timestamp, 'request': req_dict})
        except Exception as e:
          continue
      elif module.startswith('ping:') and msg.startswith('ping OK:'):
        # Ping message in new format. Extract full request and response dicts.
        (req_str, resp_str) = logs_util.ParseNewPingMsg(msg)
        if not req_str or not resp_str:
          continue
        try:
          req_dict = json.loads(req_str)
          resp_dict = json.loads(resp_str)
          device_entries.append({'method': 'ping', 'timestamp': timestamp, 'request': req_dict, 'response': resp_dict})
        except Exception as e:
          continue


    # No more context. Flush the pending traces into the list.
    trace_entries.extend(pending_traces)
    buf.close()

  today = util.NowUTCToISO8601()
  # Group filenames by day.
  files_by_day = defaultdict(list)
  for filename in filenames:
    day = logs_paths.MergedLogPathToDate(filename)
    if not day:
      logging.error('filename cannot be parsed as processed log: %s' % filename)
      continue
    if options.options.compute_today or today != day:
      files_by_day[day].append(filename)

  # Sort the list of days. This is important both for --max_days_to_process, and to know the last
  # day for which we wrote the file.
  day_list = sorted(files_by_day.keys())
  if options.options.max_days_to_process is not None:
    day_list = day_list[:options.options.max_days_to_process]

  last_day_written = None
  for day in day_list:
    files = files_by_day[day]
    day_stats = logs_util.DayUserRequestStats(day)
    device_entries = []
    trace_entries = []
    for f in files:
      # Let exceptions surface.
      contents = yield gen.Task(merged_store.Get, f)
      logging.info('Processing %d bytes from %s' % (len(contents), f))
      _ProcessOneFile(contents, day_stats, device_entries, trace_entries)

    if not options.options.dry_run:
      # Write the json-ified stats.
      req_contents = json.dumps(day_stats.ToDotDict())
      req_file_path = 'processed_data/user_requests/%s' % day
      dev_contents = json.dumps(device_entries)
      dev_file_path = 'processed_data/device_details/%s' % day
      try:
        trace_contents = json.dumps(trace_entries)
      except Exception as e:
        trace_contents = None
      trace_file_path = 'processed_data/traces/%s' % day


      @gen.engine
      def _MaybePut(path, contents, callback):
        if contents:
          yield gen.Task(merged_store.Put, path, contents)
          logging.info('Wrote %d bytes to %s' % (len(contents), path))
        callback()


      yield [gen.Task(_MaybePut, req_file_path, req_contents),
             gen.Task(_MaybePut, dev_file_path, dev_contents),
             gen.Task(_MaybePut, trace_file_path, trace_contents)]

      last_day_written = day_stats.day

  callback(last_day_written)
  return
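
The loop above reads each merged log file through an in-memory file object: buf is presumably a cStringIO.StringIO wrapped around contents (its construction is earlier in the function), drained line by line with readline(). A minimal sketch of that buffering idiom, with an illustrative helper name:

import cStringIO

def iter_log_lines(contents):
    # Wrap the raw log blob in an in-memory file and yield stripped
    # lines, mirroring the readline() loop above.
    buf = cStringIO.StringIO(contents)
    try:
        while True:
            line = buf.readline()
            if not line:
                break
            yield line.rstrip('\n')
    finally:
        buf.close()

for line in iter_log_lines('first\nsecond\n'):
    print line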

Example 20

Project: pymo
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)
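
Here StringIO.StringIO serves as a growable output buffer for the multipart/form-data payload: each part is written piecewise and the body is materialized once with getvalue(), avoiding repeated string concatenation. A minimal sketch of the same encoding pattern, using a hypothetical helper and boundary:

import StringIO

def encode_multipart(fields, boundary='----illustrative-boundary'):
    # Accumulate the multipart body in memory, one part per field;
    # values must be str.
    body = StringIO.StringIO()
    for name, value in fields.items():
        body.write('--%s\r\n' % boundary)
        body.write('Content-Disposition: form-data; name="%s"\r\n\r\n' % name)
        body.write(value)
        body.write('\r\n')
    body.write('--%s--\r\n' % boundary)
    return body.getvalue()

payload = encode_multipart({'name': 'demo', 'version': '1.0'})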

Example 21

Project: imagrium
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)

Example 22

Project: skarphed
Source File: repository.py
View license
    def upload_module(self, environ, data, signature):
        """
        Uploads a module. data is the module's archive; it must contain a
        manifest.json file that provides the module's meta information.
        """
        # check if the signature matches a registered developer
        dev_name = self._match_developer_signature(environ, data, signature)
        
        # extract the module's manifest from the module's archive
        datafile = StringIO(data)
        try:
            tar = tarfile.open(fileobj = datafile, mode = 'r:gz') 
            manifestdata = tar.extractfile('manifest.json').read()
        except Exception, e:
            raise create_repository_exception(RepositoryErrorCode.UPLOAD_CORRUPTED)
        
        # checks whether the manifest is valid JSON and contains all necessary keys
        try:
            manifest = json.loads(manifestdata)
            self._check_manifest(manifest, ['name', 'hrname', 'version_major', 'version_minor'])
        except Exception, e:
            raise create_repository_exception(RepositoryErrorCode.UPLOAD_MANIFEST)

        # checks whether the module's developer prefix matches the developer name
        if not manifest['name'].startswith(dev_name + '_'):
            raise create_repository_exception(RepositoryErrorCode.UPLOAD_DEV_PREFIX)

        # Check whether the module declares its JS support; if not, assume it is a pure HTML module
        js_mandatory = 0
        if manifest.has_key('js_mandatory') and manifest['js_mandatory'] in (JSMandatory.NO,JSMandatory.SUPPORTED,JSMandatory.MANDATORY):
            js_mandatory = manifest['js_mandatory']
        else:
            manifest['js_mandatory'] = 0

        # checks whether all dependencies are available
        dependencies = []
        if 'dependencies' in manifest:
            dependencies = manifest['dependencies']
            missing = []
            for dep in dependencies:
                cur = environ['db'].query('SELECT COUNT(*) AS DEPCOUNT \
                        FROM MODULES \
                        WHERE MOD_NAME = ? AND MOD_VERSIONMAJOR = ? \
                        AND MOD_VERSIONMINOR = ?;',
                        (dep['name'], dep['version_major'], dep['version_minor']))
                result = cur.fetchonemap()
                if result['DEPCOUNT'] == 0:
                    missing.append({'name' : dep['name'],
                            'version_major' : dep['version_major'],
                            'version_minor' : dep['version_minor']})
            if missing:
                raise create_repository_exception(RepositoryErrorCode.UPLOAD_DEPENDENCIES, *missing)
        
        # calculate the new revision of the module and write it to the manifest
        cur = environ['db'].query('SELECT MAX(MOD_VERSIONREV) AS MAXREVISION \
                FROM MODULES \
                WHERE MOD_NAME = ?;', manifest['name'])
        result = cur.fetchonemap()
        maxrevision = result['MAXREVISION']
        if maxrevision is not None:
            revision = maxrevision + 1
        else:
            revision = 0
        manifest['revision'] = revision

        # creates the new module archive with the new revision
        datafile = StringIO()
        try:
            newtar = tarfile.open(fileobj = datafile, mode = 'w:gz')
            for member in tar:
                if member.isfile():
                    if member.name != 'manifest.json':
                        f = tar.extractfile(member)
                        newtar.addfile(member, f)
                    else:
                        manifestdata = json.dumps(manifest)
                        info = tarfile.TarInfo(name = 'manifest.json')
                        info.size = len(manifestdata)
                        info.mtime = time.time()
                        newtar.addfile(tarinfo = info, fileobj = StringIO(manifestdata))
            newtar.close()
            tar.close()
            newdata = datafile.getvalue()
        except Exception, e:
            raise create_repository_exception(RepositoryErrorCode.UPLOAD_CORRUPTED) # TODO check different error cases

        # generate a new module id and sign the module with the repository's
        # private key
        mod_id = environ['db'].get_seq_next('MOD_GEN')

        key = RSA.importKey(self.get_private_key(environ))
        hashobj = SHA256.new(newdata)
        signer = PKCS1_v1_5.new(key)
        repo_signature = base64.b64encode(signer.sign(hashobj))
        
        # stores the new module in the database
        environ['db'].query('INSERT INTO MODULES (MOD_ID, MOD_NAME, MOD_DISPLAYNAME, \
                MOD_VERSIONMAJOR, MOD_VERSIONMINOR, MOD_VERSIONREV, MOD_SIGNATURE, \
                MOD_JSMANDATORY, MOD_DATA) VALUES (?,?,?,?,?,?,?,?,?);',
                (mod_id, manifest['name'], manifest['hrname'], manifest['version_major'],
                manifest['version_minor'], revision, repo_signature, js_mandatory, base64.b64encode(newdata)), commit=True)

        # stores the dependencies of this module in the database
        for dep in dependencies:
            dep_id = environ['db'].get_seq_next('DEP_GEN')
            
            cur =  environ['db'].query('SELECT MOD_ID \
                    FROM MODULES M \
                    JOIN (SELECT MOD_NAME, MOD_VERSIONMAJOR, MOD_VERSIONMINOR, MAX(MOD_VERSIONREV) AS MAXREV \
                        FROM MODULES \
                        GROUP BY MOD_NAME, MOD_VERSIONMAJOR, MOD_VERSIONMINOR) REV \
                    ON M.MOD_NAME = REV.MOD_NAME AND M.MOD_VERSIONMAJOR = REV.MOD_VERSIONMAJOR AND \
                        M.MOD_VERSIONMINOR = REV.MOD_VERSIONMINOR AND M.MOD_VERSIONREV = REV.MAXREV \
                    WHERE M.MOD_NAME = ? AND M.MOD_VERSIONMAJOR = ? AND M.MOD_VERSIONMINOR = ?;',
                    (dep['name'], dep['version_major'], dep['version_minor']))
            result = cur.fetchonemap()
            dependson_id = result['MOD_ID']

            environ['db'].query('INSERT INTO DEPENDENCIES (DEP_ID, DEP_MOD_ID, DEP_MOD_DEPENDSON) \
                    VALUES (?,?,?);', (dep_id, mod_id, dependson_id), commit=True) 
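
Both tar archives in this example live entirely in memory: the upload is parsed from a StringIO, and the re-packed archive is written into a fresh one. A condensed sketch of that round trip, assuming StringIO here is cStringIO.StringIO and using a hypothetical helper:

import tarfile
import time
from cStringIO import StringIO

def replace_manifest(archive_bytes, manifest_json):
    # Rewrite a tar.gz held in memory, swapping in a new manifest.json.
    src = tarfile.open(fileobj=StringIO(archive_bytes), mode='r:gz')
    out = StringIO()
    dst = tarfile.open(fileobj=out, mode='w:gz')
    for member in src:
        if member.isfile() and member.name != 'manifest.json':
            dst.addfile(member, src.extractfile(member))
    info = tarfile.TarInfo(name='manifest.json')
    info.size = len(manifest_json)
    info.mtime = time.time()
    dst.addfile(tarinfo=info, fileobj=StringIO(manifest_json))
    dst.close()
    src.close()
    return out.getvalue()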

Example 23

Project: fixofx
Source File: webunittest.py
View license
    def fetch(self, url, postdata=None, server=None, port=None, protocol=None,
                    ok_codes=None):
        '''Run a single test request to the indicated url. Use the POST data
        if supplied.

        Raises failureException if the returned data contains any of the
        strings indicated to be Error Content.
        Returns an HTTPResponse object wrapping the response from the server.
        '''
        # see if the url is fully-qualified (not just a path)
        t_protocol, t_server, t_url, x, t_args, x = urlparse.urlparse(url)
        if t_server:
            protocol = t_protocol
            if ':' in t_server:
                server, port = t_server.split(':')
            else:
                server = t_server
                if protocol == 'http':
                    port = '80'
                else:
                    port = '443'
            url = t_url
            if t_args:
                url = url + '?' + t_args
            # ignore the machine name if the URL is for localhost
            if t_server == 'localhost':
                server = None
        elif not server:
            # no server was specified with this fetch, or in the URL, so
            # see if there's a base URL to use.
            base = self.get_base_url()
            if base:
                t_protocol, t_server, t_url, x, x, x = urlparse.urlparse(base)
                if t_protocol:
                    protocol = t_protocol
                if t_server:
                    server = t_server
                if t_url:
                    url = urlparse.urljoin(t_url, url)

        # TODO: allow override of the server and port from the URL!
        if server is None: server = self.server
        if port is None: port = self.port
        if protocol is None: protocol = self.protocol
        if ok_codes is None: ok_codes = self.expect_codes

        if protocol == 'http':
            handler = self.scheme_handlers.get('http')
            h = handler(server, int(port))

            if int(port) == 80:
               host_header = server
            else: 
               host_header = '%s:%s'%(server, port)
        elif protocol == 'https':
            #if httpslib is None:
                #raise ValueError, "Can't fetch HTTPS: M2Crypto not installed"
            handler = self.scheme_handlers.get('https')
            h = handler(server, int(port))
            
            if int(port) == 443:
               host_header = server
            else: 
               host_header = '%s:%s'%(server, port)
        else:
            raise ValueError, protocol

        params = None
        if postdata:
            for field,value in postdata.items():
                if type(value) == type({}):
                    postdata[field] = []
                    for k,selected in value.items():
                        if selected: postdata[field].append(k)

            # Do a post with the data file
            params = mimeEncode(postdata)
            h.putrequest('POST', url)
            h.putheader('Content-type', 'multipart/form-data; boundary=%s'%
                boundary)
            h.putheader('Content-length', str(len(params)))
        else:
            # Normal GET
            h.putrequest('GET', url)

        # Other Full Request headers
        if self.authinfo:
            h.putheader('Authorization', "Basic %s"%self.authinfo)
        h.putheader('Host', host_header)

        # Send cookies
        #  - check the domain, max-age (seconds), path and secure
        #    (http://www.ietf.org/rfc/rfc2109.txt)
        cookies_used = []
        cookie_list = []
        for domain, cookies in self.cookies.items():
            # check cookie domain
            if not server.endswith(domain):
                continue
            for path, cookies in cookies.items():
                # check that the path matches
                urlpath = urlparse.urlparse(url)[2]
                if not urlpath.startswith(path) and not (path == '/' and
                        urlpath == ''):
                    continue
                for sendcookie in cookies.values():
                    # and that the cookie is or isn't secure
                    if sendcookie['secure'] and protocol != 'https':
                        continue
                    # TODO: check max-age
                    cookie_list.append("%s=%s;"%(sendcookie.key,
                        sendcookie.coded_value))
                    cookies_used.append(sendcookie.key)

        if cookie_list:
            h.putheader('Cookie', ' '.join(cookie_list))

        # check that we sent the cookies we expected to
        if self.expect_cookies is not None:
            assert cookies_used == self.expect_cookies, \
                "Didn't use all cookies (%s expected, %s used)"%(
                self.expect_cookies, cookies_used)

        # finish the headers
        h.endheaders()

        if params is not None:
            h.send(params)

        # handle the reply
        errcode, errmsg, headers = h.getreply()

        # get the body and save it
        f = h.getfile()
        g = cStringIO.StringIO()
        d = f.read()
        while d:
            g.write(d)
            d = f.read()
        response = HTTPResponse(self.cookies, protocol, server, port, url,
            errcode, errmsg, headers, g.getvalue(), self.error_content)
        f.close()

        if errcode not in ok_codes:
            if VERBOSE:
                sys.stdout.write('e')
                sys.stdout.flush()
            raise HTTPError(response)

        # decode the cookies
        if self.accept_cookies:
            try:
                # decode the cookies and update the cookies store
                cookie.decodeCookies(url, server, headers, self.cookies)
            except:
                if VERBOSE:
                    sys.stdout.write('c')
                    sys.stdout.flush()
                raise

        # Check errors
        if self.error_content:
            data = response.body
            for content in self.error_content:
                if data.find(content) != -1:
                    msg = "Matched error: %s"%content
                    if hasattr(self, 'results') and self.results:
                        self.writeError(url, msg)
                    self.log('Matched error'+`(url, content)`, data)
                    if VERBOSE:
                        sys.stdout.write('c')
                        sys.stdout.flush()
                    raise self.failureException, msg

        if VERBOSE:
            sys.stdout.write('_')
            sys.stdout.flush()
        return response
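
The response body is collected into a cStringIO.StringIO buffer one read() at a time before being handed to HTTPResponse. A minimal sketch of that drain-into-buffer pattern, with an illustrative helper and chunk size:

import cStringIO

def read_all(fileobj, chunk_size=8192):
    # Drain a file-like object into an in-memory buffer, chunk by chunk.
    g = cStringIO.StringIO()
    d = fileobj.read(chunk_size)
    while d:
        g.write(d)
        d = fileobj.read(chunk_size)
    return g.getvalue()

body = read_all(cStringIO.StringIO('x' * 100000))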

Example 24

Project: fixofx
Source File: __init__.py
View license
def make_environ(inp, host, port, script_name):
    """
    Take 'inp' as if it were HTTP-speak being received on host:port,
    and parse it into a WSGI-ok environment dictionary.  Return the
    dictionary.

    Set 'SCRIPT_NAME' from the 'script_name' input, and, if present,
    remove it from the beginning of the PATH_INFO variable.
    """
    #
    # parse the input up to the first blank line (or its end).
    #

    environ = {}
    
    method_line = inp.readline()
    
    content_type = None
    content_length = None
    cookies = []
    
    for line in inp:
        if not line.strip():
            break

        k, v = line.strip().split(':', 1)
        v = v.lstrip()

        #
        # take care of special headers, and for the rest, put them
        # into the environ with HTTP_ in front.
        #

        if k.lower() == 'content-type':
            content_type = v
        elif k.lower() == 'content-length':
            content_length = v
        elif k.lower() == 'cookie' or k.lower() == 'cookie2':
            cookies.append(v)
        else:
            h = k.upper()
            h = h.replace('-', '_')
            environ['HTTP_' + h] = v
            
        if debuglevel >= 2:
            print 'HEADER:', k, v

    #
    # decode the method line
    #

    if debuglevel >= 2:
        print 'METHOD LINE:', method_line
        
    method, url, protocol = method_line.split(' ')

    # clean the script_name off of the url, if it's there.
    if not url.startswith(script_name):
        script_name = ''                # @CTB what to do -- bad URL.  scrap?
    else:
        url = url[len(script_name):]

    url = url.split('?', 1)
    path_info = url[0]
    query_string = ""
    if len(url) == 2:
        query_string = url[1]

    if debuglevel:
        print "method: %s; script_name: %s; path_info: %s; query_string: %s" % (method, script_name, path_info, query_string)

    r = inp.read()
    inp = StringIO(r)

    #
    # fill out our dictionary.
    #
    
    environ.update({ "wsgi.version" : (1,0),
                     "wsgi.url_scheme": "http",
                     "wsgi.input" : inp,           # to read for POSTs
                     "wsgi.errors" : StringIO(),
                     "wsgi.multithread" : 0,
                     "wsgi.multiprocess" : 0,
                     "wsgi.run_once" : 0,
    
                     "PATH_INFO" : path_info,
                     "QUERY_STRING" : query_string,
                     "REMOTE_ADDR" : '127.0.0.1',
                     "REQUEST_METHOD" : method,
                     "SCRIPT_NAME" : script_name,
                     "SERVER_NAME" : host,
                     "SERVER_PORT" : str(port),
                     "SERVER_PROTOCOL" : protocol,
                     })

    #
    # query_string, content_type & length are optional.
    #

    if query_string:
        environ['QUERY_STRING'] = query_string
        
    if content_type:
        environ['CONTENT_TYPE'] = content_type
        if debuglevel >= 2:
            print 'CONTENT-TYPE:', content_type
    if content_length:
        environ['CONTENT_LENGTH'] = content_length
        if debuglevel >= 2:
            print 'CONTENT-LENGTH:', content_length

    #
    # handle cookies.
    #
    if cookies:
        environ['HTTP_COOKIE'] = "; ".join(cookies)

    if debuglevel:
        print 'WSGI environ dictionary:', environ

    return environ
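
The unread tail of the request is rewrapped in a StringIO and exposed as wsgi.input, with an empty buffer standing in for wsgi.errors. A minimal sketch of that corner of the environ, assuming StringIO here is cStringIO.StringIO:

from cStringIO import StringIO

# Illustrative: the request body becomes wsgi.input; an empty in-memory
# file serves as wsgi.errors.
environ = {
    'wsgi.input': StringIO('field=value'),
    'wsgi.errors': StringIO(),
    'REQUEST_METHOD': 'POST',
    'CONTENT_LENGTH': '11',
}
post_body = environ['wsgi.input'].read(int(environ['CONTENT_LENGTH']))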

Example 25

View license
    def test_process_scpv2(self):

        # SearchCommand.process should

        # 1. Recognize all standard options:

        metadata = (
            '{{'
                '"action": "getinfo", "preview": false, "searchinfo": {{'
                    '"latest_time": "0",'
                    '"splunk_version": "20150522",'
                    '"username": "admin",'
                    '"app": "searchcommands_app",'
                    '"args": ['
                        '"logging_configuration={logging_configuration}",'
                        '"logging_level={logging_level}",'
                        '"record={record}",'
                        '"show_configuration={show_configuration}",'
                        '"required_option_1=value_1",'
                        '"required_option_2=value_2"'
                    '],'
                    '"search": "%7C%20inputlookup%20tweets%20%7C%20countmatches%20fieldname%3Dword_count%20pattern%3D%22%5Cw%2B%22%20text%20record%3Dt%20%7C%20export%20add_timestamp%3Df%20add_offset%3Dt%20format%3Dcsv%20segmentation%3Draw",'
                    '"earliest_time": "0",'
                    '"session_key": "0JbG1fJEvXrL6iYZw9y7tmvd6nHjTKj7ggaE7a4Jv5R0UIbeYJ65kThn^3hiNeoqzMT_LOtLpVR3Y8TIJyr5bkHUElMijYZ8l14wU0L4n^Oa5QxepsZNUIIQCBm^",'
                    '"owner": "admin",'
                    '"sid": "1433261372.158",'
                    '"splunkd_uri": "https://127.0.0.1:8089",'
                    '"dispatch_dir": {dispatch_dir},'
                    '"raw_args": ['
                        '"logging_configuration={logging_configuration}",'
                        '"logging_level={logging_level}",'
                        '"record={record}",'
                        '"show_configuration={show_configuration}",'
                        '"required_option_1=value_1",'
                        '"required_option_2=value_2"'
                    ']'
                '}}'
            '}}')

        basedir = self._package_directory

        default_logging_configuration = os.path.join(basedir, 'apps', 'app_with_logging_configuration', 'default', 'logging.conf')
        dispatch_dir = os.path.join(basedir, 'recordings', 'scpv2', 'Splunk-6.3', 'countmatches.dispatch_dir')
        logging_configuration = os.path.join(basedir, 'apps', 'app_with_logging_configuration', 'logging.conf')
        logging_level = 'ERROR'
        record = False
        show_configuration = True

        getinfo_metadata = metadata.format(
            dispatch_dir=encode_string(dispatch_dir),
            logging_configuration=encode_string(logging_configuration)[1:-1],
            logging_level=logging_level,
            record=('true' if record is True else 'false'),
            show_configuration=('true' if show_configuration is True else 'false'))

        execute_metadata = '{"action":"execute","finished":true}'
        execute_body = 'test\r\ndata\r\n'

        ifile = StringIO(
            'chunked 1.0,{},0\n{}'.format(len(getinfo_metadata), getinfo_metadata) +
            'chunked 1.0,{},{}\n{}{}'.format(len(execute_metadata), len(execute_body), execute_metadata, execute_body))

        command = TestCommand()
        result = StringIO()
        argv = ['some-external-search-command.py']

        self.assertEqual(command.logging_level, 'WARNING')
        self.assertIs(command.record, None)
        self.assertIs(command.show_configuration, None)

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except SystemExit as error:
            self.fail('Unexpected exception: {}: {}'.format(type(error).__name__, error))

        self.assertEqual(command.logging_configuration, logging_configuration)
        self.assertEqual(command.logging_level, 'ERROR')
        self.assertEqual(command.record, record)
        self.assertEqual(command.show_configuration, show_configuration)
        self.assertEqual(command.required_option_1, 'value_1')
        self.assertEqual(command.required_option_2, 'value_2')

        self.assertEqual(
            'chunked 1.0,68,0\n'
            '{"inspector":{"messages":[["INFO","test command configuration: "]]}}\n'
            'chunked 1.0,17,23\n'
            '{"finished":true}test,__mv_test\r\n'
            'data,\r\n',
            result.getvalue())

        self.assertEqual(command.protocol_version, 2)

        # 2. Provide access to these properties:
        #   fieldnames
        #   input_header
        #   metadata
        #   search_results_info
        #   service

        self.assertEqual([], command.fieldnames)

        command_metadata = command.metadata
        input_header = command.input_header

        self.assertIsNone(input_header['allowStream'])
        self.assertEqual(input_header['infoPath'], os.path.join(command_metadata.searchinfo.dispatch_dir, 'info.csv'))
        self.assertIsNone(input_header['keywords'])
        self.assertEqual(input_header['preview'], command_metadata.preview)
        self.assertIs(input_header['realtime'], False)
        self.assertEqual(input_header['search'], command_metadata.searchinfo.search)
        self.assertEqual(input_header['sid'], command_metadata.searchinfo.sid)
        self.assertEqual(input_header['splunkVersion'], command_metadata.searchinfo.splunk_version)
        self.assertIsNone(input_header['truncated'])

        self.assertEqual(command_metadata.preview, input_header['preview'])
        self.assertEqual(command_metadata.searchinfo.app, 'searchcommands_app')
        self.assertEqual(command_metadata.searchinfo.args, ['logging_configuration=' + logging_configuration, 'logging_level=ERROR', 'record=false', 'show_configuration=true', 'required_option_1=value_1', 'required_option_2=value_2'])
        self.assertEqual(command_metadata.searchinfo.dispatch_dir, os.path.dirname(input_header['infoPath']))
        self.assertEqual(command_metadata.searchinfo.earliest_time, 0.0)
        self.assertEqual(command_metadata.searchinfo.latest_time, 0.0)
        self.assertEqual(command_metadata.searchinfo.owner, 'admin')
        self.assertEqual(command_metadata.searchinfo.raw_args, command_metadata.searchinfo.args)
        self.assertEqual(command_metadata.searchinfo.search, '| inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text record=t | export add_timestamp=f add_offset=t format=csv segmentation=raw')
        self.assertEqual(command_metadata.searchinfo.session_key, '0JbG1fJEvXrL6iYZw9y7tmvd6nHjTKj7ggaE7a4Jv5R0UIbeYJ65kThn^3hiNeoqzMT_LOtLpVR3Y8TIJyr5bkHUElMijYZ8l14wU0L4n^Oa5QxepsZNUIIQCBm^')
        self.assertEqual(command_metadata.searchinfo.sid, '1433261372.158')
        self.assertEqual(command_metadata.searchinfo.splunk_version, '20150522')
        self.assertEqual(command_metadata.searchinfo.splunkd_uri, 'https://127.0.0.1:8089')
        self.assertEqual(command_metadata.searchinfo.username, 'admin')

        command.search_results_info.search_metrics = command.search_results_info.search_metrics.__dict__
        command.search_results_info.optional_fields_json = command.search_results_info.optional_fields_json.__dict__

        self.maxDiff = None

        self.assertDictEqual(command.search_results_info.__dict__, {
            u'is_summary_index': 0,
            u'bs_thread_count': 1,
            u'rt_backfill': 0,
            u'rtspan': '',
            u'search_StartTime': 1433261392.934936,
            u'read_raw': 1,
            u'root_sid': '',
            u'field_rendering': '',
            u'query_finished': 1,
            u'optional_fields_json': {},
            u'group_list': '',
            u'remoteServers': '',
            u'rt_latest': '',
            u'remote_log_download_mode': 'disabled',
            u'reduce_search': '',
            u'request_finalization': 0,
            u'auth_token': 'UQZSgWwE2f9oIKrj1QG^kVhW^T_cR4H5Z65bPtMhwlHytS5jFrFYyH^dGzjTusDjVTgoBNeR7bvIzctHF7DrLJ1ANevgDOWEWRvABNj6d_k0koqxw9Io',
            u'indexed_realtime': 0,
            u'ppc_bs': '$SPLUNK_HOME/etc',
            u'drop_count': 0,
            u'datamodel_map': '',
            u'search_can_be_event_type': 0,
            u'search_StartUp_Spent': 0,
            u'realtime': 0,
            u'splunkd_uri': 'https://127.0.0.1:8089',
            u'columnOrder': '',
            u'kv_store_settings': 'hosts;127.0.0.1:8191\\;;local;127.0.0.1:8191;read_preference;958513E3-8716-4ABF-9559-DA0C9678437F;replica_set_name;958513E3-8716-4ABF-9559-DA0C9678437F;status;ready;',
            u'label': '',
            u'summary_maxtimespan': '',
            u'indexed_realtime_offset': 0,
            u'sid': 1433261392.159,
            u'msg': [],
            u'internal_only': 0,
            u'summary_id': '',
            u'orig_search_head': '',
            u'ppc_app': 'chunked_searchcommands',
            u'countMap': {
                u'invocations.dispatch.writeStatus': u'1',
                u'duration.dispatch.writeStatus': u'2',
                u'duration.startup.handoff': u'79',
                u'duration.startup.configuration': u'34',
                u'invocations.startup.handoff': u'1',
                u'invocations.startup.configuration': u'1'},
            u'is_shc_mode': 0,
            u'shp_id': '958513E3-8716-4ABF-9559-DA0C9678437F',
            u'timestamp': 1433261392.936374, u'is_remote_sorted': 0,
            u'remote_search': '',
            u'splunkd_protocol': 'https',
            u'site': '',
            u'maxevents': 0,
            u'keySet': '',
            u'summary_stopped': 0,
            u'search_metrics': {
                u'ConsideredEvents': 0,
                u'ConsideredBuckets': 0,
                u'TotalSlicesInBuckets': 0,
                u'EliminatedBuckets': 0,
                u'DecompressedSlices': 0},
            u'summary_mode': 'all', u'now': 1433261392.0,
            u'splunkd_port': 8089, u'is_saved_search': 0,
            u'rtoptions': '',
            u'search': '| inputlookup random_data max=50000 | sum total=total value1 record=t | export add_timestamp=f add_offset=t format=csv segmentation=raw',
            u'bundle_version': 0,
            u'generation_id': 0,
            u'bs_thread_id': 0,
            u'is_batch_mode': 0,
            u'scan_count': 0,
            u'rt_earliest': '',
            u'default_group': '*',
            u'tstats_reduce': '',
            u'kv_store_additional_settings': 'hosts_guids;958513E3-8716-4ABF-9559-DA0C9678437F\\;;',
            u'enable_event_stream': 0,
            u'is_remote': 0,
            u'is_scheduled': 0,
            u'sample_ratio': 1,
            u'ppc_user': 'admin',
            u'sample_seed': 0})

        self.assertIsInstance(command.service, Service)

        self.assertEqual(command.service.authority, command_metadata.searchinfo.splunkd_uri)
        self.assertEqual(command.service.scheme, command.search_results_info.splunkd_protocol)
        self.assertEqual(command.service.port, command.search_results_info.splunkd_port)
        self.assertEqual(command.service.token, command_metadata.searchinfo.session_key)
        self.assertEqual(command.service.namespace.app, command.metadata.searchinfo.app)
        self.assertIsNone(command.service.namespace.owner)
        self.assertIsNone(command.service.namespace.sharing)

        self.assertEqual(command.protocol_version, 2)

        # 3. Produce an error message, log a debug message, and exit when invalid standard option values are encountered

        # Note on loggers
        # Loggers are global and can't be removed once they're created. We create loggers that are keyed by class name.
        # Each instance of a class thus created gets access to the same logger. We created one in the prior test and
        # set its level to ERROR. That level is retained in this test.

        logging_configuration = 'non-existent-logging.conf'
        logging_level = 'NON-EXISTENT-LOGGING-LEVEL'
        record = 'Non-boolean value'
        show_configuration = 'Non-boolean value'

        getinfo_metadata = metadata.format(
            dispatch_dir=encode_string(dispatch_dir),
            logging_configuration=encode_string(logging_configuration)[1:-1],
            logging_level=logging_level,
            record=record,
            show_configuration=show_configuration)

        execute_metadata = '{"action":"execute","finished":true}'
        execute_body = 'test\r\ndata\r\n'

        ifile = StringIO(
            'chunked 1.0,{},0\n{}'.format(len(getinfo_metadata), getinfo_metadata) +
            'chunked 1.0,{},{}\n{}{}'.format(len(execute_metadata), len(execute_body), execute_metadata, execute_body))

        command = TestCommand()
        result = StringIO()
        argv = ['test.py']

        # noinspection PyTypeChecker
        self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)
        self.assertEqual(command.logging_level, 'ERROR')
        self.assertEqual(command.record, False)
        self.assertEqual(command.show_configuration, False)
        self.assertEqual(command.required_option_1, 'value_1')
        self.assertEqual(command.required_option_2, 'value_2')

        self.assertEqual(
            'chunked 1.0,287,0\n'
            '{"inspector":{"messages":[["ERROR","Illegal value: logging_configuration=non-existent-logging.conf"],'
            '["ERROR","Illegal value: logging_level=NON-EXISTENT-LOGGING-LEVEL"],'
            '["ERROR","Illegal value: record=Non-boolean value"],'
            '["ERROR","Illegal value: show_configuration=Non-boolean value"]]}}\n'
            'chunked 1.0,17,0\n'
            '{"finished":true}',
            result.getvalue())

        self.assertEqual(command.protocol_version, 2)

        # 4. Produce an error message, log an error message that includes a traceback, and exit when an exception is
        #    raised during command execution.

        logging_configuration = os.path.join(basedir, 'apps', 'app_with_logging_configuration', 'logging.conf')
        logging_level = 'WARNING'
        record = False
        show_configuration = False

        getinfo_metadata = metadata.format(
            dispatch_dir=encode_string(dispatch_dir),
            logging_configuration=encode_string(logging_configuration)[1:-1],
            logging_level=logging_level,
            record=('true' if record is True else 'false'),
            show_configuration=('true' if show_configuration is True else 'false'))

        execute_metadata = '{"action":"execute","finished":true}'
        execute_body = 'action\r\nraise_exception\r\n'

        ifile = StringIO(
            'chunked 1.0,{},0\n{}'.format(len(getinfo_metadata), getinfo_metadata) +
            'chunked 1.0,{},{}\n{}{}'.format(len(execute_metadata), len(execute_body), execute_metadata, execute_body))

        command = TestCommand()
        result = StringIO()
        argv = ['test.py']

        try:
            command.process(argv, ifile, ofile=result)
        except SystemExit as error:
            self.assertNotEqual(0, error.code)
        except BaseException as error:
            self.fail('{0}: {1}: {2}\n'.format(type(error).__name__, error, result.getvalue()))
        else:
            self.fail('Expected SystemExit, not a return from TestCommand.process: {}\n'.format(result.getvalue()))

        self.assertEqual(command.logging_configuration, logging_configuration)
        self.assertEqual(command.logging_level, logging_level)
        self.assertEqual(command.record, record)
        self.assertEqual(command.show_configuration, show_configuration)
        self.assertEqual(command.required_option_1, 'value_1')
        self.assertEqual(command.required_option_2, 'value_2')

        finished = r'"finished":true'

        inspector = \
            r'"inspector":\{"messages":\[\["ERROR","StandardError at \\".+\\", line \d+ : test ' \
            r'logging_configuration=\\".+\\" logging_level=\\"WARNING\\" record=\\"f\\" ' \
            r'required_option_1=\\"value_1\\" required_option_2=\\"value_2\\" show_configuration=\\"f\\""\]\]\}'

        self.assertRegexpMatches(
            result.getvalue(),
            r'^chunked 1.0,2,0\n'
            r'\{\}\n'
            r'chunked 1.0,\d+,0\n'
            r'\{(' + inspector + r',' + finished + r'|' + finished + r',' + inspector + r')\}')

        self.assertEqual(command.protocol_version, 2)
        return
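
The test drives SearchCommand.process entirely from in-memory files: ifile fakes the chunked protocol stream and result captures the command's output. A minimal sketch of how such a stream is framed, with a hypothetical chunk() helper:

from cStringIO import StringIO

def chunk(metadata, body=''):
    # One transport chunk: a header line carrying the metadata and body
    # lengths, followed by both payloads.
    return 'chunked 1.0,%d,%d\n%s%s' % (len(metadata), len(body), metadata, body)

ifile = StringIO(chunk('{"action":"getinfo","preview":false}') +
                 chunk('{"action":"execute","finished":true}', 'test\r\ndata\r\n'))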

Example 26

Project: btb
Source File: fetch_mailboxforwarding_mail.py
View license
    def handle(self, *args, **kwargs):
        base_url = "https://www.mailboxforwarding.com/"
        
        if (not hasattr(settings, "MAILBOX_FORWARDING") or 
                not "username" in settings.MAILBOX_FORWARDING or
                not "password" in settings.MAILBOX_FORWARDING):
            print "Requires MAILBOX_FORWARDING settings, e.g.:"
            print 'MAILBOX_FORWARDING = {'
            print '  "username": "[email protected]",'
            print '  "password": "secret",'
            print '}'
            print "exit 1"
            sys.exit(1)

        sess = requests.Session()
        res = sess.post(base_url + "manage/login.php", {
            "action": "login",
            "email": settings.MAILBOX_FORWARDING["username"],
            "password": settings.MAILBOX_FORWARDING["password"],
            "loginsubmit.x": "0",
            "loginsubmit.y": "0"
        })
        # This is a slightly dirty hack -- we're matching a javascript data
        # structure with a regex, converting the quotes to doubles so it resembles
        # JSON, and then loading it as JSON.  This may prove brittle.
        match = re.search(r"Ext\.grid\.dummyData = (\[.*\]\]);", res.text, re.DOTALL)
        if not match:
            raise Exception("Can't find data. Are login creds correct?")
        text = match.group(1)
        text = text.replace('"', '\\"')
        text = text.replace("'", '"')
        data = json.loads(text)

        scans = {}
        packages = {}
        for checkbox, date, envelope, type_status, dl in data:
            details = {}
            match = re.search("Type: <b>([^<]+)</b>.*Status: <b>([^<]+)</b>", type_status)
            if not match:
                raise Exception("Can't match type/status")
            details['kind'] = match.group(1)
            details['status'] = match.group(2)

            if details['kind'] == "Letter" and details['status'] != "Scanned":
                continue

            match = re.search("pdfview.php\?id=(\d+)", dl)
            if match:
                id_ = match.group(1)
            else:
                # TODO: Handle packages correctly
                continue
                #raise Exception("Can't find ID")

            match = re.search("src=\"([^\"]+)\"", envelope)
            if not match:
                raise Exception("Can't match envelope image")
            details['envelope'] = match.group(1)


            if details['status'] == "Scanned":
                scans[id_] = details
            elif details['kind'] != "Letter":
                packages[id_] = details

        uploader = User.objects.get(username="uploader")
        org = Organization.objects.get(pk=1) #TODO: generalize this? 

        new_count = 0
        for id_, details in scans.iteritems():
            source_id = "mailboxforwarding.com-{}".format(id_)
            if Scan.objects.filter(source_id=source_id).exists():
                continue
            new_count += 1

            print "Downloading pdf", source_id
            res = sess.get("{}manage/pdfview.php?id={}".format(base_url, id_))
            in_pdf_fh = StringIO()
            in_pdf_fh.write(res.content)
            in_pdf_fh.seek(0)
            reader = PdfFileReader(in_pdf_fh)

            print "Downloading envelope", details['envelope']
            res = sess.get(details['envelope'])
            in_envelope_fh = StringIO()
            in_envelope_fh.write(res.content)
            in_envelope_fh.seek(0)
            img = Image.open(in_envelope_fh)
            out_envelope_fh = StringIO()
            img.save(out_envelope_fh, "pdf")
            envelope_reader = PdfFileReader(out_envelope_fh)

            writer = PdfFileWriter()
            writer.addPage(envelope_reader.getPage(0))
            for page in range(reader.getNumPages()):
                writer.addPage(reader.getPage(page))

            with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as fh:
                writer.write(fh)
                dest_pdf_name = fh.name

            in_envelope_fh.close()
            out_envelope_fh.close()
            in_pdf_fh.close()

            path = tasks.move_scan_file(filename=dest_pdf_name)
            scan = Scan.objects.create(
                uploader=uploader,
                pdf=os.path.relpath(path, settings.MEDIA_ROOT),
                under_construction=True,
                org=org,
                source_id=source_id
            )
            tasks.split_scan(scan=scan)

        if packages:
            print "Manual action needed on the following at " \
                  "https://www.mailboxforwarding.com/:"
            for id_, details in packages.iteritems():
                new_count += 1
                print details
        print "Examined {} letters, {} new.".format(len(data), new_count)

Example 27

Project: WAPT
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\r\n--' + boundary
        end_boundary = sep_boundary + '--\r\n'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\r\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\r\n\r\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            raise
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)

Example 28

Project: smisk
Source File: httpd.py
View license
	def handle_fcgi_request(self, ch):
		rid = 1
		content_length = 0
		query_string = ''
		if '?' in self.path:
			query_string = self.path[self.path.index('?')+1:]
		params = {
			'GATEWAY_INTERFACE': 'CGI/1.1',
			'PATH_INFO': '',
			'QUERY_STRING': query_string,
			'REMOTE_ADDR': self.client_address[0],
			'REMOTE_HOST': self.server.fqdn,
			'REQUEST_METHOD': self.command,
			'SCRIPT_NAME': '/' + self.path.lstrip('/'),
			'SERVER_NAME': '%s:%d' % (self.server.fqdn, self.server.naddr[1]),
			'SERVER_PORT': '%d' % self.server.naddr[1],
			'SERVER_PROTOCOL': self.request_version,
			'SERVER_SOFTWARE': self.server_version,
			
			# Following are not part of CGI 1.1:
			'DOCUMENT_ROOT': self.server.document_root,
			'REMOTE_PORT': '%d' % self.client_address[1],
			'REQUEST_URI': self.path,
			'SCRIPT_FILENAME': self.server.document_root + '/' + self.path.lstrip('/'),
			'SERVER_ADDR': self.server.naddr[0],
		}
		
		# read http headers and transfer to params
		for k in self.headers:
			v = self.headers.get(k)
			params['HTTP_'+k.replace('-','_').upper()] = v
			if k == 'content-length':
				content_length = int(v)
			elif k == 'content-type':
				params['CONTENT_TYPE'] = v
		if content_length:
			params['CONTENT_LENGTH'] = str(content_length)
		
		# begin
		role = fcgi.FCGI_RESPONDER
		flags = 0
		content = '%c%c%c\000\000\000\000\000' % ((role&0xFF00)>>8, role&0xFF, flags)
		ch.writePacket(fcgi.Record(fcgi.FCGI_BEGIN_REQUEST, rid, content))
		
		# params
		content = ''
		for k,v in params.items():
			s = fcgi.writeNameValue(k,v)
			if len(content)+len(s) > fcgi.FCGI_MAX_PACKET_LEN:
				ch.writePacket(fcgi.Record(fcgi.FCGI_PARAMS, rid, content))
				content = s
			else:
				content += s
		ch.writePacket(fcgi.Record(fcgi.FCGI_PARAMS, rid, content))
		ch.writePacket(fcgi.Record(fcgi.FCGI_PARAMS, rid))
		
		# EOF on stdin
		if content_length == 0:
			ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid, ''))
		
		# read reply
		started = False
		wrote_stdin_eof = content_length == 0  # EOF was already sent above when there is no body
		indata = ''
		outbuf = ''
		transfer_encoding = None
		skipout = False
		while 1:
			if content_length:
				try:
					r = ch.readPacket(True)
				except socket.error, e:
					if e.args[0] == 35: # EAGAIN ("Resource temporarily unavailable") on BSD/OS X
						# probably waiting for stdin
						n = content_length
						if n > fcgi.FCGI_MAX_PACKET_LEN:
							n = fcgi.FCGI_MAX_PACKET_LEN
							content_length -= n
						else:
							content_length = 0
						
						indata = self.rfile.read(n)
						
						if not indata:
							log.warn('client sent EOF on stdin even though not all bytes indicated by '\
											 'content-length have been read -- aborting request')
							ch.writePacket(fcgi.Record(fcgi.FCGI_ABORT_REQUEST, rid))
							break
						
						log.info('got %d bytes on http stdin -- forwarding on FCGI channel', len(indata))
						
						ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid, indata))
						
						if content_length == 0:
							# write EOF
							ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid))
							wrote_stdin_eof = True
						
						continue
					else:
						raise
			else:
				r = ch.readPacket()
			log.debug('received packet %r', r)
			if r.type == fcgi.FCGI_STDOUT:
				if not started:
					outbuf += r.content
					r.content = ''
					p = outbuf.find('\r\n\r\n')
					if p != -1:
						sf = StringIO(outbuf[:p])
						r.content = outbuf[p+4:]
						headers = mimetools.Message(sf, True)
						
						# status
						status = headers.get('status', None)
						if status:
							status = status.split(' ',1)
							status[0] = int(status[0])
							self.send_response(*status)
						else:
							self.send_response(200)
						
						# required headers
						skipk = ['server', 'date', 'transfer-encoding']
						self.send_header('Server', headers.getheader('server', self.version_string()))
						self.send_header('Date', headers.getheader('date', self.date_time_string()))
						
						# content length
						if not headers.getheader('content-length', False):
							if self.protocol_version == 'HTTP/1.1':
								transfer_encoding = headers.getheader('transfer-encoding', 'chunked').lower()
								self.send_header('Transfer-Encoding', transfer_encoding)
							else:
								self.close_connection = 1
						
						# send other headers
						for k in headers.keys():
							if k not in skipk:
								self.send_header(k.capitalize(), headers.getheader(k))
						
						self.wfile.write('\r\n')
						started = True
				if r.content and not skipout:
					self.wfile.write(r.content)
			elif r.type == fcgi.FCGI_STDERR:
				log.error('%s: %s', ch, r.content)
			elif r.type == fcgi.FCGI_END_REQUEST:
				if transfer_encoding == 'chunked':
					# terminate the chunked response with a zero-length chunk
					self.wfile.write('0\r\n\r\n')
				break
		
		# EOF on stdin
		if not wrote_stdin_eof:
			ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid))

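In the params phase above, each CGI variable is serialized with fcgi.writeNameValue and packed into records no larger than FCGI_MAX_PACKET_LEN. For reference, here is a minimal hand-rolled sketch of FastCGI's name-value pair encoding; the fcgi module's helper is assumed to do the equivalent:

import struct

def encode_nv_length(n):
    # FastCGI stores lengths under 128 in a single byte; longer values
    # take four bytes with the top bit of the first byte set
    if n < 128:
        return chr(n)
    return struct.pack('>I', n | 0x80000000)

def write_name_value(name, value):
    # nameLength + valueLength, then the raw name and value bytes
    return (encode_nv_length(len(name)) + encode_nv_length(len(value))
            + name + value)

# e.g. write_name_value('REQUEST_METHOD', 'GET')
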
Example 29

Project: maxfield
Source File: PlanPrinterMap.py
View license
    def __init__(self,a,outputDir,nagents,color='#FF004D',useGoogle=False,api_key=None):
        self.a = a
        self.n = a.order() # number of nodes
        self.m = a.size()  # number of links

        self.nagents = nagents
        self.outputDir = outputDir
        self.color = color

        # if the ith link to be made is (p,q) then orderedEdges[i] = (p,q)
        self.orderedEdges = [None]*self.m
        for e in a.edges_iter():
            self.orderedEdges[a.edge[e[0]][e[1]]['order']] = e

        # movements[i][j] is the index (in orderedEdges) of agent i's jth link
        self.movements = agentOrder.getAgentOrder(a,nagents,self.orderedEdges)

        # link2agent[i] is the agent that will make the ith link
        self.link2agent = [-1]*self.m
        for i in range(nagents):
            for e in self.movements[i]:
                self.link2agent[e] = i

        # keyneeds[i,j] = number of keys agent i needs for portal j
        self.agentkeyneeds = np.zeros([self.nagents,self.n],dtype=int)
        for i in xrange(self.nagents):
            for e in self.movements[i]:
                p,q = self.orderedEdges[e]
                self.agentkeyneeds[i][q] += 1

        self.names = np.array([a.node[i]['name'] for i in xrange(self.n)])
        # The alphabetical order
        makeLowerCase = np.vectorize(lambda s: s.lower())
        self.nameOrder = np.argsort(makeLowerCase(self.names))

        self.xy = np.array([self.a.node[i]['xy'] for i in xrange(self.n)])
        # print self.xy

        # The order from north to south (for easy-to-find labels)
        self.posOrder = np.argsort(self.xy,axis=0)[::-1,1]

        # The inverse permutation of posOrder
        self.nslabel = [-1]*self.n
        for i in xrange(self.n):
            self.nslabel[self.posOrder[i]] = i

        self.maxNameLen = max([len(a.node[i]['name']) for i in xrange(self.n)])

        # total stats for this plan
        self.num_portals = self.n
        self.num_links = self.m
        self.num_fields = 0

        if useGoogle:
            # convert xy coordinates to web mercator
            x_merc = np.array([128./np.pi * (self.a.node[i]['geo'][1] + np.pi) for i in self.a.node.keys()])
            min_x_merc = np.min(x_merc)
            #print "min_x_merc",min_x_merc
            x_merc = x_merc - min_x_merc
            #print "Xmin, Xmax",np.min(x_merc),np.max(x_merc)
            y_merc = np.array([128./np.pi * (np.pi - np.log(np.tan(np.pi/4. + self.a.node[i]['geo'][0]/2.))) for i in self.a.node.keys()])
            min_y_merc = np.min(y_merc)
            #print "min_y_merc",min_y_merc
            y_merc = y_merc - min_y_merc
            #print "Ymin, Ymax",np.min(y_merc),np.max(y_merc)
            # determine proper zoom such that the map is smaller than 640 on both sides
            zooms = np.arange(0,20,1)
            largest_x_zoom = 0
            largest_y_zoom = 0
            for zm in zooms:
                #print "X max",np.max(x_merc * 2.**zm + 20.)
                #print "Y max",np.max(y_merc * 2.**zm + 20.)
                if np.max(x_merc * 2.**zm) < 256.:
                    largest_x_zoom = zm
                    #print "X",largest_x_zoom
                if np.max(y_merc * 2.**zm) < 256.:
                    largest_y_zoom = zm
                    #print "Y",largest_y_zoom
            zoom = np.min([largest_x_zoom,largest_y_zoom])
            min_x_merc = min_x_merc*2.**(1+zoom)
            min_y_merc = min_y_merc*2.**(1+zoom)
            self.xy[:,0] = x_merc*2.**(1+zoom)
            self.xy[:,1] = y_merc*2.**(1+zoom)
            for i in xrange(self.n):
                self.a.node[i]['xy'] = self.xy[i]
            xsize = np.max(self.xy[:,0])+20
            ysize = np.max(self.xy[:,1])+20
            self.xylims = [-10,xsize-10,ysize-10,-10]
            # coordinates needed for google maps
            loncenter = np.rad2deg((min_x_merc+xsize/2.-10.)*np.pi/(128.*2.**(zoom+1)) - np.pi)
            latcenter = np.rad2deg(2.*np.arctan(np.exp(-1.*((min_y_merc+ysize/2.-10.)*np.pi/(128.*2.**(zoom+1)) - np.pi))) - np.pi/2.)
            #latmax = np.rad2deg(max([self.a.node[i]['geo'][0] for i in self.a.node.keys()]))
            #latmin = np.rad2deg(min([self.a.node[i]['geo'][0] for i in self.a.node.keys()]))
            #lonmax = np.rad2deg(max([self.a.node[i]['geo'][1] for i in self.a.node.keys()]))
            #lonmin = np.rad2deg(min([self.a.node[i]['geo'][1] for i in self.a.node.keys()]))
            #loncenter = (lonmax-lonmin)/2. + lonmin
            #latcenter = (latmax-latmin)/2. + latmin
            #print "Center Coordinates (lat,lon): ",latcenter,loncenter

            # turn things in to integers for maps API
            map_xwidth = int(xsize)
            map_ywidth = int(ysize)
            zoom = int(zoom)+1

            # google maps API
            # get API key
            if api_key is not None:
                url = "https://maps.googleapis.com/maps/api/staticmap?center={0},{1}&size={2}x{3}&zoom={4}&sensor=false&key={5}".format(latcenter,loncenter,map_xwidth,map_ywidth,zoom,api_key)
            else:
                url = "https://maps.googleapis.com/maps/api/staticmap?center={0},{1}&size={2}x{3}&zoom={4}&sensor=false".format(latcenter,loncenter,map_xwidth,map_ywidth,zoom)
            #print url

            # determine if we can use google maps
            self.google_image = None
            try:
                buffer = StringIO(urllib2.urlopen(url).read())
                self.google_image = Image.imread(buffer)
                plt.clf()
            except urllib2.URLError as err:
                print("Could not connect to google maps server: %s" % err)

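The StringIO wrapper in the try block matters because urllib2 responses are not seekable, while image decoding expects a seekable file-like object. A standalone sketch of the same pattern, assuming matplotlib.image is what the example imports as Image (the URL is a placeholder):

from cStringIO import StringIO
import urllib2
import matplotlib.image as mpimg

url = "https://example.com/map.png"               # placeholder URL
data = urllib2.urlopen(url).read()                # fetch the raw PNG bytes
img = mpimg.imread(StringIO(data), format='png')  # decode from memory
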
Example 30

View license
def _update_rc_conf_legacy(infile, interfaces):
    """
    Return data for (sub-)interfaces and routes
    """

    # Updating this file happens in two phases since it's non-trivial to
    # update. The INTERFACES and ROUTES variables are the key lines, but they
    # will in turn reference other variables, which may be before or after.
    # As a result, we need to load the entire file, find the main variables
    # and then remove the reference variables. When that is done, we add
    # the lines for the new config.

    # First generate new config
    ifaces = []
    routes = []

    gateway4, gateway6 = commands.network.get_gateways(interfaces)

    ifnames = interfaces.keys()
    ifnames.sort()

    for ifname_prefix in ifnames:
        interface = interfaces[ifname_prefix]

        ip4s = interface['ip4s']
        ip6s = interface['ip6s']

        ifname_suffix_num = 0

        for ip4, ip6 in map(None, ip4s, ip6s):
            if ifname_suffix_num:
                ifname = "%s:%d" % (ifname_prefix, ifname_suffix_num)
            else:
                ifname = ifname_prefix

            line = [ifname]
            if ip4:
                line.append('%(address)s netmask %(netmask)s' % ip4)

            if ip6:
                line.append('add %(address)s/%(prefixlen)s' % ip6)

            ifname_suffix_num += 1

            ifaces.append((ifname.replace(':', '_'), ' '.join(line)))

        for i, route in enumerate(interface['routes']):
            if route['network'] == '0.0.0.0' and \
                    route['netmask'] == '0.0.0.0' and \
                    route['gateway'] == gateway4:
                continue

            line = "-net %(network)s netmask %(netmask)s gw %(gateway)s" % \
                    route

            routes.append(('%s_route%d' % (ifname_prefix, i), line))

    if gateway4:
        routes.append(('gateway', 'default gw %s' % gateway4))
    if gateway6:
        routes.append(('gateway6', 'default gw %s' % gateway6))

    # Then load old file
    lines, variables = _parse_config(infile)

    # Update INTERFACES
    lineno = variables.get('INTERFACES')
    if lineno is not None:
        # Remove old lines
        for name in _parse_variable(lines[lineno], strip_bang=True):
            if name in variables:
                lines[variables[name]] = None
    else:
        lines.append('')
        lineno = len(lines) - 1

    config = []
    names = []
    for name, line in ifaces:
        config.append('%s="%s"' % (name, line))
        names.append(name)

    config.append('INTERFACES=(%s)' % ' '.join(names))
    lines[lineno] = '\n'.join(config)

    # Update ROUTES
    lineno = variables.get('ROUTES')
    if lineno is not None:
        # Remove old lines
        for name in _parse_variable(lines[lineno], strip_bang=True):
            if name in variables:
                lines[variables[name]] = None
    else:
        lines.append('')
        lineno = len(lines) - 1

    config = []
    names = []
    for name, line in routes:
        config.append('%s="%s"' % (name, line))
        names.append(name)

    config.append('ROUTES=(%s)' % ' '.join(names))
    lines[lineno] = '\n'.join(config)

    # (Possibly) comment out NETWORKS
    lineno = variables.get('NETWORKS')
    if lineno is not None:
        for name in _parse_variable(lines[lineno], strip_bang=True):
            nlineno = variables.get(name)
            if nlineno is not None:
                lines[nlineno] = '#' + lines[nlineno]

        lines[lineno] = '#' + lines[lineno]

    # (Possibly) update DAEMONS
    lineno = variables.get('DAEMONS')
    if lineno is not None:
        daemons = _parse_variable(lines[lineno])
        try:
            network = daemons.index('!network')
            daemons[network] = 'network'
            if '@net-profiles' in daemons:
                daemons.remove('@net-profiles')
            lines[lineno] = 'DAEMONS=(%s)' % ' '.join(daemons)
        except ValueError:
            pass

    # Filter out any removed lines
    lines = filter(lambda l: l is not None, lines)

    # Serialize into new file
    outfile = StringIO()
    for line in lines:
        print >> outfile, line

    outfile.seek(0)
    return outfile.read()

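The closing lines use a classic Python 2 idiom: a StringIO stands in for a real file so that print >> can serialize the config in memory, and seek(0) rewinds the buffer before reading it back. A minimal sketch with made-up config lines (cStringIO.StringIO works here too, since only byte strings are written):

from cStringIO import StringIO

outfile = StringIO()
for line in ['INTERFACES=(eth0)', 'ROUTES=(gateway)']:  # made-up lines
    print >> outfile, line   # print appends the trailing newline

outfile.seek(0)              # rewind before reading the result back
contents = outfile.read()
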
Example 31

Project: bh2014
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',  # sic: misspelling preserved from the original distutils upload code

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)

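The body-building loop above is the stock distutils pattern for assembling a multipart/form-data payload in memory before posting it. A stripped-down sketch with made-up form fields; a real upload also needs the file-tuple handling and the headers shown in the example:

import StringIO

boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
body = StringIO.StringIO()
for key, value in {'name': 'demo', 'version': '1.0'}.items():
    body.write('\n--' + boundary)
    body.write('\nContent-Disposition: form-data; name="%s"' % key)
    body.write('\n\n')
    body.write(value)
body.write('\n--' + boundary + '--\n')  # closing boundary
payload = body.getvalue()
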
Example 32

Project: entropy
Source File: client.py
View license
    def _generic_post_handler(self, function_name, params, file_params,
        timeout):
        """
        Given a function name and the request data (dict format), do the actual
        HTTP request and return the response object to the caller.
        WARNING: params and file_params dict keys must be ASCII strings only.

        @param function_name: name of the function that called this method
        @type function_name: string
        @param params: POST parameters
        @type params: dict
        @param file_params: mapping composed of file names as keys and tuples
            composed of (file_name, file object) as values
        @type file_params: dict
        @param timeout: socket timeout
        @type timeout: float
        @return: tuple composed of the server response string or None
            (in case of empty response) and the HTTPResponse object (useful
            for checking response status)
        @rtype: tuple
        """
        if timeout is None:
            timeout = self._default_timeout_secs
        multipart_boundary = "---entropy.services,boundary---"
        request_path = self._request_path.rstrip("/") + "/" + function_name
        const_debug_write(__name__,
            "WebService _generic_post_handler, calling: %s at %s -- %s,"
            " tx_callback: %s, timeout: %s" % (self._request_host, request_path,
                params, self._transfer_callback, timeout,))
        connection = None
        try:
            if self._request_protocol == "http":
                connection = httplib.HTTPConnection(self._request_host,
                    timeout = timeout)
            elif self._request_protocol == "https":
                ssl_context = None
                if hasattr(ssl, 'create_default_context'):
                    ssl_context = ssl.create_default_context(
                        purpose = ssl.Purpose.CLIENT_AUTH)
                connection = httplib.HTTPSConnection(
                    self._request_host, timeout = timeout, context = ssl_context)
            else:
                raise WebService.RequestError("invalid request protocol",
                    method = function_name)

            headers = {
                "Accept": "text/plain",
                "User-Agent": self._generate_user_agent(function_name),
            }

            if file_params is None:
                file_params = {}
            # autodetect file parameters in params
            for k in list(params.keys()):
                if isinstance(params[k], (tuple, list)) \
                    and (len(params[k]) == 2):
                    f_name, f_obj = params[k]
                    if isinstance(f_obj, file):
                        file_params[k] = params[k]
                        del params[k]
                elif const_isunicode(params[k]):
                    # convert to raw string
                    params[k] = const_convert_to_rawstring(params[k],
                        from_enctype = "utf-8")
                elif not const_isstring(params[k]):
                    # invalid ?
                    if params[k] is None:
                        # will be converted to ""
                        continue
                    int_types = const_get_int()
                    supported_types = (float, list, tuple) + int_types
                    if not isinstance(params[k], supported_types):
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k, type(params[k])))
                    list_types = (list, tuple)
                    if isinstance(params[k], list_types):
                        # not supporting nested lists
                        non_str = [x for x in params[k] if not \
                            const_isstring(x)]
                        if non_str:
                            raise WebService.UnsupportedParameters(
                                "%s is unsupported type %s" % (k,
                                    type(params[k])))

            body = None
            if not file_params:
                headers["Content-Type"] = "application/x-www-form-urlencoded"
                encoded_params = urllib_parse.urlencode(params)
                data_size = len(encoded_params)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                if data_size < 65536:
                    try:
                        connection.request("POST", request_path, encoded_params,
                            headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                else:
                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    sio = StringIO(encoded_params)
                    data_size = len(encoded_params)
                    while True:
                        chunk = sio.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(sio.tell(),
                                data_size, False)
                # for both ways, send a signal through the callback
                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)

            else:
                headers["Content-Type"] = "multipart/form-data; boundary=" + \
                    multipart_boundary
                body_file, body_fpath = self._encode_multipart_form(params,
                    file_params, multipart_boundary)
                try:
                    data_size = body_file.tell()
                    headers["Content-Length"] = str(data_size)
                    body_file.seek(0)
                    if self._transfer_callback is not None:
                        self._transfer_callback(0, data_size, False)

                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    while True:
                        chunk = body_file.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(body_file.tell(),
                                data_size, False)
                    if self._transfer_callback is not None:
                        self._transfer_callback(data_size, data_size, False)
                finally:
                    body_file.close()
                    os.remove(body_fpath)

            try:
                response = connection.getresponse()
            except socket.error as err:
                raise WebService.RequestError(err,
                    method = function_name)
            const_debug_write(__name__, "WebService.%s(%s), "
                "response header: %s" % (
                    function_name, params, response.getheaders(),))
            total_length = response.getheader("Content-Length", "-1")
            try:
                total_length = int(total_length)
            except ValueError:
                total_length = -1
            outcome = const_convert_to_rawstring("")
            current_len = 0
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)
            while True:
                try:
                    chunk = response.read(65536)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)
                if not chunk:
                    break
                outcome += chunk
                current_len += len(chunk)
                if self._transfer_callback is not None:
                    self._transfer_callback(current_len, total_length, True)

            if self._transfer_callback is not None:
                self._transfer_callback(total_length, total_length, True)

            if const_is_python3():
                outcome = const_convert_to_unicode(outcome)
            if not outcome:
                return None, response
            return outcome, response

        except httplib.HTTPException as err:
            raise WebService.RequestError(err,
                method = function_name)
        finally:
            if connection is not None:
                connection.close()

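For large url-encoded bodies, the method above wraps the encoded string in a StringIO purely so it can be pushed out in fixed-size slices, with sio.tell() feeding the transfer callback. The same pattern in isolation; connection is assumed to be an httplib connection as in the example:

from cStringIO import StringIO

def send_in_chunks(connection, payload, chunk_size=65535, callback=None):
    sio = StringIO(payload)
    total = len(payload)
    while True:
        chunk = sio.read(chunk_size)
        if not chunk:
            break
        connection.send(chunk)            # push one slice down the socket
        if callback is not None:
            callback(sio.tell(), total)   # bytes sent so far vs. total
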
Example 33

Project: oleviewdotnet
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',  # sic: misspelling preserved from the original distutils upload code

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)

Example 34

Project: PokemonGo-Bot-Desktop
Source File: config.py
View license
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # not JSON; fall back to the fileConfig format

                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error as e:
                if e.errno != RESET_ERROR:
                    raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)

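The handle() docstring above spells out the wire format: a 4-byte big-endian length packed with struct.pack(">L", n), followed by the configuration text. A minimal client sketch for driving the listener; 9030 is the stdlib's DEFAULT_LOGGING_CONFIG_PORT:

import socket
import struct

def send_logging_config(config_text, host='localhost', port=9030):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    s.sendall(struct.pack('>L', len(config_text)))  # 4-byte length prefix
    s.sendall(config_text)                          # then the config itself
    s.close()
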
Example 35

Project: termite-data-server
Source File: pdfinvoice.py
View license
    def draw(self, invoice, items_page=10):
        """ Draws the invoice """
        buffer = cStringIO.StringIO()
        invoice_items = invoice['items']
        pages = max((len(invoice_items)-2)/items_page+1,1)
        canvas = Canvas(buffer, pagesize=self.page_size)
        for page in range(pages):
            canvas.translate(0, 29.7 * cm)
            canvas.setFont(self.font_face, 10)

            canvas.saveState()
            canvas.setStrokeColorRGB(0.9, 0.5, 0.2)
            canvas.setFillColorRGB(0.2, 0.2, 0.2)
            canvas.setFont(self.font_face, 16)
            canvas.drawString(1 * cm, -1 * cm, invoice.get('title',''))
            if self.logo:
                canvas.drawInlineImage(self.logo, 1 * cm, -1 * cm, 250, 16)
            canvas.setLineWidth(4)
            canvas.line(0, -1.25 * cm, 21.7 * cm, -1.25 * cm)
            canvas.restoreState()

            canvas.saveState()
            notes = listify(invoice.get('notes',''))
            textobject = canvas.beginText(1 * cm, -25 * cm)
            for line in notes:
                textobject.textLine(line)
            canvas.drawText(textobject)
            textobject = canvas.beginText(18 * cm, -28 * cm)
            textobject.textLine('Pag.%s/%s' % (page+1,pages))
            canvas.drawText(textobject)
            canvas.restoreState()

            canvas.saveState()
            business_details = listify(invoice.get('from','FROM:'))
            canvas.setFont(self.font_face, 9)
            textobject = canvas.beginText(13 * cm, -2.5 * cm)
            for line in business_details:
                textobject.textLine(line)
            canvas.drawText(textobject)
            canvas.restoreState()

            canvas.saveState()
            client_info = listify(invoice.get('to','TO:'))
            textobject = canvas.beginText(1.5 * cm, -2.5 * cm)
            for line in client_info:
                textobject.textLine(line)
            canvas.drawText(textobject)
            canvas.restoreState()

            textobject = canvas.beginText(1.5 * cm, -6.75 * cm)
            textobject.textLine(u'Invoice ID: %s' % invoice.get('id','<invoice id>'))
            textobject.textLine(u'Invoice Date: %s' % invoice.get('date',datetime.date.today()))
            textobject.textLine(u'Client: %s' % invoice.get('client_name','<invoice client>'))
            canvas.drawText(textobject)

            items = invoice_items[1:][page*items_page:(page+1)*items_page]
            if items:
                data = [invoice_items[0]]
                for item in items:
                    data.append([
                            self.format_currency(x)
                            if isinstance(x,float) else x
                            for x in item])
                righta = [k for k,v in enumerate(items[0])
                          if isinstance(v,(int,float,Decimal))]
                if page == pages-1:
                    total = self.format_currency(invoice['total'])
                else:
                    total = ''
                data.append(['']*(len(items[0])-1)+[total])
                colWidths = [2.5*cm]*len(items[0])
                colWidths[1] = (21.5-2.5*len(items[0]))*cm
                table = Table(data, colWidths=colWidths)
                table.setStyle([
                        ('FONT', (0, 0), (-1, -1), self.font_face),
                        ('FONTSIZE', (0, 0), (-1, -1), 8),
                        ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
                        ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
                        ('GRID', (-1, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
                        ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
                        ]+[('ALIGN',(k,0),(k,-1),'RIGHT') for k in righta])
                tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
                table.drawOn(canvas, 1 * cm, -8 * cm - th)

            if page == pages-1:
                items = invoice['totals'][1:]
                if items:
                    data = [invoice['totals'][0]]
                    for item in items:
                        data.append([
                                self.format_currency(x)
                                if isinstance(x,float) else x
                                for x in item])
                    righta = [k for k,v in enumerate(items[0])
                              if isinstance(v,(int,float,Decimal))]
                    total = self.format_currency(invoice['total'])
                    data.append(['']*(len(items[0])-1)+[total])
                    colWidths = [2.5*cm]*len(items[0])
                    colWidths[1] = (21.5-2.5*len(items[0]))*cm
                    table = Table(data, colWidths=colWidths)
                    table.setStyle([
                            ('FONT', (0, 0), (-1, -1), self.font_face),
                            ('FONTSIZE', (0, 0), (-1, -1), 8),
                            ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
                            ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
                            ('GRID', (-1, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
                            ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
                            ]+[('ALIGN',(k,0),(k,-1),'RIGHT') for k in righta])
                    tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
                    table.drawOn(canvas, 1 * cm, -18 * cm - th)
            canvas.showPage()
        canvas.save()
        return buffer.getvalue()

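Because reportlab's Canvas accepts any file-like object, draw() can render straight into a cStringIO buffer and return raw PDF bytes via getvalue(); note that save() runs once, after the page loop. The core of that pattern reduced to a hello-world, assuming reportlab is installed:

from cStringIO import StringIO
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import A4

buf = StringIO()
pdf = Canvas(buf, pagesize=A4)      # render into memory, not to a file path
pdf.drawString(72, 720, "Hello, invoice")
pdf.showPage()                      # close the current page
pdf.save()                          # finalize the document into buf
pdf_bytes = buf.getvalue()
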
Example 36

View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data
        f = open(filename,'rb')
        content = f.read()
        f.close()
        basename = os.path.basename(filename)
        comment = ''
        if command=='bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action':'file_upload',
            'protocol_version':'1',
            'name':self.distribution.get_name(),
            'version':self.distribution.get_version(),
            'content':(basename,content),
            'filetype':command,
            'pyversion':pyversion,
            'md5_digest':md5(content).hexdigest(),
            }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            asc_file = open(filename + ".asc")
            data['gpg_signature'] = (os.path.basename(filename) + ".asc", asc_file.read())
            asc_file.close()

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75

Example 37

View license
    def draw(self, invoice, items_page=10):
        """ Draws the invoice """
        buffer = cStringIO.StringIO()
        invoice_items = invoice['items']
        pages = max((len(invoice_items)-2)/items_page+1,1)
        canvas = Canvas(buffer, pagesize=self.page_size)
        for page in range(pages):
            canvas.translate(0, 29.7 * cm)
            canvas.setFont(self.font_face, 10)

            canvas.saveState()
            canvas.setStrokeColorRGB(0.9, 0.5, 0.2)
            canvas.setFillColorRGB(0.2, 0.2, 0.2)
            canvas.setFont(self.font_face, 16)
            canvas.drawString(1 * cm, -1 * cm, invoice.get('title',''))
            if self.logo:
                canvas.drawInlineImage(self.logo, 1 * cm, -1 * cm, 250, 16)
            canvas.setLineWidth(4)
            canvas.line(0, -1.25 * cm, 21.7 * cm, -1.25 * cm)
            canvas.restoreState()

            canvas.saveState()
            notes = listify(invoice.get('notes',''))
            textobject = canvas.beginText(1 * cm, -25 * cm)
            for line in notes:
                textobject.textLine(line)
            canvas.drawText(textobject)
            textobject = canvas.beginText(18 * cm, -28 * cm)
            textobject.textLine('Pag.%s/%s' % (page+1,pages))
            canvas.drawText(textobject)
            canvas.restoreState()

            canvas.saveState()
            business_details = listify(invoice.get('from','FROM:'))
            canvas.setFont(self.font_face, 9)
            textobject = canvas.beginText(13 * cm, -2.5 * cm)
            for line in business_details:
                textobject.textLine(line)
            canvas.drawText(textobject)
            canvas.restoreState()

            canvas.saveState()
            client_info = listify(invoice.get('to','TO:'))
            textobject = canvas.beginText(1.5 * cm, -2.5 * cm)
            for line in client_info:
                textobject.textLine(line)
            canvas.drawText(textobject)
            canvas.restoreState()

            textobject = canvas.beginText(1.5 * cm, -6.75 * cm)
            textobject.textLine(u'Invoice ID: %s' % invoice.get('id','<invoice id>'))
            textobject.textLine(u'Invoice Date: %s' % invoice.get('date',datetime.date.today()))
            textobject.textLine(u'Client: %s' % invoice.get('client_name','<invoice client>'))
            canvas.drawText(textobject)

            items = invoice_items[1:][page*items_page:(page+1)*items_page]
            if items:
                data = [invoice_items[0]]
                for item in items:
                    data.append([
                            self.format_currency(x)
                            if isinstance(x,float) else x
                            for x in item])
                righta = [k for k,v in enumerate(items[0])
                          if isinstance(v,(int,float,Decimal))]
                if page == pages-1:
                    total = self.format_currency(invoice['total'])
                else:
                    total = ''
                data.append(['']*(len(items[0])-1)+[total])
                colWidths = [2.5*cm]*len(items[0])
                colWidths[1] = (21.5-2.5*len(items[0]))*cm
                table = Table(data, colWidths=colWidths)
                table.setStyle([
                        ('FONT', (0, 0), (-1, -1), self.font_face),
                        ('FONTSIZE', (0, 0), (-1, -1), 8),
                        ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
                        ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
                        ('GRID', (-1, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
                        ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
                        ]+[('ALIGN',(k,0),(k,-1),'RIGHT') for k in righta])
                tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
                table.drawOn(canvas, 1 * cm, -8 * cm - th)

            if page == pages-1:
                items = invoice['totals'][1:]
                if items:
                    data = [invoice['totals'][0]]
                    for item in items:
                        data.append([
                                self.format_currency(x)
                                if isinstance(x,float) else x
                                for x in item])
                    righta = [k for k,v in enumerate(items[0])
                              if isinstance(v,(int,float,Decimal))]
                    total = self.format_currency(invoice['total'])
                    data.append(['']*(len(items[0])-1)+[total])
                    colWidths = [2.5*cm]*len(items[0])
                    colWidths[1] = (21.5-2.5*len(items[0]))*cm
                    table = Table(data, colWidths=colWidths)
                    table.setStyle([
                            ('FONT', (0, 0), (-1, -1), self.font_face),
                            ('FONTSIZE', (0, 0), (-1, -1), 8),
                            ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
                            ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
                            ('GRID', (-1, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
                            ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
                            ]+[('ALIGN',(k,0),(k,-1),'RIGHT') for k in righta])
                    tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
                    table.drawOn(canvas, 1 * cm, -18 * cm - th)
            canvas.showPage()
        canvas.save()
        return buffer.getvalue()

Example 38

Project: py-zfec
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data
        content = open(filename,'rb').read()
        basename = os.path.basename(filename)
        comment = ''
        if command=='bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action':'file_upload',
            'protcol_version':'1',  # sic: misspelling preserved from the original distutils upload code
            'name':self.distribution.get_name(),
            'version':self.distribution.get_version(),
            'content':(basename,content),
            'filetype':command,
            'pyversion':pyversion,
            'md5_digest':md5(content).hexdigest(),
            }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75

Example 39

Project: Veil-Evasion
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)

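One difference from the previous example worth flagging: base64.encodestring appends a trailing newline (and wraps output every 76 characters), which is why the earlier variant calls .strip(); standard_b64encode emits no newline. A quick Python 2 check, assuming credentials short enough not to wrap:

import base64

creds = 'user:secret'
# encodestring adds a trailing '\n'; strip() makes the two equal here
assert base64.standard_b64encode(creds) == base64.encodestring(creds).strip()
print 'Basic ' + base64.standard_b64encode(creds)
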
Example 40

Project: autonetkit
Source File: graphml.py
View license
def load_graphml(input_data, defaults = True):


    # TODO: allow default properties to be passed in as dicts

    try:
        graph = nx.read_graphml(input_data)
    except IOError, e:
        acceptable_errors = set([2, 36, 63])  # 2: no such file or directory
                                              # 36/63: filename too long (Linux/OS X),
                                              # i.e. the string is data, not a path
        if e.errno in acceptable_errors:
            from xml.etree.cElementTree import ParseError

            # try as data string rather than filename string

            try:
                input_pseudo_fh = StringIO(input_data)  # load into a filehandle for networkx
                graph = nx.read_graphml(input_pseudo_fh)
            except (IOError, IndexError, ParseError):
                raise autonetkit.exception.AnkIncorrectFileFormat
        else:
            raise e

    if graph.is_multigraph():
        log.info('Input graph is multigraph. Converting to single-edge graph')
        if graph.is_directed():
            graph = nx.DiGraph(graph)
        else:
            graph = nx.Graph(graph)

    # TODO: need to support edge index keying for multi graphs
    graph.remove_edges_from(edge for edge in graph.selfloop_edges())

# TODO: if selfloops are present, log that we are removing them

    letters_single = (c for c in string.lowercase)  # a, b, c, ... z
    letters_double = ('%s%s' % (a, b) for (a, b) in
        itertools.product(string.lowercase, string.lowercase))  # aa, ab, ... zz
    letters = itertools.chain(letters_single, letters_double)  # a, b, c, .. z, aa, ab, ac, ... zz

# TODO: need to get set of current labels, and only return if not in this set

    # TODO: add cloud, host, etc
    # prefixes for unlabelled devices, i.e. router -> r_a

    label_prefixes = {'router': 'r', 'switch': 'sw', 'server': 'se'}

    current_labels = set(graph.node[node].get('label') for node in
                         graph.nodes_iter())
    unique_label = (letter for letter in letters if letter
        not in current_labels)

# TODO: make sure device label set

    ank_graph_defaults = settings['Graphml']['Graph Defaults']
    for (key, val) in ank_graph_defaults.items():
        if key not in graph.graph:
            graph.graph[key] = val

    # handle yEd exported booleans: if a boolean is set, only the nodes marked
    # true have the attribute; map the remainder to false to allow ANK logic
    # for node in graph.nodes(data=True):
        # print node

    all_labels = dict((n, d.get('label')) for (n, d) in
                      graph.nodes(data=True))
    label_counts = defaultdict(list)
    for (node, label) in all_labels.items():
        label_counts[label].append(node)

    # set default name for blank labels to ensure unique

    try:
        blank_labels = [v for (k, v) in label_counts.items()
                        if not k].pop()  # strip outer list
    except IndexError:
        blank_labels = []  # no blank labels
    for (index, node) in enumerate(blank_labels):

        # TODO: log message that no label set, so setting default

        graph.node[node]['label'] = 'none___%s' % index

    duplicates = [(k, v) for (k, v) in label_counts.items() if k
                  and len(v) > 1]
    for (label, nodes) in duplicates:
        for node in nodes:

            # TODO: need to check they don't all have same ASN... if so then warn

            try:
                graph.node[node]['label'] = '%s_%s' \
                    % (graph.node[node]['label'], graph.node[node]['asn'])
            except KeyError:
                log.warning('Unable to set new label for duplicate node %s: %s'
                             % (node, graph.node[node].get('label')))

    boolean_attributes = set(k for (n, d) in graph.nodes(data=True)
                             for (k, v) in d.items() if isinstance(v,
                             bool))

    for node in graph:
        for attr in boolean_attributes:
            if attr not in graph.node[node]:
                graph.node[node][attr] = False

    boolean_attributes = set(k for (n1, d1) in graph.edge.items()
                             for (n2, d2) in d1.items() for (k, v) in
                             d2.items() if isinstance(v, bool))
    for (n1, d1) in graph.edge.items():
        for (n2, d2) in d1.items():
            for attr in boolean_attributes:
                if attr not in graph.edge[n1][n2]:
                    graph.edge[n1][n2][attr] = False

# TODO: store these in config file

    if defaults:
        ank_node_defaults = settings['Graphml']['Node Defaults']
        node_defaults = graph.graph['node_default']  # update with defaults from graphml
        for (key, val) in node_defaults.items():
            if val == 'False':
                node_defaults[key] = False

    # TODO: do a dict update before applying so only need to iterate nodes once

        for (key, val) in ank_node_defaults.items():
            if key not in node_defaults or node_defaults[key] == 'None':
                node_defaults[key] = val

        for node in graph:
            for (key, val) in node_defaults.items():
                if key not in graph.node[node]:
                    graph.node[node][key] = val

    # set address family

        graph.graph['address_family'] = 'v4'
        graph.graph['enable_routing'] = True

    # map lat/lon from zoo to crude x/y approximation

    if graph.graph.get('Creator') == 'Topology Zoo Toolset':
        all_lat = [graph.node[n].get('Latitude') for n in graph
                   if graph.node[n].get('Latitude')]
        all_lon = [graph.node[n].get('Longitude') for n in graph
                   if graph.node[n].get('Longitude')]

        lat_min = min(all_lat)
        lon_min = min(all_lon)
        lat_max = max(all_lat)
        lon_max = max(all_lon)
        lat_mean = (lat_max + lat_min) / 2
        lon_mean = (lon_max + lon_min) / 2
        lat_scale = 500 / (lat_max - lat_min)
        lon_scale = 500 / (lon_max - lon_min)
        for node in graph:
            lat = graph.node[node].get('Latitude') or lat_mean  # set default to be mean of min/max
            lon = graph.node[node].get('Longitude') or lon_mean  # set default to be mean of min/max
            graph.node[node]['y'] = -1 * lat * lat_scale
            graph.node[node]['x'] = lon * lon_scale

    if not (any(graph.node[n].get('x') for n in graph)
            and any(graph.node[n].get('y') for n in graph)):

# No x, y set, layout in a grid

        grid_length = int(math.ceil(math.sqrt(len(graph))))
        co_ords = [(x * 100, y * 100) for y in range(grid_length)
                   for x in range(grid_length)]

        # (0,0), (100, 0), (200, 0), (0, 100), (100, 100) ....

        for node in sorted(graph):
            (x, y) = co_ords.pop(0)
            graph.node[node]['x'] = x
            graph.node[node]['y'] = y

    # and ensure asn is integer, x and y are floats

    for node in sorted(graph):
        graph.node[node]['asn'] = int(graph.node[node]['asn'])
        if graph.node[node]['asn'] == 0:
            log.debug('Node %s has ASN set to 0. Setting to 1'
                      % graph.node[node]['label'])
            graph.node[node]['asn'] = 1
        try:
            x = float(graph.node[node]['x'])
        except KeyError:
            x = 0
        graph.node[node]['x'] = x
        try:
            y = float(graph.node[node]['y'])
        except KeyError:
            y = 0
        graph.node[node]['y'] = y
        try:
            graph.node[node]['label']
        except KeyError:
            device_type = graph.node[node]['device_type']
            graph.node[node]['label'] = '%s_%s' \
                % (label_prefixes[device_type], unique_label.next())

    if defaults:
        ank_edge_defaults = settings['Graphml']['Edge Defaults']
        edge_defaults = graph.graph['edge_default']
        for (key, val) in ank_edge_defaults.items():
            if key not in edge_defaults or edge_defaults[key] == 'None':
                edge_defaults[key] = val

        for (src, dst) in graph.edges():
            for (key, val) in edge_defaults.items():
                if key not in graph[src][dst]:
                    graph[src][dst][key] = val

# apply defaults
# relabel nodes
# other handling... split this into separate module!
# relabel based on label: assume unique by now!
    # if graph.graph.get("Network") == "European NRENs":
        # TODO: test if non-unique labels, if so then warn and proceed with this logic
        # we need to map node ids to contain network to ensure unique labels
        # mapping = dict( (n, "%s__%s" % (d['label'], d['asn'])) for n, d in graph.nodes(data=True))


    mapping = dict((n, d['label']) for (n, d) in graph.nodes(data=True))  # TODO: use dict comprehension
    if not all(key == val for (key, val) in mapping.items()):
        nx.relabel_nodes(graph, mapping, copy=False)  # Networkx wipes data if remap with same labels

    graph.graph['file_type'] = 'graphml'

    selfloop_count = graph.number_of_selfloops()
    if selfloop_count > 0:
        log.warning("Self loops present: do multiple nodes have the same label?")
        selfloops = ", ".join(str(e) for e in graph.selfloop_edges())
        log.warning("Removing selfloops: %s" % selfloops)
        graph.remove_edges_from(edge for edge in graph.selfloop_edges())


    return graph

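The fallback above is the core StringIO trick: when read_graphml fails on input_data as a filename, the same string is wrapped in a file-like object and parsed as data. A self-contained Python 2 sketch with a toy GraphML document (the graph content is illustrative):

import cStringIO
import networkx as nx

graphml_data = '''<?xml version="1.0" encoding="UTF-8"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns">
  <graph edgedefault="undirected">
    <node id="a"/><node id="b"/>
    <edge source="a" target="b"/>
  </graph>
</graphml>'''

# read_graphml accepts any file-like object, so an in-memory string
# can stand in for a file on disk
graph = nx.read_graphml(cStringIO.StringIO(graphml_data))
print graph.nodes(), graph.edges()
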
Example 41

Project: viper
Source File: cuckoo.py
View license
    def run(self):
        super(Cuckoo, self).run()
        if self.args is None:
            return

        # Get the connections string from config

        if cfg.cuckoo.cuckoo_host:
            cuckoo_host = cfg.cuckoo.cuckoo_host
        else:
            self.log('error', 'Cuckoo Config Not Set')
            return

        if cfg.cuckoo.cuckoo_modified:
            search_url = '{0}/api/tasks/search/sha256'.format(cuckoo_host)
            submit_file_url = '{0}/api/tasks/create/file/'.format(cuckoo_host)
            status_url = '{0}/api/cuckoo/status'.format(cuckoo_host)
        else:
            search_url = '{0}/tasks/list'.format(cuckoo_host)
            submit_file_url = '{0}/tasks/create/file'.format(cuckoo_host)
            status_url = '{0}/cuckoo/status'.format(cuckoo_host)

        if self.args.status:
            # get the JSON
            try:
                api_status = self.api_query('get', status_url).json()
            except:
                return

            if cfg.cuckoo.cuckoo_modified:
                cuckoo_version = api_status['data']['version']
                machines = '{0}/{1}'.format(api_status['data']['machines']['available'],
                                            api_status['data']['machines']['total']
                                            )
                tasks = [api_status['data']['tasks']['completed'],
                         api_status['data']['tasks']['pending'],
                         api_status['data']['tasks']['reported'],
                         api_status['data']['tasks']['running'],
                         api_status['data']['tasks']['total']
                         ]
            else:
                cuckoo_version = api_status['version']
                machines = '{0}/{1}'.format(api_status['machines']['available'],
                                            api_status['machines']['total']
                                            )
                tasks = [api_status['tasks']['completed'],
                         api_status['tasks']['pending'],
                         api_status['tasks']['reported'],
                         api_status['tasks']['running'],
                         api_status['tasks']['total']
                         ]

            self.log('info', "Cuckoo")
            self.log('item', "Version: {0}".format(cuckoo_version))
            self.log('item', "Available Machines: {0}".format(machines))

            self.log('info', "Tasks")
            self.log('item', "Completed: {0}".format(tasks[0]))
            self.log('item', "Pending: {0}".format(tasks[1]))
            self.log('item', "Reported: {0}".format(tasks[2]))
            self.log('item', "Running: {0}".format(tasks[3]))
            self.log('item', "Total: {0}".format(tasks[4]))


        if self.args.file:
            if not __sessions__.is_set():
                self.log('error', "No open session")
                return

            if not self.args.resubmit:
                # Check for existing Session
                if cfg.cuckoo.cuckoo_modified:
                    search_results = self.api_query('get', '{0}/{1}'.format(search_url, __sessions__.current.file.sha256)).json()
                    if search_results['data'] != "Sample not found in database":
                        self.log('info', "Found {0} Results".format(len(search_results['data'])))
                        rows = []
                        header = ['ID', 'Started On', 'Status', 'Completed On']
                        for result in search_results['data']:
                            rows.append([result['id'], result['started_on'], result['status'], result['completed_on']])
                        self.log('table', dict(header=header, rows=rows))
                        self.log('warning', "use -r, --resubmit to force a new analysis")
                        return
                else:
                    search_results = self.api_query('get', search_url).json()
                    count = 0
                    if 'tasks' in search_results:
                        rows = []
                        header = ['ID', 'Started On', 'Status', 'Completed On']
                        for result in search_results['tasks']:
                            try:
                                if result['sample']['sha256'] == __sessions__.current.file.sha256:
                                    rows.append([result['id'], result['started_on'], result['status'], result['completed_on']])
                                    count += 1
                            except:
                                pass
                        if len(rows) > 0:
                            self.log('info', "Found {0} Results".format(count))
                            self.log('table', dict(header=header, rows=rows))
                            self.log('warning', "use -r, --resubmit to force a new analysis")
                            return
            # Submit the file
            params = {}
            if self.args.machine:
                params['machine'] = self.args.machine
            if self.args.package:
                params['package'] = self.args.package
            if self.args.options:
                params['options'] = self.args.options

            files = {'file': (__sessions__.current.file.name, open(__sessions__.current.file.path, 'rb').read())}
            submit_file = self.api_query('post', submit_file_url, files=files, params=params).json()
            try:
                self.log('info', "Task Submitted ID: {0}".format(submit_file['task_id']))
            except KeyError:
                try:
                    self.log('info', "Task Submitted ID: {0}".format(submit_file['task_ids'][0]))
                except KeyError:
                    self.log('error', submit_file)

        if self.args.dropped and __sessions__.is_set():
            try:
                task_id = int(self.args.dropped)
            except:
                self.log('error', "Not a valid task id")
                return

            # Handle Modified-Cuckoo
            if cfg.cuckoo.cuckoo_modified:
                dropped_url = '{0}/api/tasks/get/dropped/{1}'.format(cuckoo_host, task_id)
            else:
                dropped_url = '{0}/tasks/report/{1}/dropped'.format(cuckoo_host, task_id)

            dropped_result = self.api_query('get', dropped_url)

            if dropped_result.content.startswith('BZ'):
                # explode BZ
                with tarfile.open(fileobj=StringIO(dropped_result.content)) as bz_file:
                    for item in bz_file:
                        # Write Files to tmp dir
                        if item.isreg():
                            # Create temp dirs to prevent duplicate filenames from causing errors
                            with create_temp() as temp_dir:
                                file_path = os.path.join(temp_dir, os.path.basename(item.name))
                                with open(file_path, 'wb') as out:
                                    out.write(bz_file.extractfile(item).read())
                                # Add the file
                                self.log('info', "Storing Dropped File {0}".format(item.name))
                                self.add_file(file_path,
                                              'Cuckoo_ID_{0}'.format(task_id),
                                              __sessions__.current.file.sha256
                                              )
                return

            else:
                if not __sessions__.is_set():
                    self.log('error', "No open session")
                    return
                try:
                    json_error = dropped_result.json()
                    self.log('error', json_error['data'])
                except Exception as e:
                    self.log('error', "Your broke something, {0}".format(e))

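The dropped-files branch above feeds the raw HTTP response bytes straight into tarfile via a StringIO file object, so the archive never touches disk before extraction. A minimal Python 2 round-trip sketch of that idea (the archive member is made up for the demo):

import tarfile
from cStringIO import StringIO

# build a one-member tar archive entirely in memory
payload = StringIO()
tar_out = tarfile.open(fileobj=payload, mode='w')
data = 'hello\n'
info = tarfile.TarInfo(name='hello.txt')
info.size = len(data)
tar_out.addfile(info, StringIO(data))
tar_out.close()

# re-open it from the in-memory bytes, as the module above does
with tarfile.open(fileobj=StringIO(payload.getvalue())) as tar_in:
    for member in tar_in:
        print member.name, tar_in.extractfile(member).read()
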
Example 42

Project: OwnTube
Source File: track.py
View license
    def get_infopage(self):
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')
            
            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')

            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')

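The tracker renders its whole info page into a StringIO buffer and ships s.getvalue() as the response body in one piece. A stripped-down Python 2 sketch of that render-into-a-buffer style (render_rows and its inputs are hypothetical):

from cStringIO import StringIO

def render_rows(rows):
    # build the markup incrementally, return it as one string
    s = StringIO()
    s.write('<table>\n')
    for name, count in rows:
        s.write('<tr><td>%s</td><td>%d</td></tr>\n' % (name, count))
    s.write('</table>\n')
    return s.getvalue()

print render_rows([('alpha', 3), ('beta', 7)])
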
Example 43

View license
    def test_process_scpv1(self):

        # TestCommand.process should complain if supports_getinfo == False
        # We support dynamic configuration, not static

        # The exception line number may change, so we're using a regex match instead of a string match

        expected = re.compile(
            r'error_message=RuntimeError at ".+search_command\.py", line \d\d\d : Command test appears to be '
            r'statically configured for search command protocol version 1 and static configuration is unsupported by '
            r'splunklib.searchcommands. Please ensure that default/commands.conf contains this stanza:\n'
            r'\[test\]\n'
            r'filename = test.py\n'
            r'enableheader = true\n'
            r'outputheader = true\n'
            r'requires_srinfo = true\n'
            r'supports_getinfo = true\n'
            r'supports_multivalues = true\n'
            r'supports_rawargs = true')

        argv = ['test.py', 'not__GETINFO__or__EXECUTE__', 'option=value', 'fieldname']
        command = TestCommand()
        result = StringIO()

        self.assertRaises(SystemExit, command.process, argv, ofile=result)
        self.assertRegexpMatches(result.getvalue(), expected)

        # TestCommand.process should return configuration settings on Getinfo probe

        argv = ['test.py', '__GETINFO__', 'required_option_1=value', 'required_option_2=value']
        command = TestCommand()
        ifile = StringIO('\n')
        result = StringIO()

        self.assertEqual(str(command.configuration), '')

        self.assertEqual(
            repr(command.configuration),
            "[(u'clear_required_fields', None, [1]), (u'distributed', None, [2]), (u'generates_timeorder', None, [1]), "
            "(u'generating', None, [1, 2]), (u'maxinputs', None, [2]), (u'overrides_timeorder', None, [1]), "
            "(u'required_fields', None, [1, 2]), (u'requires_preop', None, [1]), (u'retainsevents', None, [1]), "
            "(u'run_in_preview', None, [2]), (u'streaming', None, [1]), (u'streaming_preop', None, [1, 2]), "
            "(u'type', None, [2])]")

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except BaseException as error:
            self.fail('{0}: {1}: {2}\n'.format(type(error).__name__, error, result.getvalue()))

        self.assertEqual('\r\n\r\n\r\n', result.getvalue())  # No message header and no configuration settings

        ifile = StringIO('\n')
        result = StringIO()

        # We might also put this sort of code into our SearchCommand.prepare override ...

        configuration = command.configuration

        # SCP v1/v2 configuration settings
        configuration.generating = True
        configuration.required_fields = ['foo', 'bar']
        configuration.streaming_preop = 'some streaming command'

        # SCP v1 configuration settings
        configuration.clear_required_fields = True
        configuration.generates_timeorder = True
        configuration.overrides_timeorder = True
        configuration.requires_preop = True
        configuration.retainsevents = True
        configuration.streaming = True

        # SCP v2 configuration settings (SCP v1 requires that maxinputs and run_in_preview are set in commands.conf)
        configuration.distributed = True
        configuration.maxinputs = 50000
        configuration.run_in_preview = True
        configuration.type = 'streaming'

        self.assertEqual(
            str(command.configuration),
            'clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", '
            'required_fields="[u\'foo\', u\'bar\']", requires_preop="True", retainsevents="True", streaming="True", '
            'streaming_preop="some streaming command"')

        self.assertEqual(
            repr(command.configuration),
            "[(u'clear_required_fields', True, [1]), (u'distributed', True, [2]), (u'generates_timeorder', True, [1]), "
            "(u'generating', True, [1, 2]), (u'maxinputs', 50000, [2]), (u'overrides_timeorder', True, [1]), "
            "(u'required_fields', [u'foo', u'bar'], [1, 2]), (u'requires_preop', True, [1]), "
            "(u'retainsevents', True, [1]), (u'run_in_preview', True, [2]), (u'streaming', True, [1]), "
            "(u'streaming_preop', u'some streaming command', [1, 2]), (u'type', u'streaming', [2])]")

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except BaseException as error:
            self.fail('{0}: {1}: {2}\n'.format(type(error).__name__, error, result.getvalue()))

        result.reset()
        reader = csv.reader(result)
        self.assertEqual([], reader.next())
        observed = dict(izip(reader.next(), reader.next()))
        self.assertRaises(StopIteration, reader.next)

        expected = {
            'clear_required_fields': '1',                '__mv_clear_required_fields': '',
            'generating': '1',                           '__mv_generating': '',
            'generates_timeorder': '1',                  '__mv_generates_timeorder': '',
            'overrides_timeorder': '1',                  '__mv_overrides_timeorder': '',
            'requires_preop': '1',                       '__mv_requires_preop': '',
            'required_fields': 'foo,bar',                '__mv_required_fields': '',
            'retainsevents': '1',                        '__mv_retainsevents': '',
            'streaming': '1',                            '__mv_streaming': '',
            'streaming_preop': 'some streaming command', '__mv_streaming_preop': '',
        }

        self.assertDictEqual(expected, observed)  # No message header and no configuration settings

        for action in '__GETINFO__', '__EXECUTE__':

            # TestCommand.process should produce an error record on parser errors

            argv = [
                'test.py', action, 'required_option_1=value', 'required_option_2=value', 'undefined_option=value',
                'fieldname_1', 'fieldname_2']

            command = TestCommand()
            ifile = StringIO('\n')
            result = StringIO()

            self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)
            self.assertTrue(
                'error_message=Unrecognized test command option: undefined_option="value"\r\n\r\n',
                result.getvalue())

            # TestCommand.process should produce an error record when required options are missing

            argv = ['test.py', action, 'required_option_2=value', 'fieldname_1']
            command = TestCommand()
            ifile = StringIO('\n')
            result = StringIO()

            self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)

            self.assertTrue(
                'error_message=A value for test command option required_option_1 is required\r\n\r\n',
                result.getvalue())

            argv = ['test.py', action, 'fieldname_1']
            command = TestCommand()
            ifile = StringIO('\n')
            result = StringIO()

            self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)

            self.assertTrue(
                'error_message=Values for these test command options are required: required_option_1, required_option_2'
                '\r\n\r\n',
                result.getvalue())

        # TestStreamingCommand.process should exit on processing exceptions

        ifile = StringIO('\naction\r\nraise_error\r\n')
        argv = ['test.py', '__EXECUTE__']
        command = TestStreamingCommand()
        result = StringIO()

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except SystemExit as error:
            self.assertNotEqual(error.code, 0)
            self.assertRegexpMatches(
                result.getvalue(),
                r'^error_message=RuntimeError at ".+", line \d+ : Testing\r\n\r\n$')
        except BaseException as error:
            self.fail('Expected SystemExit, but caught {}: {}'.format(type(error).__name__, error))
        else:
            self.fail('Expected SystemExit, but no exception was raised')

        # Command.process should provide access to search results info
        info_path = os.path.join(
            self._package_directory, 'recordings', 'scpv1', 'Splunk-6.3', 'countmatches.execute.dispatch_dir',
            'externSearchResultsInfo.csv')

        ifile = StringIO('infoPath:' + info_path + '\n\naction\r\nget_search_results_info\r\n')
        argv = ['test.py', '__EXECUTE__']
        command = TestStreamingCommand()
        result = StringIO()

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except BaseException as error:
            self.fail('Expected no exception, but caught {}: {}'.format(type(error).__name__, error))
        else:
            self.assertRegexpMatches(
                result.getvalue(),
                r'^\r\n'
                r'('
                r'data,__mv_data,_serial,__mv__serial\r\n'
                r'"\{.*u\'is_summary_index\': 0, .+\}",,0,'
                r'|'
                r'_serial,__mv__serial,data,__mv_data\r\n'
                r'0,,"\{.*u\'is_summary_index\': 0, .+\}",'
                r')'
                r'\r\n$'
            )

        # TestStreamingCommand.process should provide access to a service object when search results info is available

        self.assertIsInstance(command.service, Service)

        self.assertEqual(command.service.authority,
                         command.search_results_info.splunkd_uri)

        self.assertEqual(command.service.scheme,
                         command.search_results_info.splunkd_protocol)

        self.assertEqual(command.service.port,
                         command.search_results_info.splunkd_port)

        self.assertEqual(command.service.token,
                         command.search_results_info.auth_token)

        self.assertEqual(command.service.namespace.app,
                         command.search_results_info.ppc_app)

        self.assertEqual(command.service.namespace.owner,
                         None)
        self.assertEqual(command.service.namespace.sharing,
                         None)

        # Command.process should not provide access to search results info or a service object when the 'infoPath'
        # input header is unavailable

        ifile = StringIO('\naction\r\nget_search_results_info')
        argv = ['teststreaming.py', '__EXECUTE__']
        command = TestStreamingCommand()

        # noinspection PyTypeChecker
        command.process(argv, ifile, ofile=result)

        self.assertIsNone(command.search_results_info)
        self.assertIsNone(command.service)

        return

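Throughout this test, StringIO('\n') and StringIO() stand in for the command's input and output files, so process() can be exercised without touching the filesystem. The same idea in miniature (the process function here is a hypothetical stand-in, not the Splunk API):

from cStringIO import StringIO

def process(ifile, ofile):
    # any object with file semantics will do
    for line in ifile:
        ofile.write(line.upper())

result = StringIO()
process(StringIO('a\nb\n'), result)
assert result.getvalue() == 'A\nB\n'
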
Example 44

Project: maltrail
Source File: httpd.py
View license
def start_httpd(address=None, port=None, join=False, pem=None):
    """
    Starts HTTP server
    """

    class ThreadingServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
        def server_bind(self):
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            BaseHTTPServer.HTTPServer.server_bind(self)

        def finish_request(self, *args, **kwargs):
            try:
                BaseHTTPServer.HTTPServer.finish_request(self, *args, **kwargs)
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

    class SSLThreadingServer(ThreadingServer):
        def __init__(self, server_address, pem, HandlerClass):
            import OpenSSL  # python-openssl

            ThreadingServer.__init__(self, server_address, HandlerClass)
            ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
            ctx.use_privatekey_file(pem)
            ctx.use_certificate_file(pem)
            self.socket = OpenSSL.SSL.Connection(ctx, socket.socket(self.address_family, self.socket_type))
            self.server_bind()
            self.server_activate()

        def shutdown_request(self, request):
            try:
                request.shutdown()
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

    class ReqHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def do_GET(self):
            path, query = self.path.split('?', 1) if '?' in self.path else (self.path, "")
            params = {}
            content = None
            skip = False

            if hasattr(self, "data"):
                params.update(urlparse.parse_qs(self.data))

            if query:
                params.update(urlparse.parse_qs(query))

            for key in params:
                if params[key]:
                    params[key] = params[key][-1]

            if path == '/':
                path = "index.html"

            path = path.strip('/')
            extension = os.path.splitext(path)[-1].lower()

            if hasattr(self, "_%s" % path):
                content = getattr(self, "_%s" % path)(params)

            else:
                path = path.replace('/', os.path.sep)
                path = os.path.abspath(os.path.join(HTML_DIR, path)).strip()

                if not os.path.isfile(path) and os.path.isfile("%s.html" % path):
                    path = "%s.html" % path

                if ".." not in os.path.relpath(path, HTML_DIR) and os.path.isfile(path) and (extension not in DISABLED_CONTENT_EXTENSIONS or os.path.split(path)[-1] in CONTENT_EXTENSIONS_EXCLUSIONS):
                    mtime = time.gmtime(os.path.getmtime(path))
                    if_modified_since = self.headers.get(HTTP_HEADER.IF_MODIFIED_SINCE)

                    if if_modified_since and extension not in (".htm", ".html"):
                        if_modified_since = [_ for _ in if_modified_since.split(';') if _.upper().endswith("GMT")][0]
                        if time.mktime(mtime) <= time.mktime(time.strptime(if_modified_since, HTTP_TIME_FORMAT)):
                            self.send_response(httplib.NOT_MODIFIED)
                            self.send_header(HTTP_HEADER.CONNECTION, "close")
                            skip = True

                    if not skip:
                        content = open(path, "rb").read()
                        last_modified = time.strftime(HTTP_TIME_FORMAT, mtime)
                        self.send_response(httplib.OK)
                        self.send_header(HTTP_HEADER.CONNECTION, "close")
                        self.send_header(HTTP_HEADER.CONTENT_TYPE, mimetypes.guess_type(path)[0] or "application/octet-stream")
                        self.send_header(HTTP_HEADER.LAST_MODIFIED, last_modified)
                        if extension not in (".htm", ".html"):
                            self.send_header(HTTP_HEADER.EXPIRES, "Sun, 17-Jan-2038 19:14:07 GMT")        # Reference: http://blog.httpwatch.com/2007/12/10/two-simple-rules-for-http-caching/
                            self.send_header(HTTP_HEADER.CACHE_CONTROL, "max-age=3600, must-revalidate")  # Reference: http://stackoverflow.com/a/5084555
                        else:
                            self.send_header(HTTP_HEADER.CACHE_CONTROL, "no-cache")

                else:
                    self.send_response(httplib.NOT_FOUND)
                    self.send_header(HTTP_HEADER.CONNECTION, "close")
                    content = '<!DOCTYPE html><html lang="en"><head><title>404 Not Found</title></head><body><h1>Not Found</h1><p>The requested URL %s was not found on this server.</p></body></html>' % self.path.split('?')[0]

            if content is not None:
                for match in re.finditer(r"<\!(\w+)\!>", content):
                    name = match.group(1)
                    _ = getattr(self, "_%s" % name.lower(), None)
                    if _:
                        content = self._format(content, **{ name: _() })

                if "gzip" in self.headers.getheader(HTTP_HEADER.ACCEPT_ENCODING, ""):
                    self.send_header(HTTP_HEADER.CONTENT_ENCODING, "gzip")
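                    # gzip.GzipFile writes into any file-like object; the
                    # cStringIO buffer collects the compressed bytes in
                    # memory, so no temporary file is needed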
                    _ = cStringIO.StringIO()
                    compress = gzip.GzipFile("", "w+b", 9, _)
                    compress._stream = _
                    compress.write(content)
                    compress.flush()
                    compress.close()
                    content = compress._stream.getvalue()

                self.send_header(HTTP_HEADER.CONTENT_LENGTH, str(len(content)))

            self.end_headers()

            if content:
                self.wfile.write(content)

            self.wfile.flush()
            self.wfile.close()

        def do_POST(self):
            length = self.headers.getheader(HTTP_HEADER.CONTENT_LENGTH)
            data = self.rfile.read(int(length))
            data = urllib.unquote_plus(data)
            self.data = data
            self.do_GET()

        def get_session(self):
            retval = None
            cookie = self.headers.get(HTTP_HEADER.COOKIE)

            if cookie:
                match = re.search(r"%s\s*=\s*([^;]+)" % SESSION_COOKIE_NAME, cookie)
                if match:
                    session = match.group(1)
                    if session in SESSIONS:
                        if SESSIONS[session].client_ip != self.client_address[0]:
                            pass
                        elif SESSIONS[session].expiration > time.time():
                            retval = SESSIONS[session]
                        else:
                            del SESSIONS[session]

            return retval

        def delete_session(self):
            cookie = self.headers.get(HTTP_HEADER.COOKIE)

            if cookie:
                match = re.search(r"%s=(.+)" % SESSION_COOKIE_NAME, cookie)
                if match:
                    session = match.group(1)
                    if session in SESSIONS:
                        del SESSIONS[session]

        def version_string(self):
            return SERVER_HEADER

        def end_headers(self):
            if not hasattr(self, "_headers_ended"):
                BaseHTTPServer.BaseHTTPRequestHandler.end_headers(self)
                self._headers_ended = True

        def log_message(self, format, *args):
            return

        def finish(self):
            try:
                BaseHTTPServer.BaseHTTPRequestHandler.finish(self)
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

        def _version(self):
            return VERSION

        def _format(self, content, **params):
            if content:
                for key, value in params.items():
                    content = content.replace("<!%s!>" % key, value)

            return content

        def _login(self, params):
            valid = False

            if params.get("username") and params.get("hash") and params.get("nonce"):
                if params.get("nonce") not in DISPOSED_NONCES:
                    DISPOSED_NONCES.add(params.get("nonce"))
                    for entry in (config.USERS or []):
                        entry = re.sub(r"\s", "", entry)
                        username, stored_hash, uid, netfilter = entry.split(':')
                        if username == params.get("username"):
                            try:
                                if params.get("hash") == hashlib.sha256(stored_hash.strip() + params.get("nonce")).hexdigest():
                                    valid = True
                                    break
                            except:
                                if config.SHOW_DEBUG:
                                    traceback.print_exc()

            if valid:
                session_id = os.urandom(SESSION_ID_LENGTH).encode("hex")
                expiration = time.time() + 3600 * SESSION_EXPIRATION_HOURS

                self.send_response(httplib.OK)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                self.send_header(HTTP_HEADER.SET_COOKIE, "%s=%s; expires=%s; path=/; HttpOnly" % (SESSION_COOKIE_NAME, session_id, time.strftime(HTTP_TIME_FORMAT, time.gmtime(expiration))))

                if netfilter in ("", "0.0.0.0/0"):
                    netfilters = None
                else:
                    addresses = set()
                    netmasks = set()

                    for item in set(re.split(r"[;,]", netfilter)):
                        item = item.strip()
                        if '/' in item:
                            _ = item.split('/')[-1]
                            if _.isdigit() and int(_) >= 16:
                                lower = addr_to_int(item.split('/')[0])
                                mask = make_mask(int(_))
                                upper = lower | (0xffffffff ^ mask)
                                while lower <= upper:
                                    addresses.add(int_to_addr(lower))
                                    lower += 1
                            else:
                                netmasks.add(item)
                        elif '-' in item:
                            _ = item.split('-')
                            lower, upper = addr_to_int(_[0]), addr_to_int(_[1])
                            while lower <= upper:
                                addresses.add(int_to_addr(lower))
                                lower += 1
                        elif re.search(r"\d+\.\d+\.\d+\.\d+", item):
                            addresses.add(item)

                    netfilters = netmasks
                    if addresses:
                        netfilters.add(get_regex(addresses))

                SESSIONS[session_id] = AttribDict({"username": username, "uid": uid, "netfilters": netfilters, "expiration": expiration, "client_ip": self.client_address[0]})
            else:
                time.sleep(UNAUTHORIZED_SLEEP_TIME)
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")

            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
            content = "Login %s" % ("success" if valid else "failed")

            if not subprocess.mswindows:
                try:
                    subprocess.check_output("logger -p auth.info -t \"%s[%d]\" \"%s password for %s from %s port %s\"" % (NAME.lower(), os.getpid(), "Accepted" if valid else "Failed", params.get("username"), self.client_address[0], self.client_address[1]), stderr=subprocess.STDOUT, shell=True)
                except Exception:
                    if config.SHOW_DEBUG:
                        traceback.print_exc()

            return content

        def _logout(self, params):
            self.delete_session()
            self.send_response(httplib.FOUND)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.LOCATION, "/")

        def _whoami(self, params):
            session = self.get_session()
            username = session.username if session else ""

            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            return username

        def _check_ip(self, params):
            session = self.get_session()

            if session is None:
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                return None

            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            try:
                result_worst = worst_asns(params.get("address"))
                if result_worst:
                    result_ipcat = result_worst
                else:
                    _ = (ipcat_lookup(params.get("address")) or "").lower().split(' ')
                    result_ipcat = _[1] if _[0] == 'the' else _[0]
                return ("%s" if not params.get("callback") else "%s(%%s)" % params.get("callback")) % json.dumps({"ipcat": result_ipcat, "worst_asns": str(result_worst is not None).lower()})
            except:
                if config.SHOW_DEBUG:
                    traceback.print_exc()

        def _trails(self, params):
            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            return open(TRAILS_FILE, "rb").read()

        def _ping(self, params):
            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

            return PING_RESPONSE

        def _events(self, params):
            session = self.get_session()

            if session is None:
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                return None

            start, end, size, total = None, None, -1, None
            content = None
            log_exists = False
            dates = params.get("date", "")

            if ".." in dates:
                pass
            elif '_' not in dates:
                try:
                    date = datetime.datetime.strptime(dates, "%Y-%m-%d").strftime("%Y-%m-%d")
                    event_log_path = os.path.join(config.LOG_DIR, "%s.log" % date)
                    if os.path.exists(event_log_path):
                        range_handle = open(event_log_path, "rb")
                        log_exists = True
                except ValueError:
                    print "[!] invalid date format in request"
                    log_exists = False
            else:
                logs_data = ""
                date_interval = dates.split("_", 1)
                try:
                    start_date = datetime.datetime.strptime(date_interval[0], "%Y-%m-%d").date()
                    end_date = datetime.datetime.strptime(date_interval[1], "%Y-%m-%d").date()
                    for i in xrange(int((end_date - start_date).days) + 1):
                        date = start_date + datetime.timedelta(i)
                        event_log_path = os.path.join(config.LOG_DIR, "%s.log" % date.strftime("%Y-%m-%d"))
                        if os.path.exists(event_log_path):
                            log_handle = open(event_log_path, "rb")
                            logs_data += log_handle.read()
                            log_handle.close()

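                    # io.BytesIO gives the concatenated log data the same
                    # seek()/read() interface as the on-disk handle opened
                    # in the single-date branch above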
                    range_handle = io.BytesIO(logs_data)
                    log_exists = True
                except ValueError:
                    print "[!] invalid date format in request"
                    log_exists = False

            if log_exists:
                range_handle.seek(0, 2)
                total = range_handle.tell()
                range_handle.seek(0)

                if self.headers.get(HTTP_HEADER.RANGE):
                    match = re.search(r"bytes=(\d+)-(\d+)", self.headers[HTTP_HEADER.RANGE])
                    if match:
                        start, end = int(match.group(1)), int(match.group(2))
                        max_size = end - start + 1
                        end = min(total - 1, end)
                        size = end - start + 1

                        if start == 0 or not session.range_handle:
                            session.range_handle = range_handle

                        if session.netfilters is None:
                            session.range_handle.seek(start)
                            self.send_response(httplib.PARTIAL_CONTENT)
                            self.send_header(HTTP_HEADER.CONNECTION, "close")
                            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
                            self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes %d-%d/%d" % (start, end, total))
                            content = session.range_handle.read(size)
                        else:
                            self.send_response(httplib.OK)
                            self.send_header(HTTP_HEADER.CONNECTION, "close")
                            self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")

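                            # lines matching the session's network filters are
                            # accumulated in a cStringIO buffer until max_size
                            # is reached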
                            buffer, addresses, netmasks, regex = cStringIO.StringIO(), set(), [], ""
                            for netfilter in session.netfilters:
                                if not netfilter:
                                    continue
                                if '/' in netfilter:
                                    netmasks.append(netfilter)
                                elif re.search(r"\A[\d.]+\Z", netfilter):
                                    addresses.add(netfilter)
                                elif '\.' in netfilter:
                                    regex = r"\b(%s)\b" % netfilter
                                else:
                                    print "[!] invalid network filter '%s'" % netfilter
                                    return

                            for line in session.range_handle:
                                display = False
                                ip = None

                                if regex:
                                    match = re.search(regex, line)
                                    if match:
                                        ip = match.group(1)
                                        display = True

                                if not display and (addresses or netmasks):
                                    for match in re.finditer(r"\b(\d+\.\d+\.\d+\.\d+)\b", line):
                                        if not display:
                                            ip = match.group(1)
                                        else:
                                            break

                                        if ip in addresses:
                                            display = True
                                            break
                                        elif netmasks:
                                            for _ in netmasks:
                                                prefix, mask = _.split('/')
                                                if addr_to_int(ip) & make_mask(int(mask)) == addr_to_int(prefix):
                                                    addresses.add(ip)
                                                    display = True
                                                    break

                                if display:
                                    if ",%s" % ip in line or "%s," % ip in line:
                                        line = re.sub(r" ([\d.,]+,)?%s(,[\d.,]+)? " % re.escape(ip), " %s " % ip, line)
                                    buffer.write(line)
                                    if buffer.tell() >= max_size:
                                        break

                            content = buffer.getvalue()
                            end = start + len(content) - 1
                            self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes %d-%d/%d" % (start, end, end + 1 + max_size * (len(content) >= max_size)))

                        if len(content) < max_size:
                            session.range_handle.close()
                            session.range_handle = None

                if size == -1:
                    self.send_response(httplib.OK)
                    self.send_header(HTTP_HEADER.CONNECTION, "close")
                    self.send_header(HTTP_HEADER.CONTENT_TYPE, "text/plain")
                    self.end_headers()

                    with range_handle as f:
                        while True:
                            data = f.read(io.DEFAULT_BUFFER_SIZE)
                            if not data:
                                break
                            else:
                                self.wfile.write(data)

            else:
                self.send_response(httplib.OK)  # instead of httplib.NO_CONTENT (compatibility reasons)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                if self.headers.get(HTTP_HEADER.RANGE):
                    self.send_header(HTTP_HEADER.CONTENT_RANGE, "bytes 0-0/0")

            return content

        def _counts(self, params):
            counts = {}

            session = self.get_session()

            if session is None:
                self.send_response(httplib.UNAUTHORIZED)
                self.send_header(HTTP_HEADER.CONNECTION, "close")
                return None

            self.send_response(httplib.OK)
            self.send_header(HTTP_HEADER.CONNECTION, "close")
            self.send_header(HTTP_HEADER.CONTENT_TYPE, "application/json")

            match = re.search(r"\d+\-\d+\-\d+", params.get("from", ""))
            if match:
                min_ = datetime.datetime.strptime(match.group(0), DATE_FORMAT)
            else:
                min_ = datetime.datetime.fromtimestamp(0)

            match = re.search(r"\d+\-\d+\-\d+", params.get("to", ""))
            if match:
                max_ = datetime.datetime.strptime(match.group(0), DATE_FORMAT)
            else:
                max_ = datetime.datetime.now()

            min_ = min_.replace(hour=0, minute=0, second=0, microsecond=0)
            max_ = max_.replace(hour=23, minute=59, second=59, microsecond=999999)

            for filepath in sorted(glob.glob(os.path.join(config.LOG_DIR, "*.log"))):
                filename = os.path.basename(filepath)
                if not re.search(r"\A\d{4}-\d{2}-\d{2}\.log\Z", filename):
                    continue
                try:
                    current = datetime.datetime.strptime(os.path.splitext(filename)[0], DATE_FORMAT)
                except:
                    if config.SHOW_DEBUG:
                        traceback.print_exc()
                else:
                    if min_ <= current <= max_:
                        timestamp = int(time.mktime(current.timetuple()))
                        size = os.path.getsize(filepath)
                        with open(filepath, "rb") as f:
                            content = f.read(io.DEFAULT_BUFFER_SIZE)
                            if size >= io.DEFAULT_BUFFER_SIZE:
                                total = 1.0 * content.count('\n') * size / io.DEFAULT_BUFFER_SIZE
                                counts[timestamp] = int(round(total / 100) * 100)
                            else:
                                counts[timestamp] = content.count('\n')

            return json.dumps(counts)

    class SSLReqHandler(ReqHandler):
        def setup(self):
            self.connection = self.request
            self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
            self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)

    try:
        if pem:
            server = SSLThreadingServer((address or '', int(port) if str(port or "").isdigit() else 0), pem, SSLReqHandler)
        else:
            server = ThreadingServer((address or '', int(port) if str(port or "").isdigit() else 0), ReqHandler)
    except Exception as ex:
        if "Address already in use" in str(ex):
            exit("[!] another instance already running")
        elif "Name or service not known" in str(ex):
            exit("[!] invalid configuration value for 'HTTP_ADDRESS' ('%s')" % config.HTTP_ADDRESS)
        elif "Cannot assign requested address" in str(ex):
            exit("[!] can't use configuration value for 'HTTP_ADDRESS' ('%s')" % config.HTTP_ADDRESS)
        else:
            raise

    print "[i] starting HTTP%s server at 'http%s://%s:%d/'" % ('S' if pem else "", 's' if pem else "", server.server_address[0], server.server_address[1])

    print "[o] running..."

    if join:
        server.serve_forever()
    else:
        thread = threading.Thread(target=server.serve_forever)
        thread.daemon = True
        thread.start()
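
The filtering branch above is the interesting part for this API: matching log lines are accumulated in a cStringIO buffer until a byte budget is exhausted, and the buffer's tell()/getvalue() drive the Content-Range arithmetic. A minimal sketch of that pattern in isolation (the names filter_ok and MAX_SIZE are illustrative, not from the project):

import cStringIO

MAX_SIZE = 1024 * 1024  # hypothetical per-response byte budget

def filtered_chunk(lines, filter_ok):
    buffer = cStringIO.StringIO()
    for line in lines:
        if filter_ok(line):
            buffer.write(line)
            if buffer.tell() >= MAX_SIZE:  # stop once the budget is spent
                break
    return buffer.getvalue()

print filtered_chunk(["a 1.2.3.4\n", "b 5.6.7.8\n"], lambda _: "1.2.3.4" in _)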

Example 45

Project: pymo
Source File: config.py
View license
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # Not valid JSON - fall back to applying the new
                        # configuration in fileConfig format.
                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error, e:
                if not isinstance(e.args, tuple):
                    raise
                else:
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
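
Here cStringIO.StringIO turns the raw bytes received over the socket into the file-like object that fileConfig() expects, so the new configuration never touches disk. A client for the wire protocol described in handle() takes only a few lines (the config path is hypothetical; 9030 is the stdlib's DEFAULT_LOGGING_CONFIG_PORT):

import socket
import struct

conf = open("logging.conf", "rb").read()  # hypothetical config file
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", 9030))
s.send(struct.pack(">L", len(conf)))  # 4-byte big-endian length prefix
s.send(conf)                          # then the config itself
s.close()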

Example 46

Project: robofab
Source File: glifLib.py
View license
def writeGlyphToString(glyphName, glyphObject=None, drawPointsFunc=None, writer=None):
	"""Return .glif data for a glyph as a UTF-8 encoded string.
	The 'glyphObject' argument can be any kind of object (even None);
	the writeGlyphToString() method will attempt to get the following
	attributes from it:
		"width"     the advance with of the glyph
		"unicodes"  a list of unicode values for this glyph
		"note"      a string
		"lib"       a dictionary containing custom data

	All attributes are optional: if 'glyphObject' doesn't
	have the attribute, it will simply be skipped.

	To write outline data to the .glif file, writeGlyphToString() needs
	a function (any callable object actually) that will take one
	argument: an object that conforms to the PointPen protocol.
	The function will be called by writeGlyphToString(); it has to call the
	proper PointPen methods to transfer the outline to the .glif file.
	"""
	if writer is None:
		try:
			from xmlWriter import XMLWriter
		except ImportError:
			# try the other location
			from fontTools.misc.xmlWriter import XMLWriter
		aFile = StringIO()
		writer = XMLWriter(aFile, encoding="UTF-8")
	else:
		aFile = None
	writer.begintag("glyph", [("name", glyphName), ("format", "1")])
	writer.newline()

	width = getattr(glyphObject, "width", None)
	if width is not None:
		if not isinstance(width, (int, float)):
			raise GlifLibError, "width attribute must be int or float"
		writer.simpletag("advance", width=repr(width))
		writer.newline()

	unicodes = getattr(glyphObject, "unicodes", None)
	if unicodes:
		if isinstance(unicodes, int):
			unicodes = [unicodes]
		for code in unicodes:
			if not isinstance(code, int):
				raise GlifLibError, "unicode values must be int"
			hexCode = hex(code)[2:].upper()
			if len(hexCode) < 4:
				hexCode = "0" * (4 - len(hexCode)) + hexCode
			writer.simpletag("unicode", hex=hexCode)
			writer.newline()

	note = getattr(glyphObject, "note", None)
	if note is not None:
		if not isinstance(note, (str, unicode)):
			raise GlifLibError, "note attribute must be str or unicode"
		note = note.encode('utf-8')
		writer.begintag("note")
		writer.newline()
		for line in note.splitlines():
			writer.write(line.strip())
			writer.newline()
		writer.endtag("note")
		writer.newline()

	if drawPointsFunc is not None:
		writer.begintag("outline")
		writer.newline()
		pen = GLIFPointPen(writer)
		drawPointsFunc(pen)
		writer.endtag("outline")
		writer.newline()

	lib = getattr(glyphObject, "lib", None)
	if lib:
		from robofab.plistlib import PlistWriter
		if not isinstance(lib, dict):
			lib = dict(lib)
		writer.begintag("lib")
		writer.newline()
		plistWriter = PlistWriter(writer.file, indentLevel=writer.indentlevel,
				indent=writer.indentwhite, writeHeader=False)
		plistWriter.writeValue(lib)
		writer.endtag("lib")
		writer.newline()

	writer.endtag("glyph")
	writer.newline()
	if aFile is not None:
		return aFile.getvalue()
	else:
		return None
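
The StringIO here is simply the in-memory file behind XMLWriter: when no writer is supplied, the glyph XML is written into a buffer and returned with getvalue(). The same pattern without the fontTools dependency, as a sketch (glyph_stub is illustrative, not part of glifLib):

from cStringIO import StringIO

def glyph_stub(name):
    f = StringIO()
    f.write('<glyph name="%s" format="1">\n' % name)
    f.write('</glyph>\n')
    return f.getvalue()  # the whole document as one string

print glyph_stub("A"),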

Example 47

View license
    def handle(self, *args, **kwargs):
        if len(args) > 0:
            if args[0] in ("trusted", "possible", "full"):
                display = args[0]
            else:
                print "Unexpected argument '%s'.  Options are 'trusted' or 'full'" % args[0]
                return
        else:
            display = 'full'

        cursor = connection.cursor()
        # ORM is way too slow for this on 100,000+ rows.
        cursor.execute("""
            SELECT DISTINCT a.entity_id,a.alias,m.state,m.party,m.seat FROM
            matchbox_entityalias a
            LEFT JOIN politician_metadata_latest_cycle_view m ON m.entity_id=a.entity_id
            LEFT JOIN matchbox_entity e ON m.entity_id=e.id
            WHERE e.type = %s
            """, ['politician'])
        rows = cursor.fetchall()
        # Add person name classes
        rows = [(e, PersonName(fix_name(a or "")), s or "", p or "", o or "") for e,a,s,p,o in rows]
        by_last_name = defaultdict(list)

        # group all entities by last name
        for row in rows:
            by_last_name[row[1].last].append(row)

        #count = 0
        #grand_total = len(rows)
        totals = defaultdict(int)
        groups = defaultdict(list)

        for last_name, entities in by_last_name.iteritems():
            #print count, grand_total, last_name, len(entities)
            #count += len(entities)

            # for each last name, split entities into groups of state and federal politicians;
            # this makes all the "left sides" of the matches federal and all the right sides state
            fed_entities = [entity for entity in entities if entity[4].startswith('federal')]
            state_entities = [entity for entity in entities if entity[4].startswith('state')]


            for eid1, name1, state1, party1, office1 in fed_entities:

                for eid2, name2, state2, party2, office2 in state_entities:
                    # skip if maximal fuzziness fails
                    if not name1.matches(name2):
                        continue

                    state_checks = {
                        'same state': state1 == state2,
                        'diff state': state1 != state2,
                        'missing one state': (not state1 or not state2) and state1 != state2,
                        'missing two states': not state1 and not state2,
                    }
                    party_checks = {
                        'same party': party1 == party2,
                        'diff party; both 3rd': party1 not in "RD" and
                            party2 not in "RD" and party1 != party2,
                        'diff party; one 3rd': party1 != party2 and
                            (party1 not in "RD" or party2 not in "RD") and
                            (party1 in "RD" or party2 in "RD"),
                        'diff party; R or D': party1 != party2 and
                            (party1 in "RD" and party2 in "RD"),
                    }
                    # Check all combinations of name matching conditions
                    all_conditions = ('missing_middle', 'nicknames', 'missing_suffix',
                            'initials', 'first_as_middle')

                    name_checks = {
                        'exact': name1.matches(name2, exact=True)
                    }

                    def check(conditions):
                        if len(conditions) == 0:
                            return name_checks['exact']
                        key = ", ".join(conditions)
                        name_checks[key] = name_checks.get(key, (
                            not check(conditions[:-1]) and
                            name1.matches(name2, exact=True, **dict((c, True) for c in conditions))
                        ))
                        return name_checks[key]

                    def get_minimum_match():
                        for r in range(len(all_conditions)):
                            for conds in combinations(all_conditions, r):
                                if check(conds):
                                    return True
                    get_minimum_match()

                    for n1, c1 in state_checks.iteritems():
                        if c1:
                            for n2, c2 in party_checks.iteritems():
                                if c2:
                                    for n3, c3 in name_checks.iteritems():
                                        if c3:
                                            key = " | ".join((n1, n2, n3))
                                            totals[key] += 1
                                            match = (
                                                (eid1, name1.name, state1, party1, office1),
                                                (eid2, name2.name, state2, party2, office2)
                                            )
                                            # names can have multiple aliases which cause duplicate entity matches
                                            # don't add these
                                            if match not in groups[key]:
                                                groups[key].append(match)

                                            break



        if display == 'full':
            pprint(dict(totals))
            for group in sorted(groups.keys()):
                print group, len(groups[group])
                for n1, n2 in groups[group]:
                    print " ", n1, n2
        elif display in ('trusted', 'possible'):
            out = StringIO()
            writer = csv.writer(out)
            matches = getattr(self, display)
            for group in matches:
                for n1, n2 in groups[group]:
                    if not n1[1] in self.excluded and not n2[1] in self.excluded:
                        writer.writerow(n1 + n2)
            print out.getvalue()
            out.close()
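
In the 'trusted'/'possible' branch, csv.writer only needs an object with a write() method, so a StringIO stands in for a real file and the finished CSV is printed from getvalue(). The core of that pattern (row contents are made up for the sketch):

import csv
from cStringIO import StringIO

out = StringIO()
writer = csv.writer(out)
writer.writerow(("id1", "John Smith", "CA", "D", "federal:senate"))
writer.writerow(("id2", "Jon Smith", "CA", "D", "state:governor"))
print out.getvalue(),
out.close()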

Example 48

Project: ssbench
Source File: reporter.py
View license
    def generate_default_report(self, output_csv=False):
        """Format a default summary report based on calculated statistics for
        an executed scenario.

        :returns: A report (string) suitable for printing, emailing, etc.
        """

        stats = self.stats
        template = Template(self.scenario_template())
        tmpl_vars = {
            'size_data': [],
            'stat_list': [
                ('TOTAL', stats['agg_stats'], stats['size_stats']),
                ('CREATE', stats['op_stats'][ssbench.CREATE_OBJECT],
                 stats['op_stats'][ssbench.CREATE_OBJECT]['size_stats']),
                ('READ', stats['op_stats'][ssbench.READ_OBJECT],
                 stats['op_stats'][ssbench.READ_OBJECT]['size_stats']),
                ('UPDATE', stats['op_stats'][ssbench.UPDATE_OBJECT],
                 stats['op_stats'][ssbench.UPDATE_OBJECT]['size_stats']),
                ('DELETE', stats['op_stats'][ssbench.DELETE_OBJECT],
                 stats['op_stats'][ssbench.DELETE_OBJECT]['size_stats']),
            ],
            'agg_stats': stats['agg_stats'],
            'nth_pctile': stats['nth_pctile'],
            'start_time': datetime.utcfromtimestamp(
                stats['time_series']['start_time']
            ).strftime(REPORT_TIME_FORMAT),
            'stop_time': datetime.utcfromtimestamp(
                stats['time_series']['stop']).strftime(REPORT_TIME_FORMAT),
            'duration': stats['time_series']['stop']
            - stats['time_series']['start_time'],
            'jobs_per_worker_stats': stats['jobs_per_worker_stats'],
            'weighted_c': 0.0,
            'weighted_r': 0.0,
            'weighted_u': 0.0,
            'weighted_d': 0.0,
        }
        for size_data in self.scenario.sizes_by_name.values():
            if size_data['size_min'] == size_data['size_max']:
                size_range = '%-15s' % (
                    self._format_bytes(size_data['size_min']),)
            else:
                size_range = '%s - %s' % (
                    self._format_bytes(size_data['size_min']),
                    self._format_bytes(size_data['size_max']))
            initial_files = self.scenario._scenario_data['initial_files']
            initial_total = sum(initial_files.values())
            pct_total = (initial_files.get(size_data['name'], 0)
                         / float(initial_total) * 100.0)
            tmpl_vars['size_data'].append({
                'crud_pcts': '  '.join(map(lambda p: '%2.0f' % p,
                                           size_data['crud_pcts'])),
                'size_range': size_range,
                'size_name': size_data['name'],
                'pct_total_ops': '%3.0f%%' % pct_total,
            })
            tmpl_vars['weighted_c'] += \
                pct_total * size_data['crud_pcts'][0] / 100.0
            tmpl_vars['weighted_r'] += \
                pct_total * size_data['crud_pcts'][1] / 100.0
            tmpl_vars['weighted_u'] += \
                pct_total * size_data['crud_pcts'][2] / 100.0
            tmpl_vars['weighted_d'] += \
                pct_total * size_data['crud_pcts'][3] / 100.0
        if output_csv:
            csv_fields = [
                'scenario_name', 'ssbench_version', 'worker_count',
                'concurrency', 'start_time', 'stop_time', 'duration',
                'delete_after']
            csv_data = {
                'scenario_name': self.scenario.name,
                'ssbench_version': self.scenario.version,
                'worker_count': tmpl_vars['agg_stats']['worker_count'],
                'concurrency': self.scenario.user_count,
                'start_time': tmpl_vars['start_time'],
                'stop_time': tmpl_vars['stop_time'],
                'duration': tmpl_vars['duration'],
                'delete_after': str(self.scenario.delete_after),
            }
            for label, stats, sstats in tmpl_vars['stat_list']:
                label_lc = label.lower()
                if stats.get('req_count', 0):
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_count' % label_lc, stats['req_count'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_errors' % label_lc,
                                     stats['errors'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_retries' % label_lc,
                                     stats['retries'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_retry_rate' % label_lc,
                                     '%f' % stats['retry_rate'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_avg_req_per_s' % label_lc,
                                     stats['avg_req_per_sec'])
                    self._add_stats_for(csv_fields, csv_data, label, 'all',
                                        stats, tmpl_vars['nth_pctile'])
                    for size_str, per_size_stats in sstats.iteritems():
                        if per_size_stats:
                            self._add_stats_for(csv_fields, csv_data, label,
                                                size_str, per_size_stats,
                                                tmpl_vars['nth_pctile'])
            csv_file = StringIO()
            csv_writer = DictWriter(csv_file, csv_fields,
                                    lineterminator='\n',
                                    quoting=csv.QUOTE_NONNUMERIC)
            csv_writer.writeheader()
            csv_writer.writerow(csv_data)
            return csv_file.getvalue()
        else:
            return template.render(scenario=self.scenario, **tmpl_vars)
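
The CSV branch uses the same in-memory-file trick with DictWriter: the header and a data row are rendered into a StringIO and the report is just its getvalue(). A self-contained sketch with made-up fields and values:

import csv
from cStringIO import StringIO

csv_fields = ["scenario_name", "worker_count", "duration"]
csv_data = {"scenario_name": "demo", "worker_count": 4, "duration": 12.5}

csv_file = StringIO()
csv_writer = csv.DictWriter(csv_file, csv_fields, lineterminator='\n',
                            quoting=csv.QUOTE_NONNUMERIC)
csv_writer.writeheader()
csv_writer.writerow(csv_data)
print csv_file.getvalue(),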

Example 49

Project: babble
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        content = open(filename,'rb').read()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75
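
Note that this example builds the multipart body with the pure-Python StringIO.StringIO; cStringIO.StringIO would behave identically for byte strings. The body-building loop, trimmed to its essentials (the boundary value and form fields here are arbitrary):

import StringIO

boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary

body = StringIO.StringIO()
for key, value in (('name', 'demo'), ('version', '1.0')):
    body.write(sep_boundary)
    body.write('\nContent-Disposition: form-data; name="%s"' % key)
    body.write("\n\n")
    body.write(value)
body.write(sep_boundary + '--')  # closing boundary
body.write("\n")
print body.getvalue()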

Example 50

View license
@ModuleInfo.plugin('wb.util.generateLaravel5Migration',
                   caption='Export Laravel 5 Migration',
                   input=[wbinputs.currentCatalog()],
                   groups=['Catalog/Utilities', 'Menu/Catalog']
                   )
@ModuleInfo.export(grt.INT, grt.classes.db_Catalog)
def generateLaravel5Migration(cat):

    def create_tree(out, schema, is_main_schema):
        table_tree = {}
        for tbl in sorted(schema.tables, key=lambda table: table.name):
            table_references = []

            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    table_references.append(fkey.referencedColumns[0].owner.name)

            table_tree[tbl.name] = table_references

        d = dict((k, set(table_tree[k])) for k in table_tree)
        r = []
        while d:
            # values not in keys (items without dep)
            t = set(i for v in d.values() for i in v) - set(d.keys())
            # and keys without value (items without dep)
            t.update(k for k, v in d.items() if not v)
            # can be done right away
            r.append(t)
            # and cleaned up
            d = dict(((k, v - t) for k, v in d.items() if v))
        return r

    def export_schema(out, schema, is_main_schema, table_tree):
        if len(schema.tables) == 0:
            return

        foreign_keys = {}
        global migration_tables
        global migrations
        tables = sorted(schema.tables, key=lambda table: table.name)
        ti = 0

        for reference_tables in table_tree:
            for reference in reference_tables:
                for tbl in tables:

                    if tbl.name != reference:
                        continue

                    table_name = tbl.name
                    components = table_name.split('_')

                    migration_tables.append(table_name)
                    migrations[ti] = []

                    migrations[ti].append(migrationBegginingTemplate.format(
                        tableNameCamelCase=("".join(x.title() for x in components[0:])),
                        tableName=table_name
                    ))

                    created_at = created_at_nullable \
                        = updated_at \
                        = updated_at_nullable \
                        = deleted_at \
                        = timestamps \
                        = timestamps_nullable = False

                    for col in tbl.columns:
                        if col.name == 'created_at':
                            created_at = True
                            if col.isNotNull != 1:
                                created_at_nullable = True
                        elif col.name == 'updated_at':
                            updated_at = True
                            if col.isNotNull != 1:
                                updated_at_nullable = True

                    if created_at is True and updated_at is True and created_at_nullable is True and updated_at_nullable is True:
                        timestamps_nullable = True
                    elif created_at is True and updated_at is True:
                        timestamps = True

                    pk_column = None
                    for col in tbl.columns:
                        if (col.name == 'created_at' or col.name == 'updated_at') and (
                                        timestamps is True or timestamps_nullable is True):
                            continue

                        if col.name == 'deleted_at':
                            deleted_at = True
                            continue

                        if col.simpleType:
                            col_type = col.simpleType.name
                            col_flags = col.simpleType.flags
                        else:
                            col_type = col.userType.name
                            col_flags = col.flags

                        primary_key = [i for i in tbl.indices if i.isPrimary == 1]
                        primary_key = primary_key[0] if len(primary_key) > 0 else None

                        if primary_key and len(primary_key.columns) == 1:
                            pk_column = primary_key.columns[0].referencedColumn

                        if col == pk_column:
                            if col_type == 'BIGINT':
                                col_type = 'BIGINCREMENTS'
                            else:
                                col_type = 'INCREMENTS'

                        col_data = '\''
                        if typesDict[col_type] == 'char':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))
                        elif typesDict[col_type] == 'decimal':
                            if col.precision > -1 and col.scale > -1:
                                col_data = '\', %s, %s' % (str(col.precision), str(col.scale))
                        elif typesDict[col_type] == 'double':
                            if col.precision > -1 and col.length > -1:
                                col_data = '\', %s, %s' % (str(col.length), str(col.precision))
                        elif typesDict[col_type] == 'enum':
                            col_data = '\', [%s]' % (col.datatypeExplicitParams[1:-1])
                        elif typesDict[col_type] == 'string':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))

                        if col.name == 'remember_token' and typesDict[col_type] == 'string' and col.length == 100:
                            migrations[ti].append('            $table->rememberToken();\n')
                        elif typesDict[col_type]:
                            migrations[ti].append(
                                '            $table->%s(\'%s%s)' % (typesDict[col_type], col.name, col_data))
                            if typesDict[col_type] == 'integer' and 'UNSIGNED' in col.flags:
                                migrations[ti].append('->unsigned()')
                            if col.isNotNull != 1:
                                migrations[ti].append('->nullable()')
                            if col.defaultValue != '' and col.defaultValueIsNull != 0:
                                migrations[ti].append('->default(NULL)')
                            elif col.defaultValue != '':
                                migrations[ti].append('->default(%s)' % col.defaultValue)
                            if col.comment != '':
                                migrations[ti].append('->comment(\'%s\')' % col.comment)
                            migrations[ti].append(';\n')

                    if tbl.indices:
                        migrations[ti].append("            # Indexes\n")

                        for column in tbl.indices:
                            for index in column.columns:
                                index_type = index.owner.indexType.lower()
                                key = index.referencedColumn.name

                                # Do not add index for increments
                                if not column.isPrimary:

                                    index_key_template = "            $table->{indexType}('{key}');\n".format(
                                        indexType=index_type,
                                        key=key
                                    )
                                    migrations[ti].append(index_key_template)

                    if deleted_at is True:
                        migrations[ti].append('            $table->softDeletes();\n')
                    if timestamps is True:
                        migrations[ti].append('            $table->timestamps();\n')
                    elif timestamps_nullable is True:
                        migrations[ti].append('            $table->nullableTimestamps();\n')

                    first_foreign_created = False

                    for fkey in tbl.foreignKeys:
                        if fkey.name != '':
                            index_name = fkey.index.name
                            foreign_key = fkey.columns[0].name

                            if index_name == 'PRIMARY':
                                index_name = tbl.name + "_" + fkey.columns[0].name

                            if fkey.referencedColumns[0].owner.name in migration_tables:

                                if not first_foreign_created:
                                    migrations[ti].append('\n')
                                    first_foreign_created = True

                                migrations[ti].append(foreignKeyTemplate.format(
                                        foreignKey=foreign_key,
                                        foreignKeyName=index_name,
                                        tableKeyName=fkey.referencedColumns[0].name,
                                        foreignTableName=fkey.referencedColumns[0].owner.name,
                                        onDeleteAction=fkey.deleteRule.lower(),
                                        onUpdateAction=fkey.updateRule.lower()
                                ))

                            else:
                                if fkey.referencedColumns[0].owner.name not in foreign_keys:
                                    foreign_keys[fkey.referencedColumns[0].owner.name] = []

                                foreign_keys[fkey.referencedColumns[0].owner.name].append({
                                    'table': fkey.columns[0].owner.name,
                                    'key': foreign_key,
                                    'name': index_name,
                                    'referenced_table': fkey.referencedColumns[0].owner.name,
                                    'referenced_name': fkey.referencedColumns[0].name,
                                    'update_rule': fkey.updateRule,
                                    'delete_rule': fkey.deleteRule
                                })

                    migrations[ti].append("        });\n")

                    for fkey, fval in foreign_keys.iteritems():
                        if fkey == tbl.name:
                            keyed_tables = []
                            schema_table = 0
                            for item in fval:
                                if item['table'] not in keyed_tables:
                                    keyed_tables.append(item['table'])
                                    if schema_table == 0:
                                        foreign_table_name = item['table']
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(tableName=item['table'])
                                        )
                                        schema_table = 1
                                    elif foreign_table_name != item['table']:
                                        foreign_table_name = item['table']
                                        migrations[ti].append("        });\n")
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(tableName=item['table'])
                                        )
                                    migrations[ti].append(foreignKeyTemplate.format(
                                        foreignKey=item['key'],
                                        foreignKeyName=item['name'],
                                        tableKeyName=item['referenced_name'],
                                        foreignTableName=item['referenced_table'],
                                        onDeleteAction=item['delete_rule'].lower(),
                                        onUpdateAction=item['update_rule'].lower()
                                    ))

                            if schema_table == 1:
                                migrations[ti].append("        });\n")
                                migrations[ti].append('\n')

                    migrations[ti].append('    }\n')

                    ##########
                    # Reverse
                    ##########

                    migrations[ti].append(migrationDownTemplate)
                    migrations[ti].append(migrationEndingTemplate.format(tableName=table_name))
                    ti += 1

        return migrations

    out = cStringIO.StringIO()

    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            table_tree = create_tree(out, schema[0], schema[1])
            migrations = export_schema(out, schema[0], schema[1], table_tree)

    except GenerateLaravel5MigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1

    for name in sorted(migrations):
        out.write('Table name: {0}\n\n\n'.format(migration_tables[name]))
        out.write(''.join(migrations[name]))
        out.write('\n\n\n')

    sql_text = out.getvalue()
    out.close()

    wizard = GenerateLaravel5MigrationWizard(sql_text)
    wizard.run()

    return 0
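
As in the previous examples, cStringIO.StringIO serves as a write-only scratch file: every migration fragment is appended to out, and a single getvalue() at the end produces the text handed to the wizard. The pattern in miniature (table name and fragments are placeholders):

import cStringIO

out = cStringIO.StringIO()
out.write('Table name: users\n\n\n')
out.write(''.join(["        Schema::create('users', function (Blueprint $table) {\n",
                   "        });\n"]))
sql_text = out.getvalue()
out.close()
print sql_text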