cStringIO.StringIO

Here are examples of the Python API cStringIO.StringIO taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples
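
cStringIO is the C-optimized Python 2 implementation of an in-memory file object. Before the project examples, a minimal sketch of the core API: an empty StringIO() is writable and yields its contents via getvalue(), while one constructed from an initial string is read-only (it has no write methods).

from cStringIO import StringIO

# Writable buffer: create empty, write, then fetch everything with getvalue().
out = StringIO()
out.write('hello, ')
out.write('world')
print out.getvalue()        # 'hello, world'
out.close()

# Read-only buffer: built from an initial string; supports read() and
# iteration, but not write().
src = StringIO('line one\nline two\n')
for line in src:
    print line.rstrip()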

Example 51

Project: pymo
Source File: config.py
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # Apply new configuration.
                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error, e:
                if not isinstance(e.args, tuple):
                    raise
                else:
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
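
The handler's docstring fixes the wire format: a 4-byte big-endian length packed with struct.pack(">L", n), followed by the config file itself. A minimal client sketch under that assumption (the helper name is made up; 9030 is logging's DEFAULT_LOGGING_CONFIG_PORT):

import socket
import struct

def send_logging_config(config_text, host='localhost', port=9030):
    # Length prefix first, then the ini-style (or JSON) config payload.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    s.sendall(struct.pack('>L', len(config_text)) + config_text)
    s.close()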

Example 52

Project: robofab
Source File: glifLib.py
def writeGlyphToString(glyphName, glyphObject=None, drawPointsFunc=None, writer=None):
	"""Return .glif data for a glyph as a UTF-8 encoded string.
	The 'glyphObject' argument can be any kind of object (even None);
	the writeGlyphToString() method will attempt to get the following
	attributes from it:
		"width"     the advance with of the glyph
		"unicodes"  a list of unicode values for this glyph
		"note"      a string
		"lib"       a dictionary containing custom data

	All attributes are optional: if 'glyphObject' doesn't
	have the attribute, it will simply be skipped.

	To write outline data to the .glif file, writeGlyphToString() needs
	a function (any callable object actually) that will take one
	argument: an object that conforms to the PointPen protocol.
	The function will be called by writeGlyphToString(); it has to call the
	proper PointPen methods to transfer the outline to the .glif file.
	"""
	if writer is None:
		try:
			from xmlWriter import XMLWriter
		except ImportError:
			# try the other location
			from fontTools.misc.xmlWriter import XMLWriter
		aFile = StringIO()
		writer = XMLWriter(aFile, encoding="UTF-8")
	else:
		aFile = None
	writer.begintag("glyph", [("name", glyphName), ("format", "1")])
	writer.newline()

	width = getattr(glyphObject, "width", None)
	if width is not None:
		if not isinstance(width, (int, float)):
			raise GlifLibError, "width attribute must be int or float"
		writer.simpletag("advance", width=repr(width))
		writer.newline()

	unicodes = getattr(glyphObject, "unicodes", None)
	if unicodes:
		if isinstance(unicodes, int):
			unicodes = [unicodes]
		for code in unicodes:
			if not isinstance(code, int):
				raise GlifLibError, "unicode values must be int"
			hexCode = hex(code)[2:].upper()
			if len(hexCode) < 4:
				hexCode = "0" * (4 - len(hexCode)) + hexCode
			writer.simpletag("unicode", hex=hexCode)
			writer.newline()

	note = getattr(glyphObject, "note", None)
	if note is not None:
		if not isinstance(note, (str, unicode)):
			raise GlifLibError, "note attribute must be str or unicode"
		note = note.encode('utf-8')
		writer.begintag("note")
		writer.newline()
		for line in note.splitlines():
			writer.write(line.strip())
			writer.newline()
		writer.endtag("note")
		writer.newline()

	if drawPointsFunc is not None:
		writer.begintag("outline")
		writer.newline()
		pen = GLIFPointPen(writer)
		drawPointsFunc(pen)
		writer.endtag("outline")
		writer.newline()

	lib = getattr(glyphObject, "lib", None)
	if lib:
		from robofab.plistlib import PlistWriter
		if not isinstance(lib, dict):
			lib = dict(lib)
		writer.begintag("lib")
		writer.newline()
		plistWriter = PlistWriter(writer.file, indentLevel=writer.indentlevel,
				indent=writer.indentwhite, writeHeader=False)
		plistWriter.writeValue(lib)
		writer.endtag("lib")
		writer.newline()

	writer.endtag("glyph")
	writer.newline()
	if aFile is not None:
		return aFile.getvalue()
	else:
		return None
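
The docstring spells out a duck-typed protocol, so any object carrying the right attributes will do. A hypothetical call (FakeGlyph and drawPoints are illustrative stand-ins; the pen methods are the standard PointPen calls that GLIFPointPen expects):

class FakeGlyph(object):
	width = 250
	unicodes = [0x0041]

def drawPoints(pen):
	pen.beginPath()
	pen.addPoint((0, 0), segmentType="line")
	pen.addPoint((0, 700), segmentType="line")
	pen.addPoint((200, 700), segmentType="line")
	pen.endPath()

glifData = writeGlyphToString("A", glyphObject=FakeGlyph(), drawPointsFunc=drawPoints)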

Example 53

    def handle(self, *args, **kwargs):
        if len(args) > 0:
            if args[0] in ("trusted", "possible", "full"):
                display = args[0]
            else:
                print "Unexpected argument '%s'.  Options are 'trusted' or 'full'" % args[0]
                return
        else:
            display = 'full'

        cursor = connection.cursor()
        # ORM is way too slow for this on 100,000+ rows.
        cursor.execute("""
            SELECT DISTINCT a.entity_id,a.alias,m.state,m.party,m.seat FROM
            matchbox_entityalias a
            LEFT JOIN politician_metadata_latest_cycle_view m ON m.entity_id=a.entity_id
            LEFT JOIN matchbox_entity e ON m.entity_id=e.id
            WHERE e.type = %s
            """, ['politician'])
        rows = cursor.fetchall()
        # Add person name classes
        rows = [(e, PersonName(fix_name(a or "")), s or "", p or "", o or "") for e,a,s,p,o in rows]
        by_last_name = defaultdict(list)

        # group all entities by last name
        for row in rows:
            by_last_name[row[1].last].append(row)

        #count = 0
        #grand_total = len(rows)
        totals = defaultdict(int)
        groups = defaultdict(list)

        for last_name, entities in by_last_name.iteritems():
            #print count, grand_total, last_name, len(entities)
            #count += len(entities)

            # for each last name, split entities into groups of state and federal politicians
            # this will make all the "left sides" of the matches federal and all the right sides state
            fed_entities =  [ entity for entity in entities if entity[4].startswith('federal') ]
            state_entities =  [ entity for entity in entities if entity[4].startswith('state') ]


            for eid1, name1, state1, party1, office1 in fed_entities:

                for eid2, name2, state2, party2, office2 in state_entities:
                    # skip if maximal fuzziness fails
                    if not name1.matches(name2):
                        continue

                    state_checks = {
                        'same state': state1 == state2,
                        'diff state': state1 != state2,
                        'missing one state': (not state1 or not state2) and state1 != state2,
                        'missing two states': not state1 and not state2,
                    }
                    party_checks = {
                        'same party': party1 == party2,
                        'diff party; both 3rd': party1 not in "RD" and
                            party2 not in "RD" and party1 != party2,
                        'diff party; one 3rd': party1 != party2 and
                            (party1 not in "RD" or party2 not in "RD") and
                            (party1 in "RD" or party2 in "RD"),
                        'diff party; R or D': party1 != party2 and
                            (party1 in "RD" and party2 in "RD"),
                    }
                    # Check all combinations of name matching conditions
                    all_conditions = ('missing_middle', 'nicknames', 'missing_suffix',
                            'initials', 'first_as_middle')

                    name_checks = {
                        'exact': name1.matches(name2, exact=True)
                    }

                    def check(conditions):
                        if len(conditions) == 0:
                            return name_checks['exact']
                        key = ", ".join(conditions)
                        name_checks[key] = name_checks.get(key, (
                            not check(conditions[:-1]) and
                            name1.matches(name2, exact=True, **dict((c, True) for c in conditions))
                        ))
                        return name_checks[key]

                    def get_minimum_match():
                        for r in range(len(all_conditions)):
                            for conds in combinations(all_conditions, r):
                                if check(conds):
                                    return True
                    get_minimum_match()

                    for n1, c1 in state_checks.iteritems():
                        if c1:
                            for n2, c2 in party_checks.iteritems():
                                if c2:
                                    for n3, c3 in name_checks.iteritems():
                                        if c3:
                                            key = " | ".join((n1, n2, n3))
                                            totals[key] += 1
                                            match = (
                                                (eid1, name1.name, state1, party1, office1),
                                                (eid2, name2.name, state2, party2, office2)
                                            )
                                            # names can have multiple aliases which cause duplicate entity matches
                                            # don't add these
                                            if match not in groups[key]:
                                                groups[key].append(match)

                                            break



        if display == 'full':
            pprint(dict(totals))
            for group in sorted(groups.keys()):
                print group, len(groups[group])
                for n1, n2 in groups[group]:
                    print " ", n1, n2
        elif display in ('trusted', 'possible'):
            out = StringIO()
            writer = csv.writer(out)
            matches = getattr(self, display)
            for group in matches:
                for n1, n2 in groups[group]:
                    if not n1[1] in self.excluded and not n2[1] in self.excluded:
                        writer.writerow(n1 + n2)
            print out.getvalue()
            out.close()
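
The in-memory CSV idiom at the end of this handler is worth isolating: rows go through csv.writer into a StringIO, and getvalue() hands back the whole document as a string. A standalone sketch with placeholder rows:

import csv
from cStringIO import StringIO

out = StringIO()
writer = csv.writer(out)
writer.writerow(('id1', 'Jane Doe', 'CA', 'D', 'federal:senate'))
writer.writerow(('id2', 'John Doe', 'NY', 'R', 'state:governor'))
print out.getvalue()   # the CSV text, never touching the filesystem
out.close()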

Example 54

Project: PythonScript
Source File: upload.py
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)
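
The upload body is assembled entirely in memory: each form field is appended to a StringIO between MIME boundaries, and getvalue() flattens the buffer into the POST payload. The idiom in isolation (boundary and fields are placeholders):

from cStringIO import StringIO

boundary = '----------placeholder-boundary'
sep_boundary = '\n--' + boundary
body = StringIO()
for key, value in {'name': 'example', 'version': '1.0'}.items():
    body.write(sep_boundary)
    body.write('\nContent-Disposition: form-data; name="%s"' % key)
    body.write('\n\n')
    body.write(value)
body.write(sep_boundary + '--\n')
payload = body.getvalue()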

Example 55

Project: PythonScript
Source File: config.py
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # Apply new configuration.
                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error, e:
                if not isinstance(e.args, tuple):
                    raise
                else:
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)

Example 56

Project: ssbench
Source File: reporter.py
    def generate_default_report(self, output_csv=False):
        """Format a default summary report based on calculated statistics for
        an executed scenario.

        :returns: A report (string) suitable for printing, emailing, etc.
        """

        stats = self.stats
        template = Template(self.scenario_template())
        tmpl_vars = {
            'size_data': [],
            'stat_list': [
                ('TOTAL', stats['agg_stats'], stats['size_stats']),
                ('CREATE', stats['op_stats'][ssbench.CREATE_OBJECT],
                 stats['op_stats'][ssbench.CREATE_OBJECT]['size_stats']),
                ('READ', stats['op_stats'][ssbench.READ_OBJECT],
                 stats['op_stats'][ssbench.READ_OBJECT]['size_stats']),
                ('UPDATE', stats['op_stats'][ssbench.UPDATE_OBJECT],
                 stats['op_stats'][ssbench.UPDATE_OBJECT]['size_stats']),
                ('DELETE', stats['op_stats'][ssbench.DELETE_OBJECT],
                 stats['op_stats'][ssbench.DELETE_OBJECT]['size_stats']),
            ],
            'agg_stats': stats['agg_stats'],
            'nth_pctile': stats['nth_pctile'],
            'start_time': datetime.utcfromtimestamp(
                stats['time_series']['start_time']
            ).strftime(REPORT_TIME_FORMAT),
            'stop_time': datetime.utcfromtimestamp(
                stats['time_series']['stop']).strftime(REPORT_TIME_FORMAT),
            'duration': stats['time_series']['stop']
            - stats['time_series']['start_time'],
            'jobs_per_worker_stats': stats['jobs_per_worker_stats'],
            'weighted_c': 0.0,
            'weighted_r': 0.0,
            'weighted_u': 0.0,
            'weighted_d': 0.0,
        }
        for size_data in self.scenario.sizes_by_name.values():
            if size_data['size_min'] == size_data['size_max']:
                size_range = '%-15s' % (
                    self._format_bytes(size_data['size_min']),)
            else:
                size_range = '%s - %s' % (
                    self._format_bytes(size_data['size_min']),
                    self._format_bytes(size_data['size_max']))
            initial_files = self.scenario._scenario_data['initial_files']
            initial_total = sum(initial_files.values())
            pct_total = (initial_files.get(size_data['name'], 0)
                         / float(initial_total) * 100.0)
            tmpl_vars['size_data'].append({
                'crud_pcts': '  '.join(map(lambda p: '%2.0f' % p,
                                           size_data['crud_pcts'])),
                'size_range': size_range,
                'size_name': size_data['name'],
                'pct_total_ops': '%3.0f%%' % pct_total,
            })
            tmpl_vars['weighted_c'] += \
                pct_total * size_data['crud_pcts'][0] / 100.0
            tmpl_vars['weighted_r'] += \
                pct_total * size_data['crud_pcts'][1] / 100.0
            tmpl_vars['weighted_u'] += \
                pct_total * size_data['crud_pcts'][2] / 100.0
            tmpl_vars['weighted_d'] += \
                pct_total * size_data['crud_pcts'][3] / 100.0
        if output_csv:
            csv_fields = [
                'scenario_name', 'ssbench_version', 'worker_count',
                'concurrency', 'start_time', 'stop_time', 'duration',
                'delete_after']
            csv_data = {
                'scenario_name': self.scenario.name,
                'ssbench_version': self.scenario.version,
                'worker_count': tmpl_vars['agg_stats']['worker_count'],
                'concurrency': self.scenario.user_count,
                'start_time': tmpl_vars['start_time'],
                'stop_time': tmpl_vars['stop_time'],
                'duration': tmpl_vars['duration'],
                'delete_after': str(self.scenario.delete_after),
            }
            for label, stats, sstats in tmpl_vars['stat_list']:
                label_lc = label.lower()
                if stats.get('req_count', 0):
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_count' % label_lc, stats['req_count'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_errors' % label_lc,
                                     stats['errors'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_retries' % label_lc,
                                     stats['retries'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_retry_rate' % label_lc,
                                     '%f' % stats['retry_rate'])
                    self._add_csv_kv(csv_fields, csv_data,
                                     '%s_avg_req_per_s' % label_lc,
                                     stats['avg_req_per_sec'])
                    self._add_stats_for(csv_fields, csv_data, label, 'all',
                                        stats, tmpl_vars['nth_pctile'])
                    for size_str, per_size_stats in sstats.iteritems():
                        if per_size_stats:
                            self._add_stats_for(csv_fields, csv_data, label,
                                                size_str, per_size_stats,
                                                tmpl_vars['nth_pctile'])
            csv_file = StringIO()
            csv_writer = DictWriter(csv_file, csv_fields,
                                    lineterminator='\n',
                                    quoting=csv.QUOTE_NONNUMERIC)
            csv_writer.writeheader()
            csv_writer.writerow(csv_data)
            return csv_file.getvalue()
        else:
            return template.render(scenario=self.scenario, **tmpl_vars)
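
The CSV branch uses the DictWriter flavor of the same in-memory pattern: build the writer over a StringIO, emit a header row plus data rows, and return getvalue(). A minimal sketch with made-up fields:

import csv
from cStringIO import StringIO

buf = StringIO()
writer = csv.DictWriter(buf, ['scenario_name', 'worker_count', 'duration'],
                        lineterminator='\n', quoting=csv.QUOTE_NONNUMERIC)
writer.writeheader()
writer.writerow({'scenario_name': 'demo', 'worker_count': 4, 'duration': 12.5})
print buf.getvalue()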

Example 57

Project: tomato
Source File: swf_image_dumper.py
    def save_image(self, object_id, v):
        # Save the IDs of the images that were actually written out
        file_base = self.output_file

        if v['tag'] == 'DefineBitsLossLess' and v['format'] == 3:
            color_table_length = ord(v['data'][0])+1
            decompress_data = zlib.decompress(v['data'][1:])
            color_table = []
            for i in range(color_table_length):
                c = (ord(decompress_data[i*3]), 
                     ord(decompress_data[i*3+1]), 
                     ord(decompress_data[i*3+2]))
                color_table.append(c)
            color_map = decompress_data[color_table_length*3:]
            width = v['width']
            height = v['height']
            width = width if width % 4 == 0 else (width/4+1)* 4
            im = Image.new("RGB", (width, height), "white")
            x = y = 0
            for i in range(len(color_map)):
                im.putpixel((x,y), color_table[ord(color_map[i])])
                x += 1
                if x == width:
                    x = 0
                    y += 1

            out_file = "%s_%s.png" % (file_base, object_id)
            im.save(out_file)
            v['file_name'] = os.path.split(out_file)[1]

        elif v['tag'] == 'DefineBitsLossless2' and v['format'] == 3:
            color_table_length = ord(v['data'][0])+1
            decompress_data = zlib.decompress(v['data'][1:])
            color_table = []
            for i in range(color_table_length):
                c = (ord(decompress_data[i*4+0]), 
                     ord(decompress_data[i*4+1]), 
                     ord(decompress_data[i*4+2]),
                     ord(decompress_data[i*4+3]))
                color_table.append(c)
            color_map = decompress_data[color_table_length*4:]
            width = v['width']
            height = v['height']
            width = width if width % 4 == 0 else (width/4+1)* 4
            im = Image.new("RGB", (width, height), "white")
            x = y = 0
            for i in range(len(color_map)):
                im.putpixel((x,y), color_table[ord(color_map[i])])
                x += 1
                if x == width:
                    x = 0
                    y += 1

            out_file = "%s_%s.png" % (file_base, object_id)
            im.save(out_file)
            v['file_name'] = os.path.split(out_file)[1]

        elif v['tag'] == 'DefineBitsLossLess' and v['format'] == 5:
            decompress_data = zlib.decompress(v['data'])
            width = v['width']
            height = v['height']
            im = Image.new("RGB", (width, height), "white")
            x = y = 0
            for i in range(0, len(decompress_data), 4):
                im.putpixel((x,y), (ord(decompress_data[i+1]),
                                    ord(decompress_data[i+2]),
                                    ord(decompress_data[i+3])))
                x += 1
                if x == width:
                    x = 0
                    y += 1

            out_file = "%s_%s.png" % (file_base, object_id)
            im.save(out_file)
            v['file_name'] = os.path.split(out_file)[1]

        elif v['tag'] == 'DefineBitsLossless2' and v['format'] == 5:
            decompress_data = zlib.decompress(v['data'])
            width = v['width']
            height = v['height']
            im = Image.new("RGBA", (width, height), "white")
            x = y = 0
            for i in range(0, len(decompress_data), 4):
                im.putpixel((x,y), (ord(decompress_data[i+1]),
                                    ord(decompress_data[i+2]),
                                    ord(decompress_data[i+3]),
                                    ord(decompress_data[i+0])))
                x += 1
                if x == width:
                    x = 0
                    y += 1

            out_file = "%s_%s.png" % (file_base, object_id)
            im.save(out_file)
            v['file_name'] = os.path.split(out_file)[1]

        elif v['tag'] == 'DefineBitsJPEG2':
            data = v['data']
            if data[:4] == '\xff\xd9\xff\xd8':
                """
                See: http://pwiki.awm.jp/~yoya/?Flash/JPEG
                    JPEG SOI marker (FF D8) -> \xff\xd8
                    JPEG EOI marker (FF D9) -> \xff\xd9
                Flash stores essentially raw JPEG data as-is,
                but it is sometimes prefixed with an extra [EOI] + [SOI] pair,
                in which case that prefix must be stripped.
                """
                data = data[4:]
            im = Image.open(StringIO(data))

            v['width'] = im.size[0]
            v['height'] = im.size[1]
            out_file = "%s_%s.jpg" % (file_base, object_id)
            im.save(out_file)
            v['file_name'] = os.path.split(out_file)[1]
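
Image.open(StringIO(data)) is the standard Python 2 route for handing raw image bytes to PIL without a temporary file. A minimal sketch (the byte source is an assumption; older PIL installs import Image directly, newer ones use 'from PIL import Image'):

from cStringIO import StringIO
import Image

jpeg_bytes = open('photo.jpg', 'rb').read()   # any source of raw JPEG bytes
im = Image.open(StringIO(jpeg_bytes))
print im.size   # (width, height), as used above for v['width'] / v['height']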

Example 58

Project: teuthology
Source File: kernel.py
def install_and_reboot(ctx, config):
    """
    Install and reboot the kernel.  This mostly performs remote
    installation operations.   The code does check for Arm images
    and skips grub operations if the kernel is Arm.  Otherwise, it
    extracts kernel titles from submenu entries and makes the appropriate
    grub calls.   The assumptions here are somewhat simplified in that
    it expects kernel entries to be present under submenu entries.

    :param ctx: Context
    :param config: Configuration
    """
    procs = {}
    kernel_title = ''
    for role, src in config.iteritems():
        (role_remote,) = ctx.cluster.only(role).remotes.keys()
        if isinstance(src, str) and src.find('distro') >= 0:
            log.info('Installing distro kernel on {role}...'.format(role=role))
            install_kernel(role_remote, version=src)
            continue

        log.info('Installing kernel {src} on {role}...'.format(src=src,
                                                               role=role))
        package_type = role_remote.os.package_type
        if package_type == 'rpm':
            proc = role_remote.run(
                args=[
                    'sudo',
                    'rpm',
                    '-ivh',
                    '--oldpackage',
                    '--replacefiles',
                    '--replacepkgs',
                    remote_pkg_path(role_remote),
                ])
            install_kernel(role_remote, remote_pkg_path(role_remote))
            continue

        # TODO: Refactor this into install_kernel() so that it handles all
        # cases for both rpm and deb packages.
        proc = role_remote.run(
            args=[
                # install the kernel deb
                'sudo',
                'dpkg',
                '-i',
                remote_pkg_path(role_remote),
                ],
            )

        # collect kernel image name from the .deb
        kernel_title = get_image_version(role_remote,
                                         remote_pkg_path(role_remote))
        log.info('searching for kernel {}'.format(kernel_title))

        if kernel_title.endswith("-highbank"):
            _no_grub_link('vmlinuz', role_remote, kernel_title)
            _no_grub_link('initrd.img', role_remote, kernel_title)
            proc = role_remote.run(
                args=[
                    'sudo',
                    'shutdown',
                    '-r',
                    'now',
                    ],
                wait=False,
            )
            procs[role_remote.name] = proc
            continue

        # look for menuentry for our kernel, and collect any
        # submenu entries for their titles.  Assume that if our
        # kernel entry appears later in the file than a submenu entry,
        # it's actually nested under that submenu.  If it gets more
        # complex this will totally break.

        cmdout = StringIO()
        proc = role_remote.run(
            args=[
                'egrep',
                '(submenu|menuentry.*' + kernel_title + ').*{',
                '/boot/grub/grub.cfg'
               ],
            stdout = cmdout,
            )
        submenu_title = ''
        default_title = ''
        for l in cmdout.getvalue().split('\n'):
            fields = shlex.split(l)
            if len(fields) >= 2:
                command, title = fields[:2]
                if command == 'submenu':
                    submenu_title = title + '>'
                if command == 'menuentry':
                    if title.endswith(kernel_title):
                        default_title = title
                        break
        cmdout.close()
        log.info('submenu_title:{}'.format(submenu_title))
        log.info('default_title:{}'.format(default_title))

        proc = role_remote.run(
            args=[
                # use the title(s) to construct the content of
                # the grub menu entry, so we can default to it.
                '/bin/echo',
                '-e',
                r'cat <<EOF\nset default="' + submenu_title + \
                    default_title + r'"\nEOF\n',
                # make it look like an emacs backup file so
                # unfortunately timed update-grub runs don't pick it
                # up yet; use sudo tee so we are able to write to /etc
                run.Raw('|'),
                'sudo',
                'tee',
                '--',
                '/etc/grub.d/01_ceph_kernel.tmp~',
                run.Raw('>/dev/null'),
                run.Raw('&&'),
                'sudo',
                'chmod',
                'a+x',
                '--',
                '/etc/grub.d/01_ceph_kernel.tmp~',
                run.Raw('&&'),
                'sudo',
                'mv',
                '--',
                '/etc/grub.d/01_ceph_kernel.tmp~',
                '/etc/grub.d/01_ceph_kernel',
                # update grub again so it accepts our default
                run.Raw('&&'),
                'sudo',
                'update-grub',
                run.Raw('&&'),
                'rm',
                remote_pkg_path(role_remote),
                run.Raw('&&'),
                'sudo',
                'shutdown',
                '-r',
                'now',
                ],
            wait=False,
            )
        procs[role_remote.name] = proc

    for name, proc in procs.iteritems():
        log.debug('Waiting for install on %s to complete...', name)
        proc.wait()
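
remote.run() accepts any write()-able object for stdout, which is why a StringIO can stand in for a file and be parsed afterwards via getvalue(). The same capture works locally with subprocess; a sketch (the grub.cfg path is taken from the example above):

import subprocess
from cStringIO import StringIO

cmdout = StringIO()
cmdout.write(subprocess.check_output(['egrep', 'submenu|menuentry',
                                      '/boot/grub/grub.cfg']))
for line in cmdout.getvalue().split('\n'):
    print line
cmdout.close()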

Example 59

Project: WAPT
Source File: upload.py
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\r\n--' + boundary
        end_boundary = sep_boundary + '--\r\n'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\r\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\r\n\r\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            raise
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)

Example 60

Project: chipsec
Source File: upload.py
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)

Example 61

Project: smisk
Source File: httpd.py
	def handle_fcgi_request(self, ch):
		rid = 1
		content_length = 0
		query_string = ''
		if '?' in self.path:
			query_string = self.path[self.path.index('?')+1:]
		params = {
			'GATEWAY_INTERFACE': 'CGI/1.1',
			'PATH_INFO': '',
			'QUERY_STRING': query_string,
			'REMOTE_ADDR': self.client_address[0],
			'REMOTE_HOST': self.server.fqdn,
			'REQUEST_METHOD': self.command,
			'SCRIPT_NAME': '/' + self.path.lstrip('/'),
			'SERVER_NAME': '%s:%d' % (self.server.fqdn, self.server.naddr[1]),
			'SERVER_PORT': '%d' % self.server.naddr[1],
			'SERVER_PROTOCOL': self.request_version,
			'SERVER_SOFTWARE': self.server_version,
			
			# Following are not part of CGI 1.1:
			'DOCUMENT_ROOT': self.server.document_root,
			'REMOTE_PORT': '%d' % self.client_address[1],
			'REQUEST_URI': self.path,
			'SCRIPT_FILENAME': self.server.document_root + '/' + self.path.lstrip('/'),
			'SERVER_ADDR': self.server.naddr[0],
		}
		
		# read http headers and transfer to params
		for k in self.headers:
			v = self.headers.get(k)
			params['HTTP_'+k.replace('-','_').upper()] = v
			if k == 'content-length':
				content_length = int(v)
			elif k == 'content-type':
				params['CONTENT_TYPE'] = v
		if content_length:
			params['CONTENT_LENGTH'] = str(content_length)
		
		# begin
		role = fcgi.FCGI_RESPONDER
		flags = 0
		content = '%c%c%c\000\000\000\000\000' % ((role&0xFF00)>>8, role&0xFF, flags)
		ch.writePacket(fcgi.Record(fcgi.FCGI_BEGIN_REQUEST, rid, content))
		
		# params
		content = ''
		for k,v in params.items():
			s = fcgi.writeNameValue(k,v)
			if len(content)+len(s) > fcgi.FCGI_MAX_PACKET_LEN:
				ch.writePacket(fcgi.Record(fcgi.FCGI_PARAMS, rid, content))
				content = s
			else:
				content += s
		ch.writePacket(fcgi.Record(fcgi.FCGI_PARAMS, rid, content))
		ch.writePacket(fcgi.Record(fcgi.FCGI_PARAMS, rid))
		
		# EOF on stdin
		if content_length == 0:
			ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid, ''))
		
		# read reply
		started = False
		wrote_stdin_eof = content_length == 0  # EOF already sent above when there is no body
		indata = ''
		outbuf = ''
		transfer_encoding = None
		skipout = False
		while 1:
			if content_length:
				try:
					r = ch.readPacket(True)
				except socket.error, e:
					if e.args[0] == 35: # "Resource temporarily unavailable"
						# probably waiting for stdin
						n = content_length
						if n > fcgi.FCGI_MAX_PACKET_LEN:
							n = fcgi.FCGI_MAX_PACKET_LEN
							content_length -= n
						else:
							content_length = 0
						
						indata = self.rfile.read(n)
						
						if not indata:
							log.warn('client sent EOF on stdin even though not all bytes indicated by '\
											 'content-length have been read -- aborting request')
							ch.writePacket(fcgi.Record(fcgi.FCGI_ABORT_REQUEST, rid))
							break
						
						log.info('got %d bytes on http stdin -- forwarding on FCGI channel', len(indata))
						
						ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid, indata))
						
						if content_length == 0:
							# write EOF
							ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid))
							wrote_stdin_eof = True
						
						continue
					else:
						raise
			else:
				r = ch.readPacket()
			log.debug('received packet %r', r)
			if r.type == fcgi.FCGI_STDOUT:
				if not started:
					outbuf += r.content
					r.content = ''
					p = outbuf.find('\r\n\r\n')
					if p != -1:
						sf = StringIO(outbuf[:p])
						r.content = outbuf[p+4:]
						headers = mimetools.Message(sf, True)
						
						# status
						status = headers.get('status', None)
						if status:
							status = status.split(' ',1)
							status[0] = int(status[0])
							self.send_response(*status)
						else:
							self.send_response(200)
						
						# required headers
						skipk = ['server', 'date', 'transfer-encoding']
						self.send_header('Server', headers.getheader('server', self.version_string()))
						self.send_header('Date', headers.getheader('date', self.date_time_string()))
						
						# content length
						if not headers.getheader('content-length', False):
							if self.protocol_version == 'HTTP/1.1':
								transfer_encoding = headers.getheader('transfer-encoding', 'chunked').lower()
								self.send_header('Transfer-Encoding', transfer_encoding)
							else:
								self.close_connection = 1
						
						# send other headers
						for k in headers:
							if k not in skipk:
								self.send_header(k.capitalize(), headers.getheader(k))
						
						self.wfile.write('\r\n')
						started = True
				if r.content and not skipout:
					self.wfile.write(r.content)
			elif r.type == fcgi.FCGI_STDERR:
				log.error('%s: %s', ch, r.content)
			elif r.type == fcgi.FCGI_END_REQUEST:
				if transfer_encoding == 'chunked':
					self.wfile.write('')
				break
		
		# EOF on stdin
		if not wrote_stdin_eof:
			ch.writePacket(fcgi.Record(fcgi.FCGI_STDIN, rid))
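
The FCGI_STDOUT branch splits the buffered reply at the first blank line and parses the header block by wrapping it in a StringIO for mimetools.Message (long deprecated in favor of the email package, but what this code uses). The parsing step in isolation:

from cStringIO import StringIO
import mimetools

raw = 'Status: 200 OK\r\nContent-Type: text/html\r\n'
headers = mimetools.Message(StringIO(raw), True)
print headers.getheader('status')        # '200 OK'
print headers.getheader('content-type')  # 'text/html'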

Example 62

def generateResidueTemplate(molecule, residue_atoms=None):
    """
    Generate a residue template for simtk.openmm.app.ForceField using GAFF/AM1-BCC.

    This requires the OpenEye toolkit.

    Parameters
    ----------
    molecule : openeye.oechem.OEMol
        The molecule to be parameterized.
        The molecule must have explicit hydrogens.
        Net charge will be inferred from the net formal charge on each molecule.
        Partial charges will be determined automatically using oequacpac and canonical AM1-BCC charging rules.
    residue_atoms : set of OEAtom, optional, default=None
        If not None, only the atoms in this set will be used to construct the residue template

    Returns
    -------
    template : simtk.openmm.app.forcefield._TemplateData
        Residue template for ForceField using atom types and parameters from `gaff.xml`.
    additional_parameters_ffxml : str
        Contents of ForceField `ffxml` file defining additional parameters from parmchk(2).

    Notes
    -----
    The residue template will be named after the molecule title.
    This method preserves stereochemistry during AM1-BCC charge parameterization.
    Atom names in molecules will be assigned Tripos atom names if any are blank or not unique.

    """
    # Set the template name based on the molecule title plus a globally unique UUID.
    from uuid import uuid4
    template_name = molecule.GetTitle() + '-' + str(uuid4())

    # If any atom names are blank or not unique, assign unique atom names.
    _ensureUniqueAtomNames(molecule)

    # Compute net formal charge.
    net_charge = _computeNetCharge(molecule)

    # Generate canonical AM1-BCC charges and a reference conformation.
    molecule = get_charges(molecule, strictStereo=False, keep_confs=1)

    # DEBUG: This may be necessary.
    molecule.SetTitle('MOL')

    # Create temporary directory for running antechamber.
    import tempfile
    tmpdir = tempfile.mkdtemp()
    prefix = 'molecule'
    input_mol2_filename = os.path.join(tmpdir, prefix + '.tripos.mol2')
    gaff_mol2_filename = os.path.join(tmpdir, prefix + '.gaff.mol2')
    frcmod_filename = os.path.join(tmpdir, prefix + '.frcmod')

    # Write Tripos mol2 file as antechamber input.
    _writeMolecule(molecule, input_mol2_filename)

    # Parameterize the molecule with antechamber.
    run_antechamber(template_name, input_mol2_filename, charge_method=None, net_charge=net_charge, gaff_mol2_filename=gaff_mol2_filename, frcmod_filename=frcmod_filename)

    # Read the resulting GAFF mol2 file as a ParmEd structure.
    from openeye import oechem
    ifs = oechem.oemolistream(gaff_mol2_filename)
    ifs.SetFlavor(oechem.OEFormat_MOL2, oechem.OEIFlavor_MOL2_DEFAULT | oechem.OEIFlavor_MOL2_M2H | oechem.OEIFlavor_MOL2_Forcefield)
    m2h = True
    oechem.OEReadMolecule(ifs, molecule)
    ifs.close()

    # If residue_atoms is None, add all atoms to the residue.
    if residue_atoms is None:
        residue_atoms = [atom for atom in molecule.GetAtoms()]

    # Modify partial charges so that charge on residue atoms is integral.
    residue_charge = 0.0
    sum_of_absolute_charge = 0.0
    for atom in residue_atoms:
        charge = atom.GetPartialCharge()
        residue_charge += charge
        sum_of_absolute_charge += abs(charge)
    excess_charge = residue_charge - net_charge
    if sum_of_absolute_charge == 0.0:
        sum_of_absolute_charge = 1.0
    for atom in residue_atoms:
        charge = atom.GetPartialCharge()
        atom.SetPartialCharge( charge + excess_charge * (abs(charge) / sum_of_absolute_charge) )

    # Create residue template.
    template = ForceField._TemplateData(template_name)
    for (index, atom) in enumerate(molecule.GetAtoms()):
        atomname = atom.GetName()
        typename = atom.GetType()
        element = Element.getByAtomicNumber(atom.GetAtomicNum())
        charge = atom.GetPartialCharge()
        parameters = { 'charge' : charge }
        atom_template = ForceField._TemplateAtomData(atomname, typename, element, parameters)
        template.atoms.append(atom_template)
    for bond in molecule.GetBonds():
        if (bond.GetBgn() in residue_atoms) and (bond.GetEnd() in residue_atoms):
            template.addBondByName(bond.GetBgn().GetName(), bond.GetEnd().GetName())
        elif (bond.GetBgn() in residue_atoms) and (bond.GetEnd() not in residue_atoms):
            template.addExternalBondByName(bond.GetBgn().GetName())
        elif (bond.GetBgn() not in residue_atoms) and (bond.GetEnd() in residue_atoms):
            template.addExternalBondByName(bond.GetEnd().GetName())

    # Generate ffxml file contents for parmchk-generated frcmod output.
    leaprc = StringIO('parm = loadamberparams %s' % frcmod_filename)
    params = parmed.amber.AmberParameterSet.from_leaprc(leaprc)
    params = parmed.openmm.OpenMMParameterSet.from_parameterset(params)
    ffxml = StringIO()
    params.write(ffxml)

    return template, ffxml.getvalue()
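
A cStringIO detail the function above leans on: calling cStringIO.StringIO(s) with an argument returns a read-only buffer (the in-memory leaprc), while calling it with no argument returns a writable one (the ffxml output). A minimal sketch of both sides, independent of the OpenEye and ParmEd machinery:

import cStringIO

# Read side: an existing string behaves like an open file.
leaprc = cStringIO.StringIO('parm = loadamberparams molecule.frcmod')
print leaprc.readline()

# Write side: no argument gives a writable buffer.
ffxml = cStringIO.StringIO()
ffxml.write('<ForceField></ForceField>')
print ffxml.getvalue()    # everything written so far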

Example 63

Project: maxfield
Source File: PlanPrinterMap.py
View license
    def __init__(self,a,outputDir,nagents,color='#FF004D',useGoogle=False,api_key=None):
        self.a = a
        self.n = a.order() # number of nodes
        self.m = a.size()  # number of links

        self.nagents = nagents
        self.outputDir = outputDir
        self.color = color

        # if the ith link to be made is (p,q) then orderedEdges[i] = (p,q)
        self.orderedEdges = [None]*self.m
        for e in a.edges_iter():
            self.orderedEdges[a.edge[e[0]][e[1]]['order']] = e

        # movements[i][j] is the index (in orderedEdges) of agent i's jth link
        self.movements = agentOrder.getAgentOrder(a,nagents,self.orderedEdges)

        # link2agent[i] is the agent that will make the ith link
        self.link2agent = [-1]*self.m
        for i in range(nagents):
            for e in self.movements[i]:
                self.link2agent[e] = i

        # keyneeds[i,j] = number of keys agent i needs for portal j
        self.agentkeyneeds = np.zeros([self.nagents,self.n],dtype=int)
        for i in xrange(self.nagents):
            for e in self.movements[i]:
                p,q = self.orderedEdges[e]
                self.agentkeyneeds[i][q] += 1

        self.names = np.array([a.node[i]['name'] for i in xrange(self.n)])
        # The alphabetical order
        makeLowerCase = np.vectorize(lambda s: s.lower())
        self.nameOrder = np.argsort(makeLowerCase(self.names))

        self.xy = np.array([self.a.node[i]['xy'] for i in xrange(self.n)])
        # print self.xy

        # The order from north to south (for easy-to-find labels)
        self.posOrder = np.argsort(self.xy,axis=0)[::-1,1]

        # The inverse permutation of posOrder
        self.nslabel = [-1]*self.n
        for i in xrange(self.n):
            self.nslabel[self.posOrder[i]] = i

        self.maxNameLen = max([len(a.node[i]['name']) for i in xrange(self.n)])

        # total stats for this plan
        self.num_portals = self.n
        self.num_links = self.m
        self.num_fields = 0

        if useGoogle:
            # convert xy coordinates to web mercator
            x_merc = np.array([128./np.pi * (self.a.node[i]['geo'][1] + np.pi) for i in self.a.node.keys()])
            min_x_merc = np.min(x_merc)
            #print "min_x_merc",min_x_merc
            x_merc = x_merc - min_x_merc
            #print "Xmin, Xmax",np.min(x_merc),np.max(x_merc)
            y_merc = np.array([128./np.pi * (np.pi - np.log(np.tan(np.pi/4. + self.a.node[i]['geo'][0]/2.))) for i in self.a.node.keys()])
            min_y_merc = np.min(y_merc)
            #print "min_y_merc",min_y_merc
            y_merc = y_merc - min_y_merc
            #print "Ymin, Ymax",np.min(y_merc),np.max(y_merc)
            # determine proper zoom such that the map is smaller than 640 on both sides
            zooms = np.arange(0,20,1)
            largest_x_zoom = 0
            largest_y_zoom = 0
            for zm in zooms:
                #print "X max",np.max(x_merc * 2.**zm + 20.)
                #print "Y max",np.max(y_merc * 2.**zm + 20.)
                if np.max(x_merc * 2.**zm) < 256.:
                    largest_x_zoom = zm
                    #print "X",largest_x_zoom
                if np.max(y_merc * 2.**zm) < 256.:
                    largest_y_zoom = zm
                    #print "Y",largest_y_zoom
            zoom = np.min([largest_x_zoom,largest_y_zoom])
            min_x_merc = min_x_merc*2.**(1+zoom)
            min_y_merc = min_y_merc*2.**(1+zoom)
            self.xy[:,0] = x_merc*2.**(1+zoom)
            self.xy[:,1] = y_merc*2.**(1+zoom)
            for i in xrange(self.n):
                self.a.node[i]['xy'] = self.xy[i]
            xsize = np.max(self.xy[:,0])+20
            ysize = np.max(self.xy[:,1])+20
            self.xylims = [-10,xsize-10,ysize-10,-10]
            # coordinates needed for google maps
            loncenter = np.rad2deg((min_x_merc+xsize/2.-10.)*np.pi/(128.*2.**(zoom+1)) - np.pi)
            latcenter = np.rad2deg(2.*np.arctan(np.exp(-1.*((min_y_merc+ysize/2.-10.)*np.pi/(128.*2.**(zoom+1)) - np.pi))) - np.pi/2.)
            #latmax = np.rad2deg(max([self.a.node[i]['geo'][0] for i in self.a.node.keys()]))
            #latmin = np.rad2deg(min([self.a.node[i]['geo'][0] for i in self.a.node.keys()]))
            #lonmax = np.rad2deg(max([self.a.node[i]['geo'][1] for i in self.a.node.keys()]))
            #lonmin = np.rad2deg(min([self.a.node[i]['geo'][1] for i in self.a.node.keys()]))
            #loncenter = (lonmax-lonmin)/2. + lonmin
            #latcenter = (latmax-latmin)/2. + latmin
            #print "Center Coordinates (lat,lon): ",latcenter,loncenter

            # turn things in to integers for maps API
            map_xwidth = int(xsize)
            map_ywidth = int(ysize)
            zoom = int(zoom)+1

            # google maps API
            # get API key
            if api_key is not None:
                url = "//maps.googleapis.com/maps/api/staticmap?center={0},{1}&size={2}x{3}&zoom={4}&sensor=false&key={5}".format(latcenter,loncenter,map_xwidth,map_ywidth,zoom,api_key)
            else:
                url = "//maps.googleapis.com/maps/api/staticmap?center={0},{1}&size={2}x{3}&zoom={4}&sensor=false".format(latcenter,loncenter,map_xwidth,map_ywidth,zoom)
            #print url

            # determine if we can use google maps
            self.google_image = None
            try:
                buffer = StringIO(urllib2.urlopen(url).read())
                self.google_image = Image.imread(buffer)
                plt.clf()
            except urllib2.URLError as err:
                print("Could not connect to google maps server!")

Example 64

Project: datapusher
Source File: jobs.py
View license
@job.async
def push_to_datastore(task_id, input, dry_run=False):
    '''Download and parse a resource push its data into CKAN's DataStore.

    An asynchronous job that gets a resource from CKAN, downloads the
    resource's data file and, if the data file has changed since last time,
    parses the data and posts it into CKAN's DataStore.

    :param dry_run: Fetch and parse the data file but don't actually post the
        data to the DataStore, instead return the data headers and rows that
        would have been posted.
    :type dry_run: boolean

    '''
    handler = util.StoringHandler(task_id, input)
    logger = logging.getLogger(task_id)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    validate_input(input)

    data = input['metadata']

    ckan_url = data['ckan_url']
    resource_id = data['resource_id']
    api_key = input.get('api_key')

    try:
        resource = get_resource(resource_id, ckan_url, api_key)
    except util.JobError, e:
        # try again in 5 seconds just in case CKAN is slow at adding the resource
        time.sleep(5)
        resource = get_resource(resource_id, ckan_url, api_key)

    # fetch the resource data
    logger.info('Fetching from: {0}'.format(resource.get('url')))
    try:
        request = urllib2.Request(resource.get('url'))

        if resource.get('url_type') == 'upload':
            # If this is an uploaded file to CKAN, authenticate the request,
            # otherwise we won't get file from private resources
            request.add_header('Authorization', api_key)

        response = urllib2.urlopen(request, timeout=DOWNLOAD_TIMEOUT)
    except urllib2.HTTPError as e:
        raise HTTPError(
            "DataPusher received a bad HTTP response when trying to download "
            "the data file", status_code=e.code,
            request_url=resource.get('url'), response=e.read())
    except urllib2.URLError as e:
        if isinstance(e.reason, socket.timeout):
            raise util.JobError('Connection timed out after %ss' %
                                DOWNLOAD_TIMEOUT)
        else:
            raise HTTPError(
                message=str(e.reason), status_code=None,
                request_url=resource.get('url'), response=None)

    cl = response.info().getheader('content-length')
    if cl and int(cl) > MAX_CONTENT_LENGTH:
        raise util.JobError(
            'Resource too large to download: {cl} > max ({max_cl}).'.format(
            cl=cl, max_cl=MAX_CONTENT_LENGTH))

    ct = response.info().getheader('content-type').split(';', 1)[0]

    f = cStringIO.StringIO(response.read())
    file_hash = hashlib.md5(f.read()).hexdigest()
    f.seek(0)

    if (resource.get('hash') == file_hash
            and not data.get('ignore_hash')):
        logger.info("The file hash hasn't changed: {hash}.".format(
            hash=file_hash))
        return

    resource['hash'] = file_hash

    try:
        table_set = messytables.any_tableset(f, mimetype=ct, extension=ct)
    except messytables.ReadError as e:
        ## try again with format
        f.seek(0)
        try:
            format = resource.get('format')
            table_set = messytables.any_tableset(f, mimetype=format, extension=format)
        except:
            raise util.JobError(e)

    row_set = table_set.tables.pop()
    offset, headers = messytables.headers_guess(row_set.sample)

    # Some headers might have been converted from strings to floats and such.
    headers = [unicode(header) for header in headers]

    row_set.register_processor(messytables.headers_processor(headers))
    row_set.register_processor(messytables.offset_processor(offset + 1))
    types = messytables.type_guess(row_set.sample, types=TYPES, strict=True)
    row_set.register_processor(messytables.types_processor(types))

    headers = [header.strip() for header in headers if header.strip()]
    headers_set = set(headers)

    def row_iterator():
        for row in row_set:
            data_row = {}
            for index, cell in enumerate(row):
                column_name = cell.column.strip()
                if column_name not in headers_set:
                    continue
                data_row[column_name] = cell.value
            yield data_row
    result = row_iterator()

    '''
    Delete existing datastore resource before proceeding. Otherwise
    'datastore_create' will append to the existing datastore. And if
    the fields have significantly changed, it may also fail.
    '''
    if datastore_resource_exists(resource_id, api_key, ckan_url):
        logger.info('Deleting "{res_id}" from datastore.'.format(
            res_id=resource_id))
        delete_datastore_resource(resource_id, api_key, ckan_url)

    headers_dicts = [dict(id=field[0], type=TYPE_MAPPING[str(field[1])])
                     for field in zip(headers, types)]

    logger.info('Determined headers and types: {headers}'.format(
        headers=headers_dicts))

    if dry_run:
        return headers_dicts, result

    count = 0
    for i, records in enumerate(chunky(result, 250)):
        count += len(records)
        logger.info('Saving chunk {number}'.format(number=i))
        send_resource_to_datastore(resource, headers_dicts,
                                   records, api_key, ckan_url)

    logger.info('Successfully pushed {n} entries to "{res_id}".'.format(
        n=count, res_id=resource_id))

    if data.get('set_url_type', False):
        update_resource(resource, api_key, ckan_url)
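
One detail worth noting above: f.read() inside the MD5 call leaves the buffer positioned at end-of-file, so the f.seek(0) is what lets messytables re-read the same bytes. The hash-then-rewind pattern in isolation, with a literal string standing in for the downloaded body:

import hashlib
import cStringIO

payload = 'a,b\n1,2\n'  # stands in for response.read()
f = cStringIO.StringIO(payload)
file_hash = hashlib.md5(f.read()).hexdigest()
f.seek(0)  # rewind; without this the next read() returns ''
assert f.read() == payload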

Example 65

View license
def _update_rc_conf_legacy(infile, interfaces):
    """
    Return data for (sub-)interfaces and routes
    """

    # Updating this file happens in two phases since it's non-trivial to
    # update. The INTERFACES and ROUTES variables are the key lines, but they
    # will in turn reference other variables, which may be before or after.
    # As a result, we need to load the entire file, find the main variables
    # and then remove the reference variables. When that is done, we add
    # the lines for the new config.

    # First generate new config
    ifaces = []
    routes = []

    gateway4, gateway6 = commands.network.get_gateways(interfaces)

    ifnames = interfaces.keys()
    ifnames.sort()

    for ifname_prefix in ifnames:
        interface = interfaces[ifname_prefix]

        ip4s = interface['ip4s']
        ip6s = interface['ip6s']

        ifname_suffix_num = 0

        for ip4, ip6 in map(None, ip4s, ip6s):
            if ifname_suffix_num:
                ifname = "%s:%d" % (ifname_prefix, ifname_suffix_num)
            else:
                ifname = ifname_prefix

            line = [ifname]
            if ip4:
                line.append('%(address)s netmask %(netmask)s' % ip4)

            if ip6:
                line.append('add %(address)s/%(prefixlen)s' % ip6)

            ifname_suffix_num += 1

            ifaces.append((ifname.replace(':', '_'), ' '.join(line)))

        for i, route in enumerate(interface['routes']):
            if route['network'] == '0.0.0.0' and \
                    route['netmask'] == '0.0.0.0' and \
                    route['gateway'] == gateway4:
                continue

            line = "-net %(network)s netmask %(netmask)s gw %(gateway)s" % \
                    route

            routes.append(('%s_route%d' % (ifname_prefix, i), line))

    if gateway4:
        routes.append(('gateway', 'default gw %s' % gateway4))
    if gateway6:
        routes.append(('gateway6', 'default gw %s' % gateway6))

    # Then load old file
    lines, variables = _parse_config(infile)

    # Update INTERFACES
    lineno = variables.get('INTERFACES')
    if lineno is not None:
        # Remove old lines
        for name in _parse_variable(lines[lineno], strip_bang=True):
            if name in variables:
                lines[variables[name]] = None
    else:
        lines.append('')
        lineno = len(lines) - 1

    config = []
    names = []
    for name, line in ifaces:
        config.append('%s="%s"' % (name, line))
        names.append(name)

    config.append('INTERFACES=(%s)' % ' '.join(names))
    lines[lineno] = '\n'.join(config)

    # Update ROUTES
    lineno = variables.get('ROUTES')
    if lineno is not None:
        # Remove old lines
        for name in _parse_variable(lines[lineno], strip_bang=True):
            if name in variables:
                lines[variables[name]] = None
    else:
        lines.append('')
        lineno = len(lines) - 1

    config = []
    names = []
    for name, line in routes:
        config.append('%s="%s"' % (name, line))
        names.append(name)

    config.append('ROUTES=(%s)' % ' '.join(names))
    lines[lineno] = '\n'.join(config)

    # (Possibly) comment out NETWORKS
    lineno = variables.get('NETWORKS')
    if lineno is not None:
        for name in _parse_variable(lines[lineno], strip_bang=True):
            nlineno = variables.get(name)
            if nlineno is not None:
                lines[nlineno] = '#' + lines[nlineno]

        lines[lineno] = '#' + lines[lineno]

    # (Possibly) update DAEMONS
    lineno = variables.get('DAEMONS')
    if lineno is not None:
        daemons = _parse_variable(lines[lineno])
        try:
            network = daemons.index('!network')
            daemons[network] = 'network'
            if '@net-profiles' in daemons:
                daemons.remove('@net-profiles')
            lines[lineno] = 'DAEMONS=(%s)' % ' '.join(daemons)
        except ValueError:
            pass

    # Filter out any removed lines
    lines = filter(lambda l: l is not None, lines)

    # Serialize into new file
    outfile = StringIO()
    for line in lines:
        print >> outfile, line

    outfile.seek(0)
    return outfile.read()
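
The serialization step at the end uses Python 2's print-chevron form, which writes to any object with a write() method; the seek(0)/read() pair then returns the whole buffer (getvalue() would give the same string without moving the file position). A minimal sketch:

from cStringIO import StringIO

outfile = StringIO()
for line in ['INTERFACES=(eth0)', 'ROUTES=(gateway)']:
    print >> outfile, line  # appends the line plus a trailing newline
outfile.seek(0)
text = outfile.read()       # same string as outfile.getvalue()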

Example 66

Project: canape
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)
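
Building the whole multipart/form-data body in a StringIO, as above, is what makes the Content-length header trivial to compute: the body is complete before the request is built. A reduced sketch of the buffering step, with made-up form fields:

import StringIO

boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
body = StringIO.StringIO()
for key, value in {'name': 'demo', 'version': '1.0'}.items():
    body.write('\n--' + boundary)
    body.write('\nContent-Disposition: form-data; name="%s"' % key)
    body.write('\n\n')
    body.write(value)
body.write('\n--' + boundary + '--\n')
payload = body.getvalue()
headers = {'Content-length': str(len(payload))}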

Example 67

Project: bh2014
Source File: upload.py
View license
(The code is identical to the upload_file shown in Example 66 above and is omitted here.)

Example 68

Project: odoo
Source File: custom.py
View license
    def _create_lines(self, cr, uid, ids, report, fields, results, context):
        pool = openerp.registry(cr.dbname)
        pdf_string = cStringIO.StringIO()
        can = canvas.init(fname=pdf_string, format='pdf')
        
        can.show(80,380,'/16/H'+report['title'])
        
        ar = area.T(size=(350,350),
            #x_coord = category_coord.T(['2005-09-01','2005-10-22'],0),
            x_axis = axis.X(label = fields[0]['name'], format="/a-30{}%s"),
            y_axis = axis.Y(label = ', '.join(map(lambda x : x['name'], fields[1:]))))
        
        process_date = {
            'D': lambda x: reduce(lambda xx, yy: xx + '-' + yy, x.split('-')[1:3]),
            'M': lambda x: x.split('-')[1],
            'Y': lambda x: x.split('-')[0]
        }

        order_date = {
            'D': lambda x: time.mktime((2005, int(x.split('-')[0]), int(x.split('-')[1]), 0, 0, 0, 0, 0, 0)),
            'M': lambda x: x,
            'Y': lambda x: x
        }

        abscissa = []
        
        idx = 0 
        date_idx = None
        fct = {}
        for f in fields:
            field_id = (f['field_child3'] and f['field_child3'][0]) or (f['field_child2'] and f['field_child2'][0]) or (f['field_child1'] and f['field_child1'][0]) or (f['field_child0'] and f['field_child0'][0])
            if field_id:
                type = pool['ir.model.fields'].read(cr, uid, [field_id],['ttype'])
                if type[0]['ttype'] == 'date':
                    date_idx = idx
                    fct[idx] = process_date[report['frequency']] 
                else:
                    fct[idx] = lambda x : x
            else:
                fct[idx] = lambda x : x
            idx+=1

        # plots are usually displayed year by year
        # so we do so if the first field is a date
        data_by_year = {}
        if date_idx is not None:
            for r in results:
                key = process_date['Y'](r[date_idx])
                if key not in data_by_year:
                    data_by_year[key] = []
                for i in range(len(r)):
                    r[i] = fct[i](r[i])
                data_by_year[key].append(r)
        else:
            data_by_year[''] = results

        idx0 = 0
        nb_bar = len(data_by_year)*(len(fields)-1)
        colors = map(lambda x:line_style.T(color=x), misc.choice_colors(nb_bar))
        abscissa = {}
        for line in data_by_year.keys():
            fields_bar = []
            # sum data and save it in a list, one item per field
            for d in data_by_year[line]:
                for idx in range(len(fields)-1):
                    fields_bar.append({})
                    if d[0] in fields_bar[idx]:
                        fields_bar[idx][d[0]] += d[idx+1]
                    else:
                        fields_bar[idx][d[0]] = d[idx+1]
            for idx  in range(len(fields)-1):
                data = {}
                for k in fields_bar[idx].keys():
                    if k in data:
                        data[k] += fields_bar[idx][k]
                    else:
                        data[k] = fields_bar[idx][k]
                data_cum = []
                prev = 0.0
                keys = data.keys()
                keys.sort()
                # cumulate if necessary
                for k in keys:
                    data_cum.append([k, float(data[k])+float(prev)])
                    if fields[idx+1]['cumulate']:
                        prev += data[k]
                plot = line_plot.T(label=fields[idx+1]['name']+' '+str(line), data = data_cum, line_style=colors[idx0*(len(fields)-1)+idx])
                ar.add_plot(plot)
                abscissa.update(fields_bar[idx])
            # advance the colour block once per year so each year gets distinct line styles
            idx0 += 1
        
        abscissa = map(lambda x : [x, None], abscissa)
        ar.x_coord = category_coord.T(abscissa,0)
        ar.draw(can)

        can.close()
        self.obj = external_pdf(pdf_string.getvalue())
        self.obj.render()
        pdf_string.close()
        return True
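
Here the StringIO acts as an in-memory PDF file: canvas.init(fname=pdf_string, format='pdf') accepts a file object, getvalue() hands the finished bytes to the report object, and close() releases the buffer. The lifecycle in isolation, with a write() call standing in for the canvas drawing:

import cStringIO

pdf_string = cStringIO.StringIO()
pdf_string.write('%PDF-1.4 ...')   # stands in for the canvas drawing calls
pdf_bytes = pdf_string.getvalue()  # grab the contents before closing
pdf_string.close()                 # getvalue() raises ValueError afterwards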

Example 69

Project: entropy
Source File: client.py
View license
    def _generic_post_handler(self, function_name, params, file_params,
        timeout):
        """
        Given a function name and the request data (dict format), do the actual
        HTTP request and return the response object to caller.
        WARNING: params and file_params dict keys must be ASCII string only.

        @param function_name: name of the function that called this method
        @type function_name: string
        @param params: POST parameters
        @type params: dict
        @param file_params: mapping composed by file names as key and tuple
            composed by (file_name, file object) as values
        @type file_params: dict
        @param timeout: socket timeout
        @type timeout: float
        @return: tuple composed by the server response string or None
            (in case of empty response) and the HTTPResponse object (useful
                for checking response status)
        @rtype: tuple
        """
        if timeout is None:
            timeout = self._default_timeout_secs
        multipart_boundary = "---entropy.services,boundary---"
        request_path = self._request_path.rstrip("/") + "/" + function_name
        const_debug_write(__name__,
            "WebService _generic_post_handler, calling: %s at %s -- %s,"
            " tx_callback: %s, timeout: %s" % (self._request_host, request_path,
                params, self._transfer_callback, timeout,))
        connection = None
        try:
            if self._request_protocol == "http":
                connection = httplib.HTTPConnection(self._request_host,
                    timeout = timeout)
            elif self._request_protocol == "https":
                ssl_context = None
                if hasattr(ssl, 'create_default_context'):
                    ssl_context = ssl.create_default_context(
                        purpose = ssl.Purpose.CLIENT_AUTH)
                connection = httplib.HTTPSConnection(
                    self._request_host, timeout = timeout, context = ssl_context)
            else:
                raise WebService.RequestError("invalid request protocol",
                    method = function_name)

            headers = {
                "Accept": "text/plain",
                "User-Agent": self._generate_user_agent(function_name),
            }

            if file_params is None:
                file_params = {}
            # autodetect file parameters in params
            for k in list(params.keys()):
                if isinstance(params[k], (tuple, list)) \
                    and (len(params[k]) == 2):
                    f_name, f_obj = params[k]
                    if isinstance(f_obj, file):
                        file_params[k] = params[k]
                        del params[k]
                elif const_isunicode(params[k]):
                    # convert to raw string
                    params[k] = const_convert_to_rawstring(params[k],
                        from_enctype = "utf-8")
                elif not const_isstring(params[k]):
                    # invalid ?
                    if params[k] is None:
                        # will be converted to ""
                        continue
                    int_types = const_get_int()
                    supported_types = (float, list, tuple) + int_types
                    if not isinstance(params[k], supported_types):
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k, type(params[k])))
                    list_types = (list, tuple)
                    if isinstance(params[k], list_types):
                        # not supporting nested lists
                        non_str = [x for x in params[k] if not \
                            const_isstring(x)]
                        if non_str:
                            raise WebService.UnsupportedParameters(
                                "%s is unsupported type %s" % (k,
                                    type(params[k])))

            body = None
            if not file_params:
                headers["Content-Type"] = "application/x-www-form-urlencoded"
                encoded_params = urllib_parse.urlencode(params)
                data_size = len(encoded_params)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                if data_size < 65536:
                    try:
                        connection.request("POST", request_path, encoded_params,
                            headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                else:
                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    sio = StringIO(encoded_params)
                    data_size = len(encoded_params)
                    while True:
                        chunk = sio.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(sio.tell(),
                                data_size, False)
                # for both ways, send a signal through the callback
                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)

            else:
                headers["Content-Type"] = "multipart/form-data; boundary=" + \
                    multipart_boundary
                body_file, body_fpath = self._encode_multipart_form(params,
                    file_params, multipart_boundary)
                try:
                    data_size = body_file.tell()
                    headers["Content-Length"] = str(data_size)
                    body_file.seek(0)
                    if self._transfer_callback is not None:
                        self._transfer_callback(0, data_size, False)

                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    while True:
                        chunk = body_file.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(body_file.tell(),
                                data_size, False)
                    if self._transfer_callback is not None:
                        self._transfer_callback(data_size, data_size, False)
                finally:
                    body_file.close()
                    os.remove(body_fpath)

            try:
                response = connection.getresponse()
            except socket.error as err:
                raise WebService.RequestError(err,
                    method = function_name)
            const_debug_write(__name__, "WebService.%s(%s), "
                "response header: %s" % (
                    function_name, params, response.getheaders(),))
            total_length = response.getheader("Content-Length", "-1")
            try:
                total_length = int(total_length)
            except ValueError:
                total_length = -1
            outcome = const_convert_to_rawstring("")
            current_len = 0
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)
            while True:
                try:
                    chunk = response.read(65536)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)
                if not chunk:
                    break
                outcome += chunk
                current_len += len(chunk)
                if self._transfer_callback is not None:
                    self._transfer_callback(current_len, total_length, True)

            if self._transfer_callback is not None:
                self._transfer_callback(total_length, total_length, True)

            if const_is_python3():
                outcome = const_convert_to_unicode(outcome)
            if not outcome:
                return None, response
            return outcome, response

        except httplib.HTTPException as err:
            raise WebService.RequestError(err,
                method = function_name)
        finally:
            if connection is not None:
                connection.close()
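
For urlencoded bodies of 64 KiB or more, the client above streams from a StringIO instead of passing one big string to request(), using tell() to drive the progress callback after each chunk. The core loop reduced to a sketch, with a list standing in for the socket:

from cStringIO import StringIO

def send_in_chunks(data, send, chunk_size=65535):
    sio = StringIO(data)
    total = len(data)
    while True:
        chunk = sio.read(chunk_size)
        if not chunk:
            break
        send(chunk)
        print 'sent %d of %d bytes' % (sio.tell(), total)

sent = []
send_in_chunks('x' * 200000, sent.append)
assert ''.join(sent) == 'x' * 200000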

Example 70

Project: nginx-python-buildpack
Source File: pyopenssl.py
View license
    def readline(self, size=-1):
        buf = self._rbuf
        buf.seek(0, 2)  # seek end
        if buf.tell() > 0:
            # check if we already have it in our buffer
            buf.seek(0)
            bline = buf.readline(size)
            if bline.endswith('\n') or len(bline) == size:
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return bline
            del bline
        if size < 0:
            # Read until \n or EOF, whichever comes first
            if self._rbufsize <= 1:
                # Speed up unbuffered case
                buf.seek(0)
                buffers = [buf.read()]
                self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
                data = None
                recv = self._sock.recv
                while True:
                    try:
                        while data != "\n":
                            data = recv(1)
                            if not data:
                                break
                            buffers.append(data)
                    except OpenSSL.SSL.WantReadError:
                        continue
                    break
                return "".join(buffers)

            buf.seek(0, 2)  # seek end
            self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
            while True:
                try:
                    data = self._sock.recv(self._rbufsize)
                except OpenSSL.SSL.WantReadError:
                    continue
                if not data:
                    break
                nl = data.find('\n')
                if nl >= 0:
                    nl += 1
                    buf.write(data[:nl])
                    self._rbuf.write(data[nl:])
                    del data
                    break
                buf.write(data)
            return buf.getvalue()
        else:
            # Read until size bytes or \n or EOF seen, whichever comes first
            buf.seek(0, 2)  # seek end
            buf_len = buf.tell()
            if buf_len >= size:
                buf.seek(0)
                rv = buf.read(size)
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return rv
            self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
            while True:
                try:
                    data = self._sock.recv(self._rbufsize)
                except OpenSSL.SSL.WantReadError:
                    continue
                if not data:
                    break
                left = size - buf_len
                # did we just receive a newline?
                nl = data.find('\n', 0, left)
                if nl >= 0:
                    nl += 1
                    # save the excess data to _rbuf
                    self._rbuf.write(data[nl:])
                    if buf_len:
                        buf.write(data[:nl])
                        break
                    else:
                        # Shortcut.  Avoid data copy through buf when returning
                        # a substring of our first recv().
                        return data[:nl]
                n = len(data)
                if n == size and not buf_len:
                    # Shortcut.  Avoid data copy through buf when
                    # returning exactly all of our first recv().
                    return data
                if n >= left:
                    buf.write(data[:left])
                    self._rbuf.write(data[left:])
                    break
                buf.write(data)
                buf_len += n
                #assert buf_len == buf.tell()
            return buf.getvalue()
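
The recurring move in readline() above is drain-and-reset: read the buffered bytes out, return everything up to the newline, and park the remainder in a fresh StringIO for the next call. The mechanic in isolation:

from cStringIO import StringIO

rbuf = StringIO()
rbuf.write('first line\nleft over')

rbuf.seek(0)
line = rbuf.readline()    # 'first line\n'
carry = StringIO()        # fresh buffer for the unread tail
carry.write(rbuf.read())  # 'left over'
rbuf = carry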

Example 71

Project: oleviewdotnet
Source File: upload.py
View license
(The code is identical to the upload_file shown in Example 66 above and is omitted here.)

Example 72

Project: Petrel
Source File: package.py
View license
def build_jar(source_jar_path, dest_jar_path, config, venv=None, definition=None, logdir=None):
    """Build a StormTopology .jar which encapsulates the topology defined in
    topology_dir. Optionally override the module and function names. This
    feature supports the definition of multiple topologies in a single
    directory."""

    if definition is None:
        definition = 'create.create'

    # Prepare data we'll use later for configuring parallelism.
    config_yaml = read_yaml(config)
    parallelism = dict((k.split('.')[-1], v) for k, v in config_yaml.iteritems()
        if k.startswith('petrel.parallelism'))

    pip_options = config_yaml.get('petrel.pip_options', '')

    module_name, dummy, function_name = definition.rpartition('.')
    
    topology_dir = os.getcwd()

    # Make a copy of the input "jvmpetrel" jar. This jar acts as a generic
    # starting point for all Petrel topologies.
    source_jar_path = os.path.abspath(source_jar_path)
    dest_jar_path = os.path.abspath(dest_jar_path)
    if source_jar_path == dest_jar_path:
        raise ValueError("Error: Destination and source path are the same.")
    shutil.copy(source_jar_path, dest_jar_path)
    jar = zipfile.ZipFile(dest_jar_path, 'a', compression=zipfile.ZIP_DEFLATED)
    
    added_path_entry = False
    try:
        # Add the files listed in manifest.txt to the jar.
        with open(os.path.join(topology_dir, MANIFEST), 'r') as f:
            for fn in f.readlines():
                # Ignore blank and comment lines.
                fn = fn.strip()
                if len(fn) and not fn.startswith('#'):

                    add_item_to_jar(jar, os.path.expandvars(fn.strip()))

        # Add user and machine information to the jar.
        add_to_jar(jar, '__submitter__.yaml', '''
petrel.user: %s
petrel.host: %s
''' % (getpass.getuser(),socket.gethostname()))
        
        # Also add the topology configuration to the jar.
        with open(config, 'r') as f:
            config_text = f.read()
        add_to_jar(jar, '__topology__.yaml', config_text)
    
        # Call module_name/function_name to populate a Thrift topology object.
        builder = TopologyBuilder()
        module_dir = os.path.abspath(topology_dir)
        if module_dir not in sys.path:
            sys.path[:0] = [ module_dir ]
            added_path_entry = True
        module = __import__(module_name)
        getattr(module, function_name)(builder)

        # Add the spout and bolt Python scripts to the jar. Create a
        # setup_<script>.sh for each Python script.

        # Add Python scripts and any other per-script resources.
        for k, v in chain(builder._spouts.iteritems(), builder._bolts.iteritems()):
            add_file_to_jar(jar, topology_dir, v.script)

            # Create a bootstrap script.
            if venv is not None:
                # Allow overriding the execution command from the "petrel"
                # command line. This is handy if the server already has a
                # virtualenv set up with the necessary libraries.
                v.execution_command = os.path.join(venv, 'bin/python')

            # If a parallelism value was specified in the configuration YAML,
            # override any setting provided in the topology definition script.
            if k in parallelism:
                builder._commons[k].parallelism_hint = int(parallelism.pop(k))

            v.execution_command, v.script = \
                intercept(venv, v.execution_command, os.path.splitext(v.script)[0],
                          jar, pip_options, logdir)

        if len(parallelism):
            raise ValueError(
                'Parallelism settings error: There are no components named: %s' %
                ','.join(parallelism.keys()))

        # Build the Thrift topology object and serialize it to the .jar. Must do
        # this *after* the intercept step above since that step may modify the
        # topology definition.
        io = StringIO()
        topology = builder.write(io)
        add_to_jar(jar, 'topology.ser', io.getvalue())
    finally:
        jar.close()
        if added_path_entry:
            # Undo our sys.path change.
            sys.path[:] = sys.path[1:]
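
The last step serializes the Thrift topology into a StringIO and stores getvalue() in the jar. add_to_jar itself is not shown here, but under the assumption that it wraps ZipFile.writestr, the pairing looks like this sketch using only the standard library:

import zipfile
from cStringIO import StringIO

io = StringIO()
io.write('serialized topology bytes')  # stands in for builder.write(io)

jar = zipfile.ZipFile('topology.jar', 'w', compression=zipfile.ZIP_DEFLATED)
jar.writestr('topology.ser', io.getvalue())  # assumed body of add_to_jar
jar.close()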

Example 73

Project: PokemonGo-Bot-Desktop
Source File: config.py
View license
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().
    """
    if not thread:
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            import tempfile
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    try:
                        import json
                        d = json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        # Apply new configuration.

                        file = cStringIO.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error as e:
                if e.errno != RESET_ERROR:
                    raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
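
The handler's docstring spells out the wire format: a 4-byte big-endian length packed with struct.pack(">L", n), followed by the config bytes. A minimal client sketch against that protocol (9030 is the stdlib's DEFAULT_LOGGING_CONFIG_PORT):

import socket
import struct

def send_logging_config(config_text, host='localhost', port=9030):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    try:
        # Length prefix first, then the raw fileConfig()/JSON payload.
        s.sendall(struct.pack('>L', len(config_text)) + config_text)
    finally:
        s.close()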

Example 74

Project: dopey
Source File: document.py
View license
    def load_ora(self, filename, feedback_cb=None):
        """Loads from an OpenRaster file"""
        logger.info('load_ora: %r', filename)
        t0 = time.time()
        tempdir = tempfile.mkdtemp('mypaint')
        if not isinstance(tempdir, unicode):
            tempdir = tempdir.decode(sys.getfilesystemencoding())
        z = zipfile.ZipFile(filename)
        logger.debug('mimetype: %r', z.read('mimetype').strip())
        xml = z.read('stack.xml')
        image = ET.fromstring(xml)
        stack = image.find('stack')

        image_w = int(image.attrib['w'])
        image_h = int(image.attrib['h'])

        def get_pixbuf(filename):
            t1 = time.time()

            try:
                fp = z.open(filename, mode='r')
            except KeyError:
                # support for bad zip files (saved by old versions of the GIMP ORA plugin)
                fp = z.open(filename.encode('utf-8'), mode='r')
                logger.warning('Bad OpenRaster ZIP file. There is a utf-8 '
                               'encoded filename that does not have the '
                               'utf-8 flag set: %r', filename)

            res = self._pixbuf_from_stream(fp, feedback_cb)
            fp.close()
            logger.debug('%.3fs loading pixbuf %s', time.time() - t1, filename)
            return res

        def get_layers_list(root, x=0,y=0):
            res = []
            for item in root:
                if item.tag == 'layer':
                    if 'x' in item.attrib:
                        item.attrib['x'] = int(item.attrib['x']) + x
                    if 'y' in item.attrib:
                        item.attrib['y'] = int(item.attrib['y']) + y
                    res.append(item)
                elif item.tag == 'stack':
                    stack_x = int( item.attrib.get('x', 0) )
                    stack_y = int( item.attrib.get('y', 0) )
                    res += get_layers_list(item, stack_x, stack_y)
                else:
                    logger.warning('ignoring unsupported tag %r', item.tag)
            return res

        self.clear() # this leaves one empty layer
        no_background = True

        selected_layer = None
        for layer in get_layers_list(stack):
            a = layer.attrib

            if 'background_tile' in a:
                assert no_background
                try:
                    logger.debug("background tile: %r", a['background_tile'])
                    self.set_background(get_pixbuf(a['background_tile']))
                    no_background = False
                    continue
                except tiledsurface.BackgroundError, e:
                    logger.warning('ORA background tile not usable: %r', e)

            src = a.get('src', '')
            if not src.lower().endswith('.png'):
                logger.warning('Ignoring non-png layer %r', src)
                continue
            name = a.get('name', '')
            x = int(a.get('x', '0'))
            y = int(a.get('y', '0'))
            opac = float(a.get('opacity', '1.0'))
            compositeop = str(a.get('composite-op', DEFAULT_COMPOSITE_OP))
            if compositeop not in VALID_COMPOSITE_OPS:
                compositeop = DEFAULT_COMPOSITE_OP
            selected = self.__xsd2bool(a.get("selected", 'false'))
            locked = self.__xsd2bool(a.get("edit-locked", 'false'))

            visible = not 'hidden' in a.get('visibility', 'visible')
            self.add_layer(insert_idx=0, name=name)
            t1 = time.time()

            # extract the png from the zip into a file first
            # the overhead for doing so seems to be negligible (around 5%)
            z.extract(src, tempdir)
            tmp_filename = join(tempdir, src)
            self.load_layer_from_png(tmp_filename, x, y, feedback_cb)
            os.remove(tmp_filename)

            layer = self.layers[0]

            self.set_layer_opacity(helpers.clamp(opac, 0.0, 1.0), layer)
            self.set_layer_compositeop(compositeop, layer)
            self.set_layer_visibility(visible, layer)
            self.set_layer_locked(locked, layer)
            if selected:
                selected_layer = layer
            logger.debug('%.3fs loading and converting layer png',
                         time.time() - t1)
            # strokemap
            fname = a.get('mypaint_strokemap_v2', None)
            if fname:
                sio = StringIO(z.read(fname))
                layer.load_strokemap_from_file(sio, x, y)
                sio.close()

        if len(self.layers) == 1:
            # no assertion (allow empty documents)
            logger.error('Could not load any layer, document is empty.')

        if len(self.layers) > 1:
            # remove the still present initial empty top layer
            self.select_layer(len(self.layers)-1)
            self.remove_layer()
            # this leaves the topmost layer selected

        try:
            ani_data = z.read('animation.xsheet')
            self.ani.str_to_xsheet(ani_data)
        except KeyError:
            self.ani.load_xsheet(filename)

        if selected_layer is not None:
            for i, layer in zip(range(len(self.layers)), self.layers):
                if layer is selected_layer:
                    self.select_layer(i)
                    break

        # Set the frame size to that saved in the image.
        self.update_frame(x=0, y=0, width=image_w, height=image_h,
                          user_initiated=False)

        # Enable frame if the saved image size is something other than the
        # calculated bounding box. Goal: if the user saves an "infinite
        # canvas", it loads as an infinite canvas.
        bbox_c = helpers.Rect(x=0, y=0, w=image_w, h=image_h)
        bbox = self.get_bbox()
        frame_enab = not (bbox_c==bbox or bbox.empty() or bbox_c.empty())
        self.set_frame_enabled(frame_enab, user_initiated=False)

        z.close()

        # remove empty directories created by zipfile's extract()
        for root, dirs, files in os.walk(tempdir, topdown=False):
            for name in dirs:
                os.rmdir(os.path.join(root, name))
        os.rmdir(tempdir)

        logger.info('%.3fs load_ora total', time.time() - t0)
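
The strokemap handling above (sio = StringIO(z.read(fname))) is the read-side idiom: z.read() returns the archive member as one byte string, and StringIO wraps it as a cheap in-memory file for APIs that expect a stream. The same move in isolation:

import zipfile
from cStringIO import StringIO

def member_as_stream(zip_path, member):
    z = zipfile.ZipFile(zip_path)
    try:
        # A seekable, file-like view over the member's bytes.
        return StringIO(z.read(member))
    finally:
        z.close()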

Example 75

Project: termite-data-server
Source File: pdfinvoice.py
View license
    def draw(self, invoice, items_page=10):
        """ Draws the invoice """
        buffer = cStringIO.StringIO()
        invoice_items = invoice['items']
        pages = max((len(invoice_items)-2)/items_page+1,1)
        canvas = Canvas(buffer, pagesize=self.page_size)
        for page in range(pages):
            canvas.translate(0, 29.7 * cm)
            canvas.setFont(self.font_face, 10)

            canvas.saveState()
            canvas.setStrokeColorRGB(0.9, 0.5, 0.2)
            canvas.setFillColorRGB(0.2, 0.2, 0.2)
            canvas.setFont(self.font_face, 16)
            canvas.drawString(1 * cm, -1 * cm, invoice.get('title',''))
            if self.logo:
                canvas.drawInlineImage(self.logo, 1 * cm, -1 * cm, 250, 16)
            canvas.setLineWidth(4)
            canvas.line(0, -1.25 * cm, 21.7 * cm, -1.25 * cm)
            canvas.restoreState()

            canvas.saveState()
            notes = listify(invoice.get('notes',''))
            textobject = canvas.beginText(1 * cm, -25 * cm)
            for line in notes:
                textobject.textLine(line)
            canvas.drawText(textobject)
            textobject = canvas.beginText(18 * cm, -28 * cm)
            textobject.textLine('Pag.%s/%s' % (page+1,pages))
            canvas.drawText(textobject)
            canvas.restoreState()

            canvas.saveState()
            business_details = listify(invoice.get('from','FROM:'))
            canvas.setFont(self.font_face, 9)
            textobject = canvas.beginText(13 * cm, -2.5 * cm)
            for line in business_details:
                textobject.textLine(line)
            canvas.drawText(textobject)
            canvas.restoreState()

            canvas.saveState()
            client_info = listify(invoice.get('to','TO:'))
            textobject = canvas.beginText(1.5 * cm, -2.5 * cm)
            for line in client_info:
                textobject.textLine(line)
            canvas.drawText(textobject)
            canvas.restoreState()

            textobject = canvas.beginText(1.5 * cm, -6.75 * cm)
            textobject.textLine(u'Invoice ID: %s' % invoice.get('id','<invoice id>'))
            textobject.textLine(u'Invoice Date: %s' % invoice.get('date',datetime.date.today()))
            textobject.textLine(u'Client: %s' % invoice.get('client_name','<invoice client>'))
            canvas.drawText(textobject)

            items = invoice_items[1:][page*items_page:(page+1)*items_page]
            if items:
                data = [invoice_items[0]]
                for item in items:
                    data.append([
                            self.format_currency(x)
                            if isinstance(x,float) else x
                            for x in item])
                righta = [k for k,v in enumerate(items[0])
                          if isinstance(v,(int,float,Decimal))]
                if page == pages-1:
                    total = self.format_currency(invoice['total'])
                else:
                    total = ''
                data.append(['']*(len(items[0])-1)+[total])
                colWidths = [2.5*cm]*len(items[0])
                colWidths[1] = (21.5-2.5*len(items[0]))*cm
                table = Table(data, colWidths=colWidths)
                table.setStyle([
                        ('FONT', (0, 0), (-1, -1), self.font_face),
                        ('FONTSIZE', (0, 0), (-1, -1), 8),
                        ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
                        ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
                        ('GRID', (-1, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
                        ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
                        ]+[('ALIGN',(k,0),(k,-1),'RIGHT') for k in righta])
                tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
                table.drawOn(canvas, 1 * cm, -8 * cm - th)

            if page == pages-1:
                items = invoice['totals'][1:]
                if items:
                    data = [invoice['totals'][0]]
                    for item in items:
                        data.append([
                                self.format_currency(x)
                                if isinstance(x,float) else x
                                for x in item])
                    righta = [k for k,v in enumerate(items[0])
                              if isinstance(v,(int,float,Decimal))]
                    total = self.format_currency(invoice['total'])
                    data.append(['']*(len(items[0])-1)+[total])
                    colWidths = [2.5*cm]*len(items[0])
                    colWidths[1] = (21.5-2.5*len(items[0]))*cm
                    table = Table(data, colWidths=colWidths)
                    table.setStyle([
                            ('FONT', (0, 0), (-1, -1), self.font_face),
                            ('FONTSIZE', (0, 0), (-1, -1), 8),
                            ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
                            ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
                            ('GRID', (-1, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
                            ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
                            ]+[('ALIGN',(k,0),(k,-1),'RIGHT') for k in righta])
                    tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
                    table.drawOn(canvas, 1 * cm, -18 * cm - th)
            canvas.showPage()
        canvas.save()
        return buffer.getvalue()
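
Because draw() returns the finished PDF via buffer.getvalue(), persisting it is one binary write. A hedged usage sketch; the class name and the exact invoice keys here are assumptions read off the method body:

pdf = PDFInvoice()  # hypothetical instance of the class defining draw()
data = pdf.draw({
    'title': 'Invoice', 'id': '2014-001', 'client_name': 'ACME',
    'from': ['My Co.'], 'to': ['ACME'], 'notes': [''], 'total': 9.99,
    'items': [['Description', 'Qty', 'Price'], ['Widget', 1, 9.99]],
    'totals': [['', '', 'Total'], ['', '', 9.99]],
})
with open('invoice.pdf', 'wb') as out:
    out.write(data)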

Example 76

Project: changes
Source File: build_index.py
View license
    def post(self):
        """
        Create a new commit or diff build. The API roughly goes like this:

        1. Identify the project(s) to build for. This can be done by specifying
        ``project``, ``repository``, or ``repository[callsign]``. If a repository is
        specified somehow, then all projects for that repository are considered
        for building.

        2. Using the ``sha``, find the appropriate revision object. This may
        involve updating the repo.

        3. If ``patch`` is given, then apply the patch and mark this as a diff build.
        Otherwise, this is a commit build.

        4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
        projects.

        5. If provided, apply project_whitelist, filtering out projects not in
        this whitelist.

        6. Based on the flag ``apply_project_files_trigger`` (see comment on the argument
        itself for default values), decide whether or not to filter out projects
        by file blacklist and whitelist.

        7. Attach metadata and create/ensure existence of a build for each project,
        depending on the flag ``ensure_only``.

        NOTE: In ensure-only mode, the collection_ids of the returned builds are
        not necessarily identical, as we give new builds new collection IDs
        and preserve the existing builds' collection IDs.

        NOTE: If ``patch`` is specified ``sha`` is assumed to be the original
        base revision to apply the patch.

        Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
        used to apply the patch. See ``find_green_parent_sha`` for the logic of
        identifying the correct revision.
        """
        args = self.parser.parse_args()

        if args.patch_file and args.ensure_only:
            return error("Ensure-only mode does not work with a diff build yet.",
                         problems=["patch", "ensure_only"])

        if not (args.project or args.repository or args['repository[phabricator.callsign]']):
            return error("Project or repository must be specified",
                         problems=["project", "repository", "repository[phabricator.callsign]"])

        # read arguments
        if args.patch_data:
            try:
                patch_data = json.loads(args.patch_data)
            except Exception:
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])

            if not isinstance(patch_data, dict):
                return error("Invalid patch data (must be JSON dict)",
                             problems=["patch[data]"])
        else:
            patch_data = None

        # 1. identify project(s)
        projects, repository = try_get_projects_and_repository(args)

        if not projects:
            return error("Unable to find project(s).")

        # read arguments
        label = args.label
        author = args.author
        message = args.message
        tag = args.tag
        snapshot_id = args.snapshot_id
        no_snapshot = args.no_snapshot

        cause = Cause[args.cause]

        if no_snapshot and snapshot_id:
            return error("Cannot specify snapshot with no_snapshot option")

        if not tag and args.patch_file:
            tag = 'patch'

        # 2. validate snapshot
        if snapshot_id:
            snapshot = Snapshot.query.get(snapshot_id)
            if not snapshot:
                return error("Unable to find snapshot.")
            if snapshot.status != SnapshotStatus.active:
                return error("Snapshot is in an invalid state: %s" % snapshot.status)
            for project in projects:
                plans = get_build_plans(project)
                for plan in plans:
                    plan_options = plan.get_item_options()
                    allow_snapshot = '1' == plan_options.get('snapshot.allow', '1') or plan.snapshot_plan
                    if allow_snapshot and not SnapshotImage.get(plan, snapshot_id):
                        # We want to create a build using a specific snapshot but no image
                        # was found for this plan so fail.
                        return error("Snapshot cannot be applied to %s's %s" % (project.slug, plan.label))

        # 3. find revision
        try:
            revision = identify_revision(repository, args.sha)
        except MissingRevision:
            # if the default fails, we absolutely can't continue and the
            # client should send a valid revision
            return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                         problems=['sha', 'repository'])

        # get default values for arguments
        if revision:
            if not author:
                author = revision.author
            if not label:
                label = revision.subject
            # only default the message if it's absolutely not set
            if message is None:
                message = revision.message
            sha = revision.sha
        else:
            sha = args.sha

        if not args.target:
            target = sha[:12]
        else:
            target = args.target[:128]

        if not label:
            if message:
                label = message.splitlines()[0]
            if not label:
                label = 'A homeless build'
        label = label[:128]

        # 4. Check for patch
        if args.patch_file:
            fp = StringIO()
            for line in args.patch_file:
                fp.write(line)
            patch_file = fp
        else:
            patch_file = None

        if patch_file:
            patch = Patch(
                repository=repository,
                parent_revision_sha=sha,
                diff=patch_file.getvalue(),
            )
            db.session.add(patch)
        else:
            patch = None

        project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])

        # mark as commit or diff build
        if not patch:
            is_commit_build = True
        else:
            is_commit_build = False

        apply_project_files_trigger = args.apply_project_files_trigger
        if apply_project_files_trigger is None:
            apply_project_files_trigger = args.apply_file_whitelist
        if apply_project_files_trigger is None:
            if is_commit_build:
                apply_project_files_trigger = False
            else:
                apply_project_files_trigger = True

        if apply_project_files_trigger:
            if patch:
                diff_parser = DiffParser(patch.diff)
                files_changed = diff_parser.get_changed_files()
            elif revision:
                try:
                    files_changed = _get_revision_changed_files(repository, revision)
                except MissingRevision:
                    return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                                 problems=['sha', 'repository'])
            else:
                # the only way that revision can be null is if this repo does not have a vcs backend
                logging.warning('Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.', sha, repository.url)
                files_changed = None
        else:
            # we won't be applying file whitelist, so there is no need to get the list of changed files.
            files_changed = None

        collection_id = uuid.uuid4()

        builds = []
        for project in projects:
            plan_list = get_build_plans(project)
            if not plan_list:
                logging.warning('No plans defined for project %s', project.slug)
                continue
            # 5. apply project whitelist as appropriate
            if args.project_whitelist is not None and project.slug not in args.project_whitelist:
                logging.info('Project %s is not in the supplied whitelist', project.slug)
                continue
            forced_sha = sha
            # TODO(dcramer): find_green_parent_sha needs to take branch
            # into account
            # if patch_file:
            #     forced_sha = find_green_parent_sha(
            #         project=project,
            #         sha=sha,
            #     )

            # 6. apply file whitelist as appropriate
            diff = None
            if patch is not None:
                diff = patch.diff
            if (
                apply_project_files_trigger and
                files_changed is not None and
                not files_changed_should_trigger_project(
                    files_changed, project, project_options[project.id], sha, diff)
            ):
                logging.info('Changed files do not trigger build for project %s', project.slug)
                continue
            # 7. create/ensure build
            build_message = None
            selective_testing_policy = SelectiveTestingPolicy.disabled
            if args.selective_testing and project_lib.contains_active_autogenerated_plan(project):
                if is_commit_build:
                    selective_testing_policy, reasons = get_selective_testing_policy(project, sha, diff)
                    if reasons:
                        if selective_testing_policy is SelectiveTestingPolicy.disabled:
                            reasons = ["Selective testing was requested but not done because:"] + ['    ' + m for m in reasons]
                        build_message = '\n'.join(reasons)
                else:
                    # NOTE: for diff builds, it makes sense to just do selective testing,
                    # since it will never become a parent build and will never be used to
                    # calculate revision results.
                    selective_testing_policy = SelectiveTestingPolicy.enabled
            if args.ensure_only:
                potentials = list(Build.query.filter(
                    Build.project_id == project.id,
                    Build.source.has(revision_sha=sha, patch=patch),
                ).order_by(
                    Build.date_created.desc()  # newest first
                ).limit(1))
                if len(potentials) == 0:
                    builds.append(create_build(
                        project=project,
                        collection_id=collection_id,
                        sha=forced_sha,
                        target=target,
                        label=label,
                        message=message,
                        author=author,
                        patch=patch,
                        source_data=patch_data,
                        tag=tag,
                        cause=cause,
                        snapshot_id=snapshot_id,
                        no_snapshot=no_snapshot,
                        selective_testing_policy=selective_testing_policy,
                    ))
                else:
                    builds.append(potentials[0])
            else:
                builds.append(create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=forced_sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    source_data=patch_data,
                    tag=tag,
                    cause=cause,
                    snapshot_id=snapshot_id,
                    no_snapshot=no_snapshot,
                    selective_testing_policy=selective_testing_policy,
                ))

            if build_message:
                message = BuildMessage(
                    build=builds[-1],
                    text=build_message,
                )
                db.session.add(message)
                db.session.commit()

        return self.respond(builds)
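
Step 4 above drains the uploaded patch line by line into a StringIO and reads it back once with getvalue(), avoiding repeated string concatenation. The pattern in isolation:

from cStringIO import StringIO

def slurp_lines(line_iterable):
    fp = StringIO()
    for line in line_iterable:
        fp.write(line)      # accumulate in memory
    return fp.getvalue()    # one final byte string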

Example 77

View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data
        f = open(filename,'rb')
        content = f.read()
        f.close()
        basename = os.path.basename(filename)
        comment = ''
        if command=='bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action':'file_upload',
            'protocol_version':'1',
            'name':self.distribution.get_name(),
            'version':self.distribution.get_version(),
            'content':(basename,content),
            'filetype':command,
            'pyversion':pyversion,
            'md5_digest':md5(content).hexdigest(),
            }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            asc_file = open(filename + ".asc")
            data['gpg_signature'] = (os.path.basename(filename) + ".asc", asc_file.read())
            asc_file.close()

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75
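
The MIME assembly above is the archetypal StringIO use: many small writes, one getvalue(). A trimmed sketch of the same encoding, assuming plain string values only (the real code also handles (filename, content) tuples):

import StringIO

def encode_multipart(fields, boundary):
    body = StringIO.StringIO()
    for name, value in fields.items():
        body.write('\n--' + boundary)
        body.write('\nContent-Disposition: form-data; name="%s"' % name)
        body.write('\n\n')
        body.write(str(value))
    body.write('\n--' + boundary + '--\n')
    return body.getvalue()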

Example 78

Project: edx-platform
Source File: xml.py
View license
    def load_course(self, course_dir, course_ids, tracker, target_course_id=None):
        """
        Load a course into this module store
        course_path: Course directory name

        returns a CourseDescriptor for the course
        """
        log.debug('========> Starting courselike import from %s', course_dir)
        with open(self.data_dir / course_dir / self.parent_xml) as course_file:

            # VS[compat]
            # TODO (cpennington): Remove this once all fall 2012 courses have
            # been imported into the cms from xml
            course_file = StringIO(clean_out_mako_templating(course_file.read()))

            course_data = etree.parse(course_file, parser=edx_xml_parser).getroot()

            org = course_data.get('org')

            if org is None:
                msg = ("No 'org' attribute set for courselike in {dir}. "
                       "Using default 'edx'".format(dir=course_dir))
                log.warning(msg)
                tracker(msg)
                org = 'edx'

            # Parent XML should be something like 'library.xml' or 'course.xml'
            courselike_label = self.parent_xml.split('.')[0]

            course = course_data.get(courselike_label)

            if course is None:
                msg = (
                    "No '{courselike_label}' attribute set for course in {dir}."
                    " Using default '{default}'".format(
                        courselike_label=courselike_label,
                        dir=course_dir,
                        default=course_dir
                    )
                )
                log.warning(msg)
                tracker(msg)
                course = course_dir

            url_name = course_data.get('url_name', course_data.get('slug'))

            if url_name:
                policy_dir = self.data_dir / course_dir / 'policies' / url_name
                policy_path = policy_dir / 'policy.json'

                policy = self.load_policy(policy_path, tracker)

                # VS[compat]: remove once courses use the policy dirs.
                if policy == {}:

                    dog_stats_api.increment(
                        DEPRECATION_VSCOMPAT_EVENT,
                        tags=(
                            "location:xml_load_course_policy_dir",
                            u"course:{}".format(course),
                        )
                    )

                    old_policy_path = self.data_dir / course_dir / 'policies' / '{0}.json'.format(url_name)
                    policy = self.load_policy(old_policy_path, tracker)
            else:
                policy = {}
                # VS[compat] : 'name' is deprecated, but support it for now...
                if course_data.get('name'):

                    dog_stats_api.increment(
                        DEPRECATION_VSCOMPAT_EVENT,
                        tags=(
                            "location:xml_load_course_course_data_name",
                            u"course:{}".format(course_data.get('course')),
                            u"org:{}".format(course_data.get('org')),
                            u"name:{}".format(course_data.get('name')),
                        )
                    )

                    url_name = Location.clean(course_data.get('name'))
                    tracker("'name' is deprecated for module xml.  Please use "
                            "display_name and url_name.")
                else:
                    url_name = None

            course_id = self.get_id(org, course, url_name)

            if course_ids is not None and course_id not in course_ids:
                return None

            def get_policy(usage_id):
                """
                Return the policy dictionary to be applied to the specified XBlock usage
                """
                return policy.get(policy_key(usage_id), {})

            services = {}
            if self.i18n_service:
                services['i18n'] = self.i18n_service

            if self.fs_service:
                services['fs'] = self.fs_service

            if self.user_service:
                services['user'] = self.user_service

            system = ImportSystem(
                xmlstore=self,
                course_id=course_id,
                course_dir=course_dir,
                error_tracker=tracker,
                load_error_modules=self.load_error_modules,
                get_policy=get_policy,
                mixins=self.xblock_mixins,
                default_class=self.default_class,
                select=self.xblock_select,
                field_data=self.field_data,
                services=services,
                target_course_id=target_course_id,
            )
            course_descriptor = system.process_xml(etree.tostring(course_data, encoding='unicode'))
            # If we fail to load the course, then skip the rest of the loading steps
            if isinstance(course_descriptor, ErrorDescriptor):
                return course_descriptor

            self.content_importers(system, course_descriptor, course_dir, url_name)

            log.debug('========> Done with courselike import from %s', course_dir)
            return course_descriptor
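
The loader above wraps the cleaned course XML in StringIO so etree.parse() can consume it like a file. The same move in isolation, assuming lxml and with an identity cleaner standing in for clean_out_mako_templating():

from cStringIO import StringIO
from lxml import etree

def parse_xml_string(xml_bytes, clean=lambda s: s):
    # StringIO turns the (possibly transformed) text into a parseable stream.
    return etree.parse(StringIO(clean(xml_bytes))).getroot()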

Example 80

Project: ijd8
Source File: io_test.py
View license
	def test(self):
		def test_put():
			key = "test_%s" % r(9)
			params = "op=3"
			data = "hello bubby!"
			extra.check_crc = 2
			extra.crc32 = binascii.crc32(data) & 0xFFFFFFFF
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret['key'] == key

		def test_put_same_crc():
			key = "test_%s" % r(9)
			data = "hello bubby!"
			extra.check_crc = 2
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret['key'] == key

		def test_put_no_key():
			data = r(100)
			extra.check_crc = 0
			ret, err = io.put(policy.token(), key=None, data=data, extra=extra)
			assert err is None
			assert ret['hash'] == ret['key']

		def test_put_quote_key():
			data = r(100)
			key = 'a\\b\\c"你好' + r(9)
			ret, err = io.put(policy.token(), key, data)
			print err
			assert err is None
			assert ret['key'].encode('utf8') == key

			data = r(100)
			key = u'a\\b\\c"你好' + r(9)
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret['key'] == key

		def test_put_unicode1():
			key = "test_%s" % r(9) + '你好'
			data = key
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_unicode2():
			key = "test_%s" % r(9) + '你好'
			data = key
			data = data.decode('utf8')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_unicode3():
			key = "test_%s" % r(9) + '你好'
			data = key
			key = key.decode('utf8')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_unicode4():
			key = "test_%s" % r(9) + '你好'
			data = key
			key = key.decode('utf8')
			data = data.decode('utf8')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_StringIO():
			key = "test_%s" % r(9)
			data = cStringIO.StringIO('hello buddy!')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret['key'] == key

		def test_put_urlopen():
			key = "test_%s" % r(9)
			data = urllib.urlopen('http://cheneya.qiniudn.com/hello_jpg')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret['key'] == key

		def test_put_no_length():
			class test_reader(object):
				def __init__(self):
					self.data = 'abc'
					self.pos = 0
				def read(self, n=None):
					if n is None or n < 0:
						newpos = len(self.data)
					else:
						newpos = min(self.pos+n, len(self.data))
					r = self.data[self.pos: newpos]
					self.pos = newpos
					return r
			key = "test_%s" % r(9)
			data = test_reader()

			extra.check_crc = 2
			extra.crc32 = binascii.crc32('abc') & 0xFFFFFFFF
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret['key'] == key

		test_put()
		test_put_same_crc()
		test_put_no_key()
		test_put_quote_key()
		test_put_unicode1()
		test_put_unicode2()
		test_put_unicode3()
		test_put_unicode4()
		test_put_StringIO()
		test_put_urlopen()
		test_put_no_length()
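
One detail worth noting from test_put_StringIO(): cStringIO.StringIO('...') returns a read-only input object over the given string, while cStringIO.StringIO() returns a writable output buffer. Unlike the pure-Python StringIO.StringIO class, the two are distinct types, and the reader has no write() method:

import cStringIO

reader = cStringIO.StringIO('hello buddy!')
print reader.read(5)        # -> hello

writer = cStringIO.StringIO()
writer.write('hello')
print writer.getvalue()     # -> hello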

Example 81

Project: py-zfec
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data
        content = open(filename,'rb').read()
        basename = os.path.basename(filename)
        comment = ''
        if command=='bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action':'file_upload',
            'protcol_version':'1',
            'name':self.distribution.get_name(),
            'version':self.distribution.get_version(),
            'content':(basename,content),
            'filetype':command,
            'pyversion':pyversion,
            'md5_digest':md5(content).hexdigest(),
            }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75

Example 82

Project: saepy-log
Source File: io_test.py
View license
	def test(self):
		def test_put():
			key = "test_%s" % r(9)
			params = "op=3"
			data = "hello bubby!"
			extra.check_crc = 2
			extra.crc32 = binascii.crc32(data) & 0xFFFFFFFF
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret['key'] == key

		def test_put_same_crc():
			key = "test_%s" % r(9)
			data = "hello bubby!"
			extra.check_crc = 2
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret['key'] == key

		def test_put_no_key():
			data = r(100)
			extra.check_crc = 0
			ret, err = io.put(policy.token(), key=None, data=data, extra=extra)
			assert err is None
			assert ret['hash'] == ret['key']

		def test_put_quote_key():
			data = r(100)
			key = 'a\\b\\c"你好' + r(9)
			ret, err = io.put(policy.token(), key, data)
			print err
			assert err is None
			assert ret['key'].encode('utf8') == key

			data = r(100)
			key = u'a\\b\\c"你好' + r(9)
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret['key'] == key

		def test_put_unicode1():
			key = "test_%s" % r(9) + '你好'
			data = key
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_unicode2():
			key = "test_%s" % r(9) + '你好'
			data = key
			data = data.decode('utf8')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_unicode3():
			key = "test_%s" % r(9) + '你好'
			data = key
			key = key.decode('utf8')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_unicode4():
			key = "test_%s" % r(9) + '你好'
			data = key
			key = key.decode('utf8')
			data = data.decode('utf8')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret[u'key'].endswith(u'你好')

		def test_put_StringIO():
			key = "test_%s" % r(9)
			data = cStringIO.StringIO('hello buddy!')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret['key'] == key

		def test_put_urlopen():
			key = "test_%s" % r(9)
			data = urllib.urlopen('http://cheneya.qiniudn.com/hello_jpg')
			ret, err = io.put(policy.token(), key, data)
			assert err is None
			assert ret['key'] == key

		def test_put_no_length():
			class test_reader(object):
				def __init__(self):
					self.data = 'abc'
					self.pos = 0
				def read(self, n=None):
					if n is None or n < 0:
						newpos = len(self.data)
					else:
						newpos = min(self.pos+n, len(self.data))
					r = self.data[self.pos: newpos]
					self.pos = newpos
					return r
			key = "test_%s" % r(9)
			data = test_reader()

			extra.check_crc = 2
			extra.crc32 = binascii.crc32('abc') & 0xFFFFFFFF
			ret, err = io.put(policy.token(), key, data, extra)
			assert err is None
			assert ret['key'] == key

		test_put()
		test_put_same_crc()
		test_put_no_key()
		test_put_quote_key()
		test_put_unicode1()
		test_put_unicode2()
		test_put_unicode3()
		test_put_unicode4()
		test_put_StringIO()
		test_put_urlopen()
		test_put_no_length()

Example 83

Project: Veil-Evasion
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (status, reason),
                          log.ERROR)
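
Unlike Examples 77 and 81, this variant posts through urllib2 rather than raw httplib. A minimal sketch of that request construction, with body and boundary assumed to come from the StringIO assembly above:

import urllib2

def post_multipart(url, body, boundary):
    headers = {
        'Content-type': 'multipart/form-data; boundary=%s' % boundary,
        'Content-length': str(len(body)),
    }
    # The example additionally sets an Authorization header on the request.
    return urllib2.urlopen(urllib2.Request(url, data=body, headers=headers))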

Example 84

Project: p2ptv-pi
Source File: WebUI.py
View license
    def doget(self, urlpath):
        if not urlpath.startswith(URLPATH_WEBIF_PREFIX):
            return streaminfo404()
        else:
            self.lastreqtime = time.time()
            try:
                fakeurl = 'http://127.0.0.1' + urlpath[len(URLPATH_WEBIF_PREFIX):]
                if DEBUG:
                    log('webui::doget: fakeurl', fakeurl)
                request_url = urlparse.urlparse(fakeurl)
            except:
                print_exc()
                return

            path = request_url[2]
            query_string = request_url[4]
            query_params = urlparse.parse_qs(query_string)
            if DEBUG:
                log('webui::doget: urlpath', urlpath, 'request_url', request_url, 'path', path, 'query_params', query_params)
            if len(path) == 0:
                if DEBUG:
                    log('webui::doget: show status page')
                page = self.statusPage()
                pageStream = StringIO(page)
                return {'statuscode': 200,
                 'mimetype': 'text/html',
                 'stream': pageStream,
                 'length': len(page)}
            if path == 'permid.js':
                try:
                    permid = encodestring(self.bgApp.s.get_permid()).replace('\n', '')
                    txt = "var permid = '%s';" % permid
                    dataStream = StringIO(txt)
                except:
                    print_exc()
                    return {'statuscode': 500,
                     'statusmsg': 'Bad permid'}

                return {'statuscode': 200,
                 'mimetype': 'text/javascript',
                 'stream': dataStream,
                 'length': len(txt)}
            if path == '/createstream':
                if DEBUG:
                    log('webui::doget: show create stream page')
                page = self.createStreamPage()
                pageStream = StringIO(page)
                return {'statuscode': 200,
                 'mimetype': 'text/html',
                 'stream': pageStream,
                 'length': len(page)}
            if path == '/dispatch':
                if 'url' not in query_params:
                    if DEBUG:
                        log('webui::doget:dispatch: missing url')
                    return streaminfo404()
                url = query_params['url'][0]
                redirect_url = 'http://127.0.0.1:6878/webui/' + url
                params = []
                for name, val in query_params.iteritems():
                    if name != 'url':
                        params.append(urllib.quote_plus(name) + '=' + urllib.quote_plus(val[0]))

                if len(params):
                    redirect_url += '?' + '&'.join(params)
                if DEBUG:
                    log('webui::doget:dispatch: redirect_url', redirect_url)
                page = '<!DOCTYPE html><html><head><script type="text/javascript">'
                page += 'parent.location.href = "' + redirect_url + '";'
                page += '</script></head><body></body></html>'
                pageStream = StringIO(page)
                return {'statuscode': 200,
                 'mimetype': 'text/html',
                 'stream': pageStream,
                 'length': len(page)}
            if path.startswith('/player/') and query_params.has_key('a') and query_params['a'][0] == 'check':
                player_id = path.split('/')[2]
                redirect_url = 'http://127.0.0.1:6878/webui/player/' + player_id
                params = []
                for name, val in query_params.iteritems():
                    if name != 'a':
                        params.append(urllib.quote_plus(name) + '=' + urllib.quote_plus(val[0]))

                if len(params):
                    redirect_url += '?' + '&'.join(params)
                if DEBUG:
                    log('webui::doget:dispatch: redirect_url', redirect_url)
                page = '<!DOCTYPE html><html><head><script type="text/javascript">'
                page += 'parent.location.href = "' + redirect_url + '";'
                page += '</script></head><body></body></html>'
                pageStream = StringIO(page)
                return {'statuscode': 200,
                 'mimetype': 'text/html',
                 'stream': pageStream,
                 'length': len(page)}
            if path.startswith('/player/'):
                player_id = path.split('/')[2]
                if DEBUG:
                    log('webui::doget: show player page: id', player_id)
                params = {}
                for name, val in query_params.iteritems():
                    params[name] = val[0]

                page = self.playerPage(player_id, params)
                pageStream = StringIO(page)
                return {'statuscode': 200,
                 'mimetype': 'text/html',
                 'stream': pageStream,
                 'length': len(page)}
            static_path = None
            json_query = None
            if path.startswith('/json/'):
                json_query = request_url[4]
            else:
                static_path = os.path.join(self.webUIPath, path[1:])
            if DEBUG:
                log('webui::doget: request parsed: static_path', static_path, 'json_query', json_query)
            if static_path is not None:
                if not os.path.isfile(static_path):
                    if DEBUG:
                        log('webui::doget: file not found:', static_path)
                    return streaminfo404()
                extension = os.path.splitext(static_path)[1]
                if extension in self.binaryExtensions:
                    mode = 'rb'
                else:
                    mode = 'r'
                fp = open(static_path, mode)
                data = fp.read()
                fp.close()
                dataStream = StringIO(data)
                return {'statuscode': 200,
                 'mimetype': self.getContentType(extension),
                 'stream': dataStream,
                 'length': len(data)}
            if json_query is not None:
                params = {}
                for s in json_query.split('&'):
                    name, value = s.split('=')
                    params[name] = value

                if DEBUG:
                    log('webui:doget: got json request:', json_query, 'params', params)
                if 'q' not in params:
                    return
                try:
                    req = urllib.unquote(params['q'])
                    if DEBUG:
                        log('webui::doget: parse json: req', req)
                    jreq = json.loads(req)
                    if DEBUG:
                        log('webui::doget: parse json done: jreq', jreq)
                except:
                    print_exc()
                    return

                try:
                    method = jreq['method']
                except:
                    return {'statuscode': 504,
                     'statusmsg': 'Json request in wrong format! At least a method has to be specified!'}

                try:
                    args = jreq['arguments']
                    if DEBUG:
                        print >> sys.stderr, 'webUI: Got JSON request: ', jreq, '; method: ', method, '; arguments: ', args
                except:
                    args = None
                    if DEBUG:
                        print >> sys.stderr, 'webUI: Got JSON request: ', jreq, '; method: ', method

                if args is None:
                    data = self.process_json_request(method)
                    if DEBUG:
                        print >> sys.stderr, 'WebUI: response to JSON ', method, ' request: ', data
                else:
                    data = self.process_json_request(method, args)
                    if DEBUG:
                        print >> sys.stderr, 'WebUI: response to JSON ', method, ' request: ', data, ' arguments: ', args
                if data == 'Args missing':
                    return {'statuscode': 504,
                     'statusmsg': 'Json request in wrong format! Arguments have to be specified!'}
                dataStream = StringIO(data)
                return {'statuscode': 200,
                 'mimetype': 'application/json',
                 'stream': dataStream,
                 'length': len(data)}
            if DEBUG:
                log('webui::doget: unknown request format: request_url', request_url)
            return streaminfo404()
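
Here cStringIO.StringIO turns each generated page string into a read-only file-like object, so one response dict can serve both disk files and in-memory pages through the same 'stream' interface. A minimal sketch of that pattern (the dict keys mirror the handler above; the page content is made up):

import cStringIO

page = '<html><body>status</body></html>'
pageStream = cStringIO.StringIO(page)  # file-like view over the string
response = {'statuscode': 200,
            'mimetype': 'text/html',
            'stream': pageStream,
            'length': len(page)}
assert response['stream'].read(6) == '<html>'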

Example 85

Project: autonetkit
Source File: graphml.py
View license
def load_graphml(input_data, defaults=True):

    # TODO: allow default properties to be passed in as dicts

    try:
        graph = nx.read_graphml(input_data)
    except IOError, e:
        acceptable_errors = set([2, 36, 63])  # 2: no such file or directory
                                              # 36/63: input string too long for a
                                              # filename (Linux / OS X errno values)
        if e.errno in acceptable_errors:
            from xml.etree.cElementTree import ParseError

            # try as data string rather than filename string

            try:
                input_pseudo_fh = StringIO(input_data)  # wrap the data string in a file-like object for networkx
                graph = nx.read_graphml(input_pseudo_fh)
            except (IOError, IndexError, ParseError):
                raise autonetkit.exception.AnkIncorrectFileFormat
        else:
            raise e

    if graph.is_multigraph():
        log.info('Input graph is multigraph. Converting to single-edge graph'
                 )
        if graph.is_directed():
            graph = nx.DiGraph(graph)
        else:
            graph = nx.Graph(graph)

    # TODO: need to support edge index keying for multi graphs
    graph.remove_edges_from(edge for edge in graph.selfloop_edges())

# TODO: if selfloops then log that are removing

    letters_single = (c for c in string.lowercase)  # a, b, c, ... z
    letters_double = ('%s%s' % (a, b) for (a, b) in
        itertools.product(string.lowercase, string.lowercase))  # aa, ab, ... zz
    letters = itertools.chain(letters_single, letters_double)  # a, b, c, .. z, aa, ab, ac, ... zz

# TODO: need to get set of current labels, and only return if not in this set

    # TODO: add cloud, host, etc
    # prefixes for unlabelled devices, ie router -> r_a

    label_prefixes = {'router': 'r', 'switch': 'sw', 'server': 'se'}

    current_labels = set(graph.node[node].get('label') for node in
                         graph.nodes_iter())
    unique_label = (letter for letter in letters if letter
        not in current_labels)

# TODO: make sure device label set

    ank_graph_defaults = settings['Graphml']['Graph Defaults']
    for (key, val) in ank_graph_defaults.items():
        if key not in graph.graph:
            graph.graph[key] = val

    # handle yEd exported booleans: if a boolean is set, then only the nodes marked true have the attribute. need to map the remainder to be false to allow ANK logic
    # for node in graph.nodes(data=True):
        # print node

    all_labels = dict((n, d.get('label')) for (n, d) in
                      graph.nodes(data=True))
    label_counts = defaultdict(list)
    for (node, label) in all_labels.items():
        label_counts[label].append(node)

    # set default name for blank labels to ensure unique

    try:
        blank_labels = [v for (k, v) in label_counts.items()
                        if not k].pop()  # strip outer list
    except IndexError:
        blank_labels = []  # no blank labels
    for (index, node) in enumerate(blank_labels):

        # TODO: log message that no label set, so setting default

        graph.node[node]['label'] = 'none___%s' % index

    duplicates = [(k, v) for (k, v) in label_counts.items() if k
                  and len(v) > 1]
    for (label, nodes) in duplicates:
        for node in nodes:

            # TODO: need to check they don't all have same ASN... if so then warn

            try:
                graph.node[node]['label'] = '%s_%s' \
                    % (graph.node[node]['label'], graph.node[node]['asn'
                       ])
            except KeyError:
                log.warning('Unable to set new label for duplicate node %s: %s'
                             % (node, graph.node[node].get('label')))

    boolean_attributes = set(k for (n, d) in graph.nodes(data=True)
                             for (k, v) in d.items() if isinstance(v,
                             bool))

    for node in graph:
        for attr in boolean_attributes:
            if attr not in graph.node[node]:
                graph.node[node][attr] = False

    boolean_attributes = set(k for (n1, d1) in graph.edge.items()
                             for (n2, d2) in d1.items() for (k, v) in
                             d2.items() if isinstance(v, bool))
    for (n1, d1) in graph.edge.items():
        for (n2, d2) in d1.items():
            for attr in boolean_attributes:
                if attr not in graph.edge[n1][n2]:
                    graph.edge[n1][n2][attr] = False

# TODO: store these in config file

    if defaults:
        ank_node_defaults = settings['Graphml']['Node Defaults']
        node_defaults = graph.graph['node_default']  # update with defaults from graphml
        for (key, val) in node_defaults.items():
            if val == 'False':
                node_defaults[key] = False

    # TODO: do a dict update before applying so only need to iterate nodes once

        for (key, val) in ank_node_defaults.items():
            if key not in node_defaults or node_defaults[key] == 'None':
                node_defaults[key] = val

        for node in graph:
            for (key, val) in node_defaults.items():
                if key not in graph.node[node]:
                    graph.node[node][key] = val

    # set address family

        graph.graph['address_family'] = 'v4'
        graph.graph['enable_routing'] = True

    # map lat/lon from zoo to crude x/y approximation

    if graph.graph.get('Creator') == 'Topology Zoo Toolset':
        all_lat = [graph.node[n].get('Latitude') for n in graph
                   if graph.node[n].get('Latitude')]
        all_lon = [graph.node[n].get('Longitude') for n in graph
                   if graph.node[n].get('Longitude')]

        lat_min = min(all_lat)
        lon_min = min(all_lon)
        lat_max = max(all_lat)
        lon_max = max(all_lon)
        lat_mean = (lat_max + lat_min) / 2
        lon_mean = (lon_max + lon_min) / 2
        lat_scale = 500 / (lat_max - lat_min)
        lon_scale = 500 / (lon_max - lon_min)
        for node in graph:
            lat = graph.node[node].get('Latitude') or lat_mean  # set default to be mean of min/max
            lon = graph.node[node].get('Longitude') or lon_mean  # set default to be mean of min/max
            graph.node[node]['y'] = -1 * lat * lat_scale
            graph.node[node]['x'] = lon * lon_scale

    if not (any(graph.node[n].get('x') for n in graph)
            and any(graph.node[n].get('y') for n in graph)):

# No x, y set, layout in a grid

        grid_length = int(math.ceil(math.sqrt(len(graph))))
        co_ords = [(x * 100, y * 100) for y in range(grid_length)
                   for x in range(grid_length)]

        # (0,0), (100, 0), (200, 0), (0, 100), (100, 100) ....

        for node in sorted(graph):
            (x, y) = co_ords.pop(0)
            graph.node[node]['x'] = x
            graph.node[node]['y'] = y

    # and ensure asn is integer, x and y are floats

    for node in sorted(graph):
        graph.node[node]['asn'] = int(graph.node[node]['asn'])
        if graph.node[node]['asn'] == 0:
            log.debug('Node %s has ASN set to 0. Setting to 1'
                      % graph.node[node]['label'])
            graph.node[node]['asn'] = 1
        try:
            x = float(graph.node[node]['x'])
        except KeyError:
            x = 0
        graph.node[node]['x'] = x
        try:
            y = float(graph.node[node]['y'])
        except KeyError:
            y = 0
        graph.node[node]['y'] = y
        try:
            graph.node[node]['label']
        except KeyError:
            device_type = graph.node[node]['device_type']
            graph.node[node]['label'] = '%s_%s' \
                % (label_prefixes[device_type], unique_label.next())

    if defaults:
        ank_edge_defaults = settings['Graphml']['Edge Defaults']
        edge_defaults = graph.graph['edge_default']
        for (key, val) in ank_edge_defaults.items():
            if key not in edge_defaults or edge_defaults[key] == 'None':
                edge_defaults[key] = val

        for (src, dst) in graph.edges():
            for (key, val) in edge_defaults.items():
                if key not in graph[src][dst]:
                    graph[src][dst][key] = val

# apply defaults
# relabel nodes
# other handling... split this into separate module!
# relabel based on label: assume unique by now!
    # if graph.graph.get("Network") == "European NRENs":
        # TODO: test if non-unique labels, if so then warn and proceed with this logic
        # we need to map node ids to contain network to ensure unique labels
        # mapping = dict( (n, "%s__%s" % (d['label'], d['asn'])) for n, d in graph.nodes(data=True))


    mapping = dict((n, d['label']) for (n, d) in graph.nodes(data=True))  # TODO: use dict comprehension
    if not all(key == val for (key, val) in mapping.items()):
        nx.relabel_nodes(graph, mapping, copy=False)  # Networkx wipes data if remap with same labels

    graph.graph['file_type'] = 'graphml'

    selfloop_count = graph.number_of_selfloops()
    if selfloop_count > 0:
        log.warning("Self loops present: do multiple nodes have the same label?")
        selfloops = ", ".join(str(e) for e in graph.selfloop_edges())
        log.warning("Removing selfloops: %s" % selfloops)
        graph.remove_edges_from(edge for edge in graph.selfloop_edges())


    return graph
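
The StringIO call here is the fallback path: if the input turns out to be GraphML data rather than a filename, wrapping it in a file-like object lets nx.read_graphml consume it unchanged. A self-contained sketch of that fallback, assuming networkx is available (the two-node graph is invented for the demo):

import cStringIO
import networkx as nx

graphml_data = '''<?xml version="1.0" encoding="UTF-8"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns">
  <graph edgedefault="undirected">
    <node id="a"/><node id="b"/>
    <edge source="a" target="b"/>
  </graph>
</graphml>'''

graph = nx.read_graphml(cStringIO.StringIO(graphml_data))
print graph.nodes()  # ['a', 'b']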

Example 86

Project: pol
Source File: keepass.py
View license
def load(f, password, keyfile=None):
    if keyfile:
        l.debug('Reading keyfile ...')
        keyfile_bit = keyfile.read()
        if len(keyfile_bit) == 32:
            pass
        elif len(keyfile_bit) == 64:
            keyfile_bit = binascii.unhexlify(keyfile_bit)
        else:
            keyfile_bit = hashlib.sha256(keyfile_bit).digest()
    else:
        keyfile_bit = None

    l.debug('Reading header ...')
    signature = f.read(8)
    if signature != SIGNATURE:
        raise KeePassFormatError('Invalid signature.  Is this a KeePass file?')
    flags, version = struct.unpack('<II', f.read(8))
    master_seed = f.read(16)
    encryption_iv = f.read(16)
    ngroups, nentries  = struct.unpack('<II', f.read(8))
    contents_hash = f.read(32)
    master_seed2 = f.read(32)
    key_enc_rounds = struct.unpack('<I', f.read(4))[0]

    if flags != FLAG_SHA2 | FLAG_RIJNDAEL:
        raise NotImplementedError

    l.debug('Deriving finalkey ...')
    if keyfile_bit:
        compositekey = hashlib.sha256(password).digest() + keyfile_bit
    else:
        compositekey = password
    finalkey = masterkey_to_finalkey(compositekey, master_seed, master_seed2,
                        key_enc_rounds)

    l.debug('Reading remaining ciphertext ...')
    ciphertext = f.read()

    l.debug('Decrypting ...')
    cipher = Crypto.Cipher.AES.new(finalkey, Crypto.Cipher.AES.MODE_CBC,
                                            encryption_iv)
    padded_plaintext = cipher.decrypt(ciphertext)
    plaintext = padded_plaintext[:-ord(padded_plaintext[-1])]

    l.debug('Verifying hash ...')
    if hashlib.sha256(plaintext).digest() != contents_hash:
        raise BadPasswordError

    l.debug('Parsing groups ...')
    groups_found = 0
    g = StringIO.StringIO(plaintext)
    groups = {}
    current_group = {}
    had = set()
    while groups_found < ngroups:
        field_type, field_size = struct.unpack('<HI', g.read(6))
        if field_type in had:
            raise KeePassFormatError("Same field type occurs twice")
        had.add(field_type)
        data = g.read(field_size)
        if field_type == 0:
            l.debug(' comment %s %s', field_type, repr(data))
        elif field_type == 1:
            if len(data) != 4:
                raise KeePassFormatError("Group ID data must be 4 bytes")
            value = struct.unpack('<I', data)[0]
            current_group['id'] = value
            l.debug(' id %s', value)
        elif field_type == 2:
            value = data[:-1].decode('utf-8')
            current_group['name'] = value
            l.debug(' name %s', value)
        elif field_type == 3:
            value = unpack_datetime(data)
            current_group['creation-time'] = value
            l.debug(' creation-time %s', value)
        elif field_type == 4:
            value = unpack_datetime(data)
            current_group['last-modification-time'] = value
            l.debug(' last-modification-time %s', value)
        elif field_type == 5:
            value = unpack_datetime(data)
            current_group['last-access-time'] = value
            l.debug(' last-access-time %s', value)
        elif field_type == 6:
            value = unpack_datetime(data)
            current_group['expiration-time'] = value
            l.debug(' expiration-time %s', value)
        elif field_type == 7:
            if len(data) != 4:
                raise KeePassFormatError("Image ID data must be 4 bytes")
            value = struct.unpack('<I', data)[0]
            current_group['image-id'] = value
            l.debug(' image-id %s', value)
        elif field_type == 8:
            if len(data) != 2:
                raise KeePassFormatError("Level data must be 2 bytes")
            value = struct.unpack('<H', data)[0]
            current_group['level'] = value
            l.debug(' level %s', value)
        elif field_type == 9:
            if len(data) != 4:
                raise KeePassFormatError("Flags data must be 2 bytes")
            value = struct.unpack('<I', data)[0]
            current_group['flags'] = value
            l.debug(' flags %s', bin(value))
        elif field_type == 0xffff:
            l.debug(' end-of-group')
            groups_found += 1
            groups[current_group['id']] = current_group
            had = set()
            current_group = {}
        else:
            l.warn(' unknown field %s %s', field_type, repr(data))

    l.debug('Parsing entries ...')
    entries_found = 0
    entries = []
    current_entry = {}
    had = set()
    while entries_found < nentries:
        field_type, field_size = struct.unpack('<HI', g.read(6))
        if field_type in had:
            raise KeePassFormatError("Same field type occurs twice")
        had.add(field_type)
        data = g.read(field_size)
        if field_type == 0:
            l.debug(' comment %s %s', field_type, repr(data))
        elif field_type == 1:
            if len(data) != 16:
                raise KeePassFormatError("UUID data must be 16 bytes")
            value = uuid.UUID(bytes=data)
            current_entry['uuid'] = value
            l.debug(' uuid %s', value)
        elif field_type == 2:
            if len(data) != 4:
                raise KeePassFormatError("Group ID data must be 16 bytes")
            value = struct.unpack("<I", data)[0]
            current_entry['group'] = value
            l.debug(' group %s', value)
        elif field_type == 3:
            if len(data) != 4:
                raise KeePassFormatError("Image ID data must be 16 bytes")
            value = struct.unpack("<I", data)[0]
            current_entry['image-id'] = value
            l.debug(' image-id %s', value)
        elif field_type == 4:
            value = data[:-1].decode('utf-8')
            current_entry['title'] = value
            l.debug(' title %s', value)
        elif field_type == 5:
            value = data[:-1].decode('utf-8')
            current_entry['url'] = value
            l.debug(' url %s', value)
        elif field_type == 6:
            value = data[:-1].decode('utf-8')
            current_entry['username'] = value
            l.debug(' username %s', value)
        elif field_type == 7:
            value = data[:-1].decode('utf-8')
            current_entry['password'] = value
            l.debug(' password %s', value)
        elif field_type == 8:
            value = data[:-1].decode('utf-8')
            current_entry['notes'] = value
            l.debug(' notes %s', value)
        elif field_type == 9:
            value = unpack_datetime(data)
            current_entry['creation-time'] = value
            l.debug(' creation-time %s', value)
        elif field_type == 10:
            value = unpack_datetime(data)
            current_entry['last-modification-time'] = value
            l.debug(' last-modification-time %s', value)
        elif field_type == 11:
            value = unpack_datetime(data)
            current_entry['last-access-time'] = value
            l.debug(' last-access-time %s', value)
        elif field_type == 12:
            value = unpack_datetime(data)
            current_entry['expiration-time'] = value
            l.debug(' expiration-time %s', value)
        elif field_type == 13:
            value = data[:-1].decode('utf-8')
            current_entry['binary-description'] = value
            l.debug(' binary-description %s', value)
        elif field_type == 14:
            value = data
            current_entry['binary-data'] = value
            l.debug(' binary-data %s', repr(value))
        elif field_type == 0xffff:
            l.debug(' end-of-entry')
            entries_found += 1
            entries.append(current_entry)
            had = set()
            current_entry = {}
        else:
            l.warn(' unknown field %s %s', field_type, repr(data))
    return (groups, entries)
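
StringIO here gives the decrypted plaintext a file interface, so the parser can g.read() fixed-size record headers and payloads sequentially with struct.unpack. A minimal sketch of the same record walk (the single record and its values are made up):

import struct
import cStringIO

# One (field_type, field_size) header followed by a 4-byte payload,
# mimicking the KeePass record layout parsed above.
blob = struct.pack('<HI', 1, 4) + struct.pack('<I', 42)
g = cStringIO.StringIO(blob)
field_type, field_size = struct.unpack('<HI', g.read(6))
data = g.read(field_size)
print field_type, struct.unpack('<I', data)[0]  # 1 42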

Example 87

Project: viper
Source File: cuckoo.py
View license
    def run(self):
        super(Cuckoo, self).run()
        if self.args is None:
            return

        # Get the connections string from config

        if cfg.cuckoo.cuckoo_host:
            cuckoo_host = cfg.cuckoo.cuckoo_host
        else:
            self.log('error', 'Cuckoo Config Not Set')
            return

        if cfg.cuckoo.cuckoo_modified:
            search_url = '{0}/api/tasks/search/sha256'.format(cuckoo_host)
            submit_file_url = '{0}/api/tasks/create/file/'.format(cuckoo_host)
            status_url = '{0}/api/cuckoo/status'.format(cuckoo_host)
        else:
            search_url = '{0}/tasks/list'.format(cuckoo_host)
            submit_file_url = '{0}/tasks/create/file'.format(cuckoo_host)
            status_url = '{0}/cuckoo/status'.format(cuckoo_host)

        if self.args.status:
            # get the JSON
            try:
                api_status = self.api_query('get', status_url).json()
            except:
                return

            if cfg.cuckoo.cuckoo_modified:
                cuckoo_version = api_status['data']['version']
                machines = '{0}/{1}'.format(api_status['data']['machines']['available'],
                                            api_status['data']['machines']['total']
                                            )
                tasks = [api_status['data']['tasks']['completed'],
                         api_status['data']['tasks']['pending'],
                         api_status['data']['tasks']['reported'],
                         api_status['data']['tasks']['running'],
                         api_status['data']['tasks']['total']
                         ]
            else:
                cuckoo_version = api_status['version']
                machines = '{0}/{1}'.format(api_status['machines']['available'],
                                            api_status['machines']['total']
                                            )
                tasks = [api_status['tasks']['completed'],
                         api_status['tasks']['pending'],
                         api_status['tasks']['reported'],
                         api_status['tasks']['running'],
                         api_status['tasks']['total']
                         ]

            self.log('info', "Cuckoo")
            self.log('item', "Version: {0}".format(cuckoo_version))
            self.log('item', "Available Machines: {0}".format(machines))

            self.log('info', "Tasks")
            self.log('item', "Completed: {0}".format(tasks[0]))
            self.log('item', "Pending: {0}".format(tasks[1]))
            self.log('item', "Reported: {0}".format(tasks[2]))
            self.log('item', "Running: {0}".format(tasks[3]))
            self.log('item', "Total: {0}".format(tasks[4]))


        if self.args.file:
            if not __sessions__.is_set():
                self.log('error', "No open session")
                return

            if not self.args.resubmit:
                # Check for existing Session
                if cfg.cuckoo.cuckoo_modified:
                    search_results = self.api_query('get', '{0}/{1}'.format(search_url, __sessions__.current.file.sha256)).json()
                    if search_results['data'] != "Sample not found in database":
                        self.log('info', "Found {0} Results".format(len(search_results['data'])))
                        rows = []
                        header = ['ID', 'Started On', 'Status', 'Completed On']
                        for result in search_results['data']:
                            rows.append([result['id'], result['started_on'], result['status'], result['completed_on']])
                        self.log('table', dict(header=header, rows=rows))
                        self.log('warning', "use -r, --resubmit to force a new analysis")
                        return
                else:
                    search_results = self.api_query('get', search_url).json()
                    count = 0
                    if 'tasks' in search_results:
                        rows = []
                        header = ['ID', 'Started On', 'Status', 'Completed On']
                        for result in search_results['tasks']:
                            try:
                                if result['sample']['sha256'] == __sessions__.current.file.sha256:
                                    rows.append([result['id'], result['started_on'], result['status'], result['completed_on']])
                                    count += 1
                            except:
                                pass
                        if len(rows) > 0:
                            self.log('info', "Found {0} Results".format(count))
                            self.log('table', dict(header=header, rows=rows))
                            self.log('warning', "use -r, --resubmit to force a new analysis")
                            return
            # Submit the file
            params = {}
            if self.args.machine:
                params['machine'] = self.args.machine
            if self.args.package:
                params['package'] = self.args.package
            if self.args.options:
                params['options'] = self.args.options

            files = {'file': (__sessions__.current.file.name, open(__sessions__.current.file.path, 'rb').read())}
            submit_file = self.api_query('post', submit_file_url, files=files, params=params).json()
            try:
                self.log('info', "Task Submitted ID: {0}".format(submit_file['task_id']))
            except KeyError:
                try:
                    self.log('info', "Task Submitted ID: {0}".format(submit_file['task_ids'][0]))
                except KeyError:
                    self.log('error', submit_file)

        if self.args.dropped and __sessions__.is_set():
            try:
                task_id = int(self.args.dropped)
            except:
                self.log('error', "Not a valid task id")
                return

            # Handle Modified-Cuckoo
            if cfg.cuckoo.cuckoo_modified:
                dropped_url = '{0}/api/tasks/get/dropped/{1}'.format(cuckoo_host, task_id)
            else:
                dropped_url = '{0}/tasks/report/{1}/dropped'.format(cuckoo_host, task_id)

            dropped_result = self.api_query('get', dropped_url)

            if dropped_result.content.startswith('BZ'):
                # explode BZ
                with tarfile.open(fileobj=StringIO(dropped_result.content)) as bz_file:
                    for item in bz_file:
                        # Write Files to tmp dir
                        if item.isreg():
                            # Create temp dirs to prevent duplicate filenames creating Errors
                            with create_temp() as temp_dir:
                                file_path = os.path.join(temp_dir, os.path.basename(item.name))
                                with open(file_path, 'wb') as out:
                                    out.write(bz_file.extractfile(item).read())
                                # Add the file
                                self.log('info', "Storing Dropped File {0}".format(item.name))
                                self.add_file(file_path,
                                              'Cuckoo_ID_{0}'.format(task_id),
                                              __sessions__.current.file.sha256
                                              )
                return

            else:
                if not __sessions__.is_set():
                    self.log('error', "No open session")
                    return
                try:
                    json_error = dropped_result.json()
                    self.log('error', json_error['data'])
                except Exception as e:
                    self.log('error', "Your broke something, {0}".format(e))

Example 88

Project: datafari
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + standard_b64encode(self.username + ":" +
                                             self.password)

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\r\n--' + boundary
        end_boundary = sep_boundary + '--\r\n'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if isinstance(value, tuple):
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""

                body.write(sep_boundary)
                body.write('\r\nContent-Disposition: form-data; name="%s"' % key)
                body.write(fn)
                body.write("\r\n\r\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        headers = {'Content-type':
                        'multipart/form-data; boundary=%s' % boundary,
                   'Content-length': str(len(body)),
                   'Authorization': auth}

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
            if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
                self.announce(msg, log.INFO)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            raise
        except HTTPError, e:
            status = e.code
            reason = e.msg

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
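
This is the same write-then-getvalue() buffering pattern sketched after Example 83; the notable differences are that this newer distutils revision uses CRLF ('\r\n') separators throughout the multipart body and raises DistutilsError on a failed upload instead of returning silently.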

Example 89

Project: OwnTube
Source File: track.py
View license
    def get_infopage(self):
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')
            
            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')

            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
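
Here StringIO acts as an append-only string builder: the info page is assembled with many small write() calls and emitted once via getvalue(), avoiding repeated string concatenation. A minimal sketch of that builder pattern (the table contents are invented):

import cStringIO

def render_rows(rows):
    s = cStringIO.StringIO()
    s.write('<table>\n')
    for name, downloads in rows:
        s.write('<tr><td>%s</td><td align="right">%i</td></tr>\n'
                % (name, downloads))
    s.write('</table>\n')
    return s.getvalue()

print render_rows([('alpha.torrent', 3), ('beta.torrent', 7)])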

Example 90

Project: securedrop
Source File: test_unit_integration.py
View license
    def helper_test_reply(self, test_reply, expected_success=True):
        test_msg = "This is a test message."

        with self.source_app as source_app:
            rv = source_app.get('/generate')
            rv = source_app.post('/create', follow_redirects=True)
            codename = session['codename']
            sid = g.sid
            # redirected to submission form
            rv = source_app.post('/submit', data=dict(
                msg=test_msg,
                fh=(StringIO(''), ''),
            ), follow_redirects=True)
            self.assertEqual(rv.status_code, 200)
            self.assertFalse(g.source.flagged)
            common.logout(source_app)

        rv = self.journalist_app.get('/')
        self.assertEqual(rv.status_code, 200)
        self.assertIn("Sources", rv.data)
        soup = BeautifulSoup(rv.data)
        col_url = soup.select('ul#cols > li a')[0]['href']

        rv = self.journalist_app.get(col_url)
        self.assertEqual(rv.status_code, 200)

        with self.source_app as source_app:
            rv = source_app.post('/login', data=dict(
                codename=codename), follow_redirects=True)
            self.assertEqual(rv.status_code, 200)
            self.assertFalse(g.source.flagged)
            common.logout(source_app)

        with self.journalist_app as journalist_app:
            rv = journalist_app.post('/flag', data=dict(
                sid=sid))
            self.assertEqual(rv.status_code, 200)

        with self.source_app as source_app:
            rv = source_app.post('/login', data=dict(
                codename=codename), follow_redirects=True)
            self.assertEqual(rv.status_code, 200)
            self.assertTrue(g.source.flagged)
            source_app.get('/lookup')
            self.assertTrue(g.source.flagged)
            common.logout(source_app)

        # Block until the reply keypair has been generated, so we can test
        # sending a reply
        _block_on_reply_keypair_gen(codename)

        # Create 2 replies to test deleting on journalist and source interface
        for i in range(2):
            rv = self.journalist_app.post('/reply', data=dict(
                sid=sid,
                msg=test_reply
            ), follow_redirects=True)
            self.assertEqual(rv.status_code, 200)

        if expected_success:
            self.assertIn("Thanks! Your reply has been stored.", rv.data)

        with self.journalist_app as journalist_app:
            rv = journalist_app.get(col_url)
            self.assertIn("reply-", rv.data)

        soup = BeautifulSoup(rv.data)

        # Download the reply and verify that it can be decrypted with the
        # journalist's key as well as the source's reply key
        sid = soup.select('input[name="sid"]')[0]['value']
        checkbox_values = [
            soup.select('input[name="doc_names_selected"]')[1]['value']]
        rv = self.journalist_app.post('/bulk', data=dict(
            sid=sid,
            action='download',
            doc_names_selected=checkbox_values
        ), follow_redirects=True)
        self.assertEqual(rv.status_code, 200)

        zf = zipfile.ZipFile(StringIO(rv.data), 'r')
        data = zf.read(zf.namelist()[0])
        self._can_decrypt_with_key(data, config.JOURNALIST_KEY)
        self._can_decrypt_with_key(data, crypto_util.getkey(sid), codename)

        # Test deleting reply on the journalist interface
        last_reply_number = len(
            soup.select('input[name="doc_names_selected"]')) - 1
        self.helper_filenames_delete(soup, last_reply_number)

        with self.source_app as source_app:
            rv = source_app.post('/login', data=dict(codename=codename),
                                 follow_redirects=True)
            self.assertEqual(rv.status_code, 200)
            rv = source_app.get('/lookup')
            self.assertEqual(rv.status_code, 200)

            if not expected_success:
                # there should be no reply
                self.assertNotIn("You have received a reply.", rv.data)
            else:
                self.assertIn(
                    "You have received a reply. For your security, please delete all replies when you're done with them.",
                    rv.data)
                self.assertIn(test_reply, rv.data)
                soup = BeautifulSoup(rv.data)
                msgid = soup.select(
                    'form.message > input[name="reply_filename"]')[0]['value']
                rv = source_app.post('/delete', data=dict(
                    sid=sid,
                    reply_filename=msgid
                ), follow_redirects=True)
                self.assertEqual(rv.status_code, 200)
                self.assertIn("Reply deleted", rv.data)

                # Make sure the reply is deleted from the filesystem
                self._wait_for(
                    lambda: self.assertFalse(
                        os.path.exists(
                            store.path(
                                sid,
                                msgid))))

                common.logout(source_app)
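
Two StringIO uses appear in this test: StringIO('') stands in for an empty file upload in the test client's form data, and zipfile.ZipFile(StringIO(rv.data)) opens the bulk-download response without touching disk. A self-contained sketch of the zip round trip (the file name and contents are made up):

import zipfile
import cStringIO

# Write a zip into memory, then read it back the way the test reads
# the bulk-download response body.
buf = cStringIO.StringIO()
zf = zipfile.ZipFile(buf, 'w')
zf.writestr('reply.gpg', 'ciphertext goes here')
zf.close()

zf = zipfile.ZipFile(cStringIO.StringIO(buf.getvalue()), 'r')
print zf.read(zf.namelist()[0])  # 'ciphertext goes here'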

Example 91

Project: fixofx
Source File: __init__.py
View license
def make_environ(inp, host, port, script_name):
    """
    Take 'inp' as if it were HTTP-speak being received on host:port,
    and parse it into a WSGI-ok environment dictionary.  Return the
    dictionary.

    Set 'SCRIPT_NAME' from the 'script_name' input, and, if present,
    remove it from the beginning of the PATH_INFO variable.
    """
    #
    # parse the input up to the first blank line (or its end).
    #

    environ = {}
    
    method_line = inp.readline()
    
    content_type = None
    content_length = None
    cookies = []
    
    for line in inp:
        if not line.strip():
            break

        k, v = line.strip().split(':', 1)
        v = v.lstrip()

        #
        # take care of special headers, and for the rest, put them
        # into the environ with HTTP_ in front.
        #

        if k.lower() == 'content-type':
            content_type = v
        elif k.lower() == 'content-length':
            content_length = v
        elif k.lower() == 'cookie' or k.lower() == 'cookie2':
            cookies.append(v)
        else:
            h = k.upper()
            h = h.replace('-', '_')
            environ['HTTP_' + h] = v
            
        if debuglevel >= 2:
            print 'HEADER:', k, v

    #
    # decode the method line
    #

    if debuglevel >= 2:
        print 'METHOD LINE:', method_line
        
    method, url, protocol = method_line.split(' ')

    # clean the script_name off of the url, if it's there.
    if not url.startswith(script_name):
        script_name = ''                # @CTB what to do -- bad URL.  scrap?
    else:
        url = url[len(script_name):]

    url = url.split('?', 1)
    path_info = url[0]
    query_string = ""
    if len(url) == 2:
        query_string = url[1]

    if debuglevel:
        print "method: %s; script_name: %s; path_info: %s; query_string: %s" % (method, script_name, path_info, query_string)

    r = inp.read()
    inp = StringIO(r)

    #
    # fill out our dictionary.
    #
    
    environ.update({ "wsgi.version" : (1,0),
                     "wsgi.url_scheme": "http",
                     "wsgi.input" : inp,           # to read for POSTs
                     "wsgi.errors" : StringIO(),
                     "wsgi.multithread" : 0,
                     "wsgi.multiprocess" : 0,
                     "wsgi.run_once" : 0,
    
                     "PATH_INFO" : path_info,
                     "QUERY_STRING" : query_string,
                     "REMOTE_ADDR" : '127.0.0.1',
                     "REQUEST_METHOD" : method,
                     "SCRIPT_NAME" : script_name,
                     "SERVER_NAME" : host,
                     "SERVER_PORT" : str(port),
                     "SERVER_PROTOCOL" : protocol,
                     })

    #
    # query_string, content_type & length are optional.
    #

    if query_string:
        environ['QUERY_STRING'] = query_string
        
    if content_type:
        environ['CONTENT_TYPE'] = content_type
        if debuglevel >= 2:
            print 'CONTENT-TYPE:', content_type
    if content_length:
        environ['CONTENT_LENGTH'] = content_length
        if debuglevel >= 2:
            print 'CONTENT-LENGTH:', content_length

    #
    # handle cookies.
    #
    if cookies:
        environ['HTTP_COOKIE'] = "; ".join(cookies)

    if debuglevel:
        print 'WSGI environ dictionary:', environ

    return environ
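
StringIO plays two roles in this WSGI shim: the unread remainder of the request becomes a readable wsgi.input, and a fresh StringIO() captures wsgi.errors in memory. A minimal sketch of those two slots (the body and header values are invented):

import cStringIO

raw_body = 'field=value'
environ = {
    'wsgi.input': cStringIO.StringIO(raw_body),  # request body, readable like a file
    'wsgi.errors': cStringIO.StringIO(),         # error stream captured in memory
    'CONTENT_LENGTH': str(len(raw_body)),
}
print environ['wsgi.input'].read(int(environ['CONTENT_LENGTH']))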

Example 92

View license
    def test_process_scpv1(self):

        # TestCommand.process should complain if supports_getinfo == False
        # We support dynamic configuration, not static

        # The exception line number may change, so we're using a regex match instead of a string match

        expected = re.compile(
            r'error_message=RuntimeError at ".+search_command\.py", line \d\d\d : Command test appears to be '
            r'statically configured for search command protocol version 1 and static configuration is unsupported by '
            r'splunklib.searchcommands. Please ensure that default/commands.conf contains this stanza:\n'
            r'\[test\]\n'
            r'filename = test.py\n'
            r'enableheader = true\n'
            r'outputheader = true\n'
            r'requires_srinfo = true\n'
            r'supports_getinfo = true\n'
            r'supports_multivalues = true\n'
            r'supports_rawargs = true')

        argv = ['test.py', 'not__GETINFO__or__EXECUTE__', 'option=value', 'fieldname']
        command = TestCommand()
        result = StringIO()

        self.assertRaises(SystemExit, command.process, argv, ofile=result)
        self.assertRegexpMatches(result.getvalue(), expected)

        # TestCommand.process should return configuration settings on Getinfo probe

        argv = ['test.py', '__GETINFO__', 'required_option_1=value', 'required_option_2=value']
        command = TestCommand()
        ifile = StringIO('\n')
        result = StringIO()

        self.assertEqual(str(command.configuration), '')

        self.assertEqual(
            repr(command.configuration),
            "[(u'clear_required_fields', None, [1]), (u'distributed', None, [2]), (u'generates_timeorder', None, [1]), "
            "(u'generating', None, [1, 2]), (u'maxinputs', None, [2]), (u'overrides_timeorder', None, [1]), "
            "(u'required_fields', None, [1, 2]), (u'requires_preop', None, [1]), (u'retainsevents', None, [1]), "
            "(u'run_in_preview', None, [2]), (u'streaming', None, [1]), (u'streaming_preop', None, [1, 2]), "
            "(u'type', None, [2])]")

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except BaseException as error:
            self.fail('{0}: {1}: {2}\n'.format(type(error).__name__, error, result.getvalue()))

        self.assertEqual('\r\n\r\n\r\n', result.getvalue())  # No message header and no configuration settings

        ifile = StringIO('\n')
        result = StringIO()

        # We might also put this sort of code into our SearchCommand.prepare override ...

        configuration = command.configuration

        # SCP v1/v2 configuration settings
        configuration.generating = True
        configuration.required_fields = ['foo', 'bar']
        configuration.streaming_preop = 'some streaming command'

        # SCP v1 configuration settings
        configuration.clear_required_fields = True
        configuration.generates_timeorder = True
        configuration.overrides_timeorder = True
        configuration.requires_preop = True
        configuration.retainsevents = True
        configuration.streaming = True

        # SCP v2 configuration settings (SCP v1 requires that maxinputs and run_in_preview are set in commands.conf)
        configuration.distributed = True
        configuration.maxinputs = 50000
        configuration.run_in_preview = True
        configuration.type = 'streaming'

        self.assertEqual(
            str(command.configuration),
            'clear_required_fields="True", generates_timeorder="True", generating="True", overrides_timeorder="True", '
            'required_fields="[u\'foo\', u\'bar\']", requires_preop="True", retainsevents="True", streaming="True", '
            'streaming_preop="some streaming command"')

        self.assertEqual(
            repr(command.configuration),
            "[(u'clear_required_fields', True, [1]), (u'distributed', True, [2]), (u'generates_timeorder', True, [1]), "
            "(u'generating', True, [1, 2]), (u'maxinputs', 50000, [2]), (u'overrides_timeorder', True, [1]), "
            "(u'required_fields', [u'foo', u'bar'], [1, 2]), (u'requires_preop', True, [1]), "
            "(u'retainsevents', True, [1]), (u'run_in_preview', True, [2]), (u'streaming', True, [1]), "
            "(u'streaming_preop', u'some streaming command', [1, 2]), (u'type', u'streaming', [2])]")

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except BaseException as error:
            self.fail('{0}: {1}: {2}\n'.format(type(error).__name__, error, result.getvalue()))

        result.reset()
        reader = csv.reader(result)
        self.assertEqual([], reader.next())
        observed = dict(izip(reader.next(), reader.next()))
        self.assertRaises(StopIteration, reader.next)

        expected = {
            'clear_required_fields': '1',                '__mv_clear_required_fields': '',
            'generating': '1',                           '__mv_generating': '',
            'generates_timeorder': '1',                  '__mv_generates_timeorder': '',
            'overrides_timeorder': '1',                  '__mv_overrides_timeorder': '',
            'requires_preop': '1',                       '__mv_requires_preop': '',
            'required_fields': 'foo,bar',                '__mv_required_fields': '',
            'retainsevents': '1',                        '__mv_retainsevents': '',
            'streaming': '1',                            '__mv_streaming': '',
            'streaming_preop': 'some streaming command', '__mv_streaming_preop': '',
        }

        self.assertDictEqual(expected, observed)  # Configuration settings reported on the getinfo probe

        for action in '__GETINFO__', '__EXECUTE__':

            # TestCommand.process should produce an error record on parser errors

            argv = [
                'test.py', action, 'required_option_1=value', 'required_option_2=value', 'undefined_option=value',
                'fieldname_1', 'fieldname_2']

            command = TestCommand()
            ifile = StringIO('\n')
            result = StringIO()

            self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)
            self.assertIn(
                'error_message=Unrecognized test command option: undefined_option="value"\r\n\r\n',
                result.getvalue())

            # TestCommand.process should produce an error record when required options are missing

            argv = ['test.py', action, 'required_option_2=value', 'fieldname_1']
            command = TestCommand()
            ifile = StringIO('\n')
            result = StringIO()

            self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)

            self.assertIn(
                'error_message=A value for test command option required_option_1 is required\r\n\r\n',
                result.getvalue())

            argv = ['test.py', action, 'fieldname_1']
            command = TestCommand()
            ifile = StringIO('\n')
            result = StringIO()

            self.assertRaises(SystemExit, command.process, argv, ifile, ofile=result)

            self.assertIn(
                'error_message=Values for these test command options are required: required_option_1, required_option_2'
                '\r\n\r\n',
                result.getvalue())

        # TestStreamingCommand.process should exit on processing exceptions

        ifile = StringIO('\naction\r\nraise_error\r\n')
        argv = ['test.py', '__EXECUTE__']
        command = TestStreamingCommand()
        result = StringIO()

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except SystemExit as error:
            self.assertNotEqual(error.code, 0)
            self.assertRegexpMatches(
                result.getvalue(),
                r'^error_message=RuntimeError at ".+", line \d+ : Testing\r\n\r\n$')
        except BaseException as error:
            self.fail('Expected SystemExit, but caught {}: {}'.format(type(error).__name__, error))
        else:
            self.fail('Expected SystemExit, but no exception was raised')

        # Command.process should provide access to search results info
        info_path = os.path.join(
            self._package_directory, 'recordings', 'scpv1', 'Splunk-6.3', 'countmatches.execute.dispatch_dir',
            'externSearchResultsInfo.csv')

        ifile = StringIO('infoPath:' + info_path + '\n\naction\r\nget_search_results_info\r\n')
        argv = ['test.py', '__EXECUTE__']
        command = TestStreamingCommand()
        result = StringIO()

        try:
            # noinspection PyTypeChecker
            command.process(argv, ifile, ofile=result)
        except BaseException as error:
            self.fail('Expected no exception, but caught {}: {}'.format(type(error).__name__, error))
        else:
            self.assertRegexpMatches(
                result.getvalue(),
                r'^\r\n'
                r'('
                r'data,__mv_data,_serial,__mv__serial\r\n'
                r'"\{.*u\'is_summary_index\': 0, .+\}",,0,'
                r'|'
                r'_serial,__mv__serial,data,__mv_data\r\n'
                r'0,,"\{.*u\'is_summary_index\': 0, .+\}",'
                r')'
                r'\r\n$'
            )

        # TestStreamingCommand.process should provide access to a service object when search results info is available

        self.assertIsInstance(command.service, Service)

        self.assertEqual(command.service.authority,
                         command.search_results_info.splunkd_uri)

        self.assertEqual(command.service.scheme,
                         command.search_results_info.splunkd_protocol)

        self.assertEqual(command.service.port,
                         command.search_results_info.splunkd_port)

        self.assertEqual(command.service.token,
                         command.search_results_info.auth_token)

        self.assertEqual(command.service.namespace.app,
                         command.search_results_info.ppc_app)

        self.assertEqual(command.service.namespace.owner,
                         None)
        self.assertEqual(command.service.namespace.sharing,
                         None)

        # Command.process should not provide access to search results info or a service object when the 'infoPath'
        # input header is unavailable

        ifile = StringIO('\naction\r\nget_search_results_info')
        argv = ['teststreaming.py', '__EXECUTE__']
        command = TestStreamingCommand()

        # noinspection PyTypeChecker
        command.process(argv, ifile, ofile=result)

        self.assertIsNone(command.search_results_info)
        self.assertIsNone(command.service)

        return
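
The StringIO plumbing in this test is worth calling out: the command writes CSV to an in-memory buffer, and the test rewinds it with reset() and walks it with csv.reader. A minimal sketch of that capture-and-read-back pattern (field names invented for illustration):

import csv
from cStringIO import StringIO
from itertools import izip

result = StringIO()
writer = csv.writer(result)
writer.writerow(['streaming', '__mv_streaming'])   # header row
writer.writerow(['1', ''])                         # value row

result.reset()                        # rewind, as the test does before reading
reader = csv.reader(result)
header = reader.next()
observed = dict(izip(header, reader.next()))       # {'streaming': '1', '__mv_streaming': ''}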

Example 93

Project: babble
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        content = open(filename,'rb').read()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protcol_version': '1',  # sic: historical misspelling; the upload server expects this exact key

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),

            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75
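
StringIO serves here as a simple accumulator: each form field is written to the buffer piece by piece, and getvalue() snapshots the finished multipart/form-data body so its length can go into Content-length. The same pattern reduced to a sketch (boundary and fields are placeholders):

import StringIO

boundary = '----------DEMOBOUNDARY'
sep_boundary = '\n--' + boundary
body = StringIO.StringIO()
for key, value in {'name': 'demo', 'version': '1.0'}.items():
    body.write(sep_boundary)
    body.write('\nContent-Disposition: form-data; name="%s"' % key)
    body.write('\n\n')
    body.write(value)
body.write(sep_boundary + '--\n')
payload = body.getvalue()            # one string; len(payload) feeds Content-length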

Example 94

View license
@ModuleInfo.plugin('wb.util.generateLaravel5Migration',
                   caption='Export Laravel 5 Migration',
                   input=[wbinputs.currentCatalog()],
                   groups=['Catalog/Utilities', 'Menu/Catalog']
                   )
@ModuleInfo.export(grt.INT, grt.classes.db_Catalog)
def generateLaravel5Migration(cat):

    def create_tree(out, schema, is_main_schema):
        table_tree = {}
        for tbl in sorted(schema.tables, key=lambda table: table.name):
            table_references = []

            for fkey in tbl.foreignKeys:
                if fkey.name != '':
                    table_references.append(fkey.referencedColumns[0].owner.name)

            table_tree[tbl.name] = table_references

        d = dict((k, set(table_tree[k])) for k in table_tree)
        r = []
        while d:
            # values not in keys (items without dep)
            t = set(i for v in d.values() for i in v) - set(d.keys())
            # and keys without value (items without dep)
            t.update(k for k, v in d.items() if not v)
            # can be done right away
            r.append(t)
            # and cleaned up
            d = dict(((k, v - t) for k, v in d.items() if v))
        return r

    def export_schema(out, schema, is_main_schema, table_tree):
        if len(schema.tables) == 0:
            return

        foreign_keys = {}
        global migration_tables
        global migrations
        tables = sorted(schema.tables, key=lambda table: table.name)
        ti = 0

        for reference_tables in table_tree:
            for reference in reference_tables:
                for tbl in tables:

                    if tbl.name != reference:
                        continue

                    table_name = tbl.name
                    components = table_name.split('_')

                    migration_tables.append(table_name)
                    migrations[ti] = []

                    migrations[ti].append(migrationBegginingTemplate.format(
                        tableNameCamelCase=("".join(x.title() for x in components[0:])),
                        tableName=table_name
                    ))

                    created_at = created_at_nullable \
                        = updated_at \
                        = updated_at_nullable \
                        = deleted_at \
                        = timestamps \
                        = timestamps_nullable = False

                    for col in tbl.columns:
                        if col.name == 'created_at':
                            created_at = True
                            if col.isNotNull != 1:
                                created_at_nullable = True
                        elif col.name == 'updated_at':
                            updated_at = True
                            if col.isNotNull != 1:
                                updated_at_nullable = True

                    if created_at is True and updated_at is True and created_at_nullable is True and updated_at_nullable is True:
                        timestamps_nullable = True
                    elif created_at is True and updated_at is True:
                        timestamps = True

                    pk_column = None
                    for col in tbl.columns:
                        if (col.name == 'created_at' or col.name == 'updated_at') and (
                                        timestamps is True or timestamps_nullable is True):
                            continue

                        if col.name == 'deleted_at':
                            deleted_at = True
                            continue

                        if col.simpleType:
                            col_type = col.simpleType.name
                            col_flags = col.simpleType.flags
                        else:
                            col_type = col.userType.name
                            col_flags = col.flags

                        primary_key = [i for i in tbl.indices if i.isPrimary == 1]
                        primary_key = primary_key[0] if len(primary_key) > 0 else None

                        if primary_key and len(primary_key.columns) == 1:
                            pk_column = primary_key.columns[0].referencedColumn

                        if col == pk_column:
                            if col_type == 'BIGINT':
                                col_type = 'BIGINCREMENTS'
                            else:
                                col_type = 'INCREMENTS'

                        col_data = '\''
                        if typesDict[col_type] == 'char':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))
                        elif typesDict[col_type] == 'decimal':
                            if col.precision > -1 and col.scale > -1:
                                col_data = '\', %s, %s' % (str(col.precision), str(col.scale))
                        elif typesDict[col_type] == 'double':
                            if col.precision > -1 and col.length > -1:
                                col_data = '\', %s, %s' % (str(col.length), str(col.precision))
                        elif typesDict[col_type] == 'enum':
                            col_data = '\', [%s]' % (col.datatypeExplicitParams[1:-1])
                        elif typesDict[col_type] == 'string':
                            if col.length > -1:
                                col_data = '\', %s' % (str(col.length))

                        if col.name == 'remember_token' and typesDict[col_type] == 'string' and col.length == 100:
                            migrations[ti].append('            $table->rememberToken();\n')
                        elif typesDict[col_type]:
                            migrations[ti].append(
                                '            $table->%s(\'%s%s)' % (typesDict[col_type], col.name, col_data))
                            if typesDict[col_type] == 'integer' and 'UNSIGNED' in col.flags:
                                migrations[ti].append('->unsigned()')
                            if col.isNotNull != 1:
                                migrations[ti].append('->nullable()')
                            if col.defaultValue != '' and col.defaultValueIsNull != 0:
                                migrations[ti].append('->default(NULL)')
                            elif col.defaultValue != '':
                                migrations[ti].append('->default(%s)' % col.defaultValue)
                            if col.comment != '':
                                migrations[ti].append('->comment(\'%s\')' % col.comment)
                            migrations[ti].append(';\n')

                    if tbl.indices:
                        migrations[ti].append("            # Indexes\n")

                        for column in tbl.indices:
                            for index in column.columns:
                                index_type = index.owner.indexType.lower()
                                key = index.referencedColumn.name

                                # Do not add index for increments
                                if not column.isPrimary:

                                    index_key_template = "            $table->{indexType}('{key}');\n".format(
                                        indexType=index_type,
                                        key=key
                                    )
                                    migrations[ti].append(index_key_template)

                    if deleted_at is True:
                        migrations[ti].append('            $table->softDeletes();\n')
                    if timestamps is True:
                        migrations[ti].append('            $table->timestamps();\n')
                    elif timestamps_nullable is True:
                        migrations[ti].append('            $table->nullableTimestamps();\n')

                    first_foreign_created = False

                    for fkey in tbl.foreignKeys:
                        if fkey.name != '':
                            index_name = fkey.index.name
                            foreign_key = fkey.columns[0].name

                            if index_name == 'PRIMARY':
                                index_name = tbl.name + "_" + fkey.columns[0].name

                            if fkey.referencedColumns[0].owner.name in migration_tables:

                                if not first_foreign_created:
                                    migrations[ti].append('\n')
                                    first_foreign_created = True

                                migrations[ti].append(foreignKeyTemplate.format(
                                        foreignKey=foreign_key,
                                        foreignKeyName=index_name,
                                        tableKeyName=fkey.referencedColumns[0].name,
                                        foreignTableName=fkey.referencedColumns[0].owner.name,
                                        onDeleteAction=fkey.deleteRule.lower(),
                                        onUpdateAction=fkey.updateRule.lower()
                                ))

                            else:
                                if fkey.referencedColumns[0].owner.name not in foreign_keys:
                                    foreign_keys[fkey.referencedColumns[0].owner.name] = []

                                foreign_keys[fkey.referencedColumns[0].owner.name].append({
                                    'table': fkey.columns[0].owner.name,
                                    'key': foreign_key,
                                    'name': index_name,
                                    'referenced_table': fkey.referencedColumns[0].owner.name,
                                    'referenced_name': fkey.referencedColumns[0].name,
                                    'update_rule': fkey.updateRule,
                                    'delete_rule': fkey.deleteRule
                                })

                    migrations[ti].append("        });\n")

                    for fkey, fval in foreign_keys.iteritems():
                        if fkey == tbl.name:
                            keyed_tables = []
                            schema_table = 0
                            for item in fval:
                                if item['table'] not in keyed_tables:
                                    keyed_tables.append(item['table'])
                                    if schema_table == 0:
                                        foreign_table_name = item['table']
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(tableName=item['table'])
                                        )
                                        schema_table = 1
                                    elif foreign_table_name != item['table']:
                                        foreign_table_name = item['table']
                                        migrations[ti].append("        });\n")
                                        migrations[ti].append('\n')
                                        migrations[ti].append(
                                            schemaCreateTemplate.format(tableName=item['table'])
                                        )
                                    migrations[ti].append(foreignKeyTemplate.format(
                                        foreignKey=item['key'],
                                        foreignKeyName=item['name'],
                                        tableKeyName=item['referenced_name'],
                                        foreignTableName=item['referenced_table'],
                                        onDeleteAction=item['delete_rule'].lower(),
                                        onUpdateAction=item['update_rule'].lower()
                                    ))

                            if schema_table == 1:
                                migrations[ti].append("        });\n")
                                migrations[ti].append('\n')

                    migrations[ti].append('    }\n')

                    ##########
                    # Reverse
                    ##########

                    migrations[ti].append(migrationDownTemplate)
                    migrations[ti].append(migrationEndingTemplate.format(tableName=table_name))
                    ti += 1

        return migrations

    out = cStringIO.StringIO()

    try:
        for schema in [(s, s.name == 'main') for s in cat.schemata]:
            table_tree = create_tree(out, schema[0], schema[1])
            migrations = export_schema(out, schema[0], schema[1], table_tree)

    except GenerateLaravel5MigrationError as e:
        Workbench.confirm(e.typ, e.message)
        return 1

    for name in sorted(migrations):
        out.write('Table name: {0}\n\n\n'.format(migration_tables[name]))
        out.write(''.join(migrations[name]))
        out.write('\n\n\n')

    sql_text = out.getvalue()
    out.close()

    wizard = GenerateLaravel5MigrationWizard(sql_text)
    wizard.run()

    return 0
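
Note that this plugin builds its output with cStringIO rather than the pure-Python StringIO module. One practical difference to keep in mind: cStringIO buffers only accept unicode that is plain ASCII, so non-ASCII table names or comments would need encoding first. A small sketch of the caveat:

import cStringIO

out = cStringIO.StringIO()
out.write('Table name: users\n')                   # byte strings are always fine
out.write(u'comment: caf\xe9\n'.encode('utf-8'))   # encode non-ASCII unicode before writing
text = out.getvalue()
out.close()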

Example 95

Project: pol
Source File: psafe3.py
View license
def load(f, password):
    l.debug('Reading header ...')
    tag = f.read(4)
    if tag != TAG:
        raise PSafe3FormatError("Tag is wrong.  Is this a PSafe3 file?")
    salt = f.read(32)
    niter = struct.unpack("<I", f.read(4))[0]

    l.debug('Stretching password ...')
    P2 = stretch_key(password, salt, niter)
    HP2 = hashlib.sha256(P2).digest()
    if HP2 != f.read(32):
        raise BadPasswordError

    l.debug('Reading header ...')
    m = twofish.Twofish(P2)
    K = m.decrypt(f.read(16)) + m.decrypt(f.read(16))
    L = m.decrypt(f.read(16)) + m.decrypt(f.read(16))
    IV = f.read(16)

    m = twofish.Twofish(K)
    prev_ct = IV

    l.debug('Decrypting ...')
    plaintext = ''
    hmac_data = ''
    while True:
        ct = f.read(16)
        if ct == EOF:
            break
        plaintext += sxor(m.decrypt(ct), prev_ct)
        prev_ct = ct

    l.debug('Reading decrypted header ...')
    g = StringIO.StringIO(plaintext)
    in_header = True
    header = {}
    record = {}
    records = []
    had = set()
    while True:
        field = g.read(5)
        if not field:
            break
        length, t = struct.unpack("<IB", field)
        d = g.read(length)
        hmac_data += d
        if t in had:
            l.warn("Field type %s occurs twice", t)
        had.add(t)
        if in_header:
            if t == 0:
                header['version']  = struct.unpack("<H", d)[0]
            elif t == 1:
                header['uuid'] = uuid.UUID(bytes=d)
            elif t == 2:
                header['non-default-preferences'] = d
            elif t == 3:
                header['tree-display-status'] = d
            elif t == 4:
                header['last-save'] = unpack_ts(d)
            elif t == 5:
                header['last-save-who'] = d
            elif t == 6:
                header['last-save-what'] = d
            elif t == 7:
                header['last-save-by-user'] = d
            elif t == 8:
                header['last-save-on-host'] = d
            elif t == 9:
                header['database-name'] = d
            elif t == 10:
                header['database-description'] = d
            elif t == 11:
                header['database-filters'] = d
            elif t == 15:
                header['recently-used-filters'] = d
            elif t == 16:
                header['named-password-policies'] = d
            elif t == 17:
                header['empty-groups'] = d
            elif t == 255:
                in_header = False
                had = set()
            else:
                l.warn("Unknown header field: type %s; data %s",
                            t, repr(d))
        else:
            if t == 1:
                record['uuid'] = uuid.UUID(bytes=d)
            elif t == 2:
                record['group'] = d
            elif t == 3:
                record['title'] = d
            elif t == 4:
                record['username'] = d
            elif t == 5:
                record['notes'] = d
            elif t == 6:
                record['password'] = d
            elif t == 7:
                record['creation-time'] = unpack_ts(d)
            elif t == 8:
                record['password-modification-time'] = unpack_ts(d)
            elif t == 9:
                record['last-access-time'] = unpack_ts(d)
            elif t == 10:
                record['password-expiry-time'] = unpack_ts(d)
            elif t == 12:
                record['last-modification-time'] = unpack_ts(d)
            elif t == 13:
                record['url'] = d
            elif t == 14:
                record['autotype'] = d
            elif t == 15:
                record['password-history'] = d
            elif t == 16:
                record['password-policy'] = d
            elif t == 17:
                record['password-expiry-interval'] = d
            elif t == 18:
                record['run-command'] = d
            elif t == 19:
                record['double-click-action'] = d
            elif t == 20:
                record['email-address'] = d
            elif t == 21:
                record['protected-entry'] = (d != chr(0))
            elif t == 22:
                record['own-symbols-for-password'] = d
            elif t == 23:
                record['shift-double-click-action'] = d
            elif t == 24:
                record['password-policy-name'] = d
            elif t == 255:
                records.append(record)
                record = {}
                had = set()
            else:
                l.warn("Unknown record field: type %s; data %s",
                            t, repr(d))
        tl = length + 5
        if tl % 16 != 0:
            g.read(16 - (tl % 16))
    l.debug('Checking HMAC ...')
    if hmac.new(L, hmac_data, hashlib.sha256).digest() != f.read(32):
        raise IntegrityError
    return (header, records)
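
The parsing strategy is a common StringIO idiom: decrypt everything into a single byte string, wrap it in a file-like buffer, then consume it with fixed-size read() calls and struct.unpack. A self-contained sketch of the field-walking loop (the two-field layout below is invented):

import struct
import StringIO

blob = struct.pack('<IB', 2, 7) + 'ab' + struct.pack('<IB', 0, 255)
g = StringIO.StringIO(blob)
while True:
    field = g.read(5)                 # 4-byte length + 1-byte type, as in psafe3
    if not field:
        break
    length, ftype = struct.unpack('<IB', field)
    data = g.read(length)
    print ftype, repr(data)           # 7 'ab', then the 255 end-of-section marker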

Example 96

Project: pol
Source File: test_import_keepass.py
View license
    def test_keepass_load1(self):
        self.assertEqual(pol.importers.keepass.load(
                                StringIO.StringIO(TEST_DB2),
                                'test', StringIO.StringIO(TEST_KEYFILE2)),
            ({489459835: {'creation-time': datetime(2999, 12, 28, 23, 59, 59),
                          'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
                          'flags': 0,
                          'id': 489459835,
                          'image-id': 0,
                          'last-access-time': datetime(2999, 12, 28, 23, 59, 59),
                          'last-modification-time': datetime(2999, 12, 28, 23, 59, 59),
                          'level': 0,
                          'name': u'Group 2'},
              2437480029: {'creation-time': datetime(2999, 12, 28, 23, 59, 59),
                           'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
                           'flags': 0,
                           'id': 2437480029,
                           'image-id': 0,
                           'last-access-time': datetime(2999, 12, 28, 23, 59, 59),
                           'last-modification-time': datetime(2999, 12, 28, 23, 59, 59),
                           'level': 0,
                           'name': u'Group 1'},
              2922083484: {'creation-time': datetime(2999, 12, 28, 23, 59, 59),
                           'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
                           'flags': 0,
                           'id': 2922083484,
                           'image-id': 0,
                           'last-access-time': datetime(2999, 12, 28, 23, 59, 59),
                           'last-modification-time': datetime(2999, 12, 28, 23, 59, 59),
                           'level': 1,
                           'name': u'Group 1.1'}},
             [{'binary-data': '',
               'binary-description': u'',
               'creation-time': datetime(2013, 4, 19, 20, 39, 18),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 489459835,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 39, 34),
               'last-modification-time': datetime(2013, 4, 19, 20, 39, 34),
               'notes': u'comment 5',
               'password': u'j:4O_nuR;Q-drfx\\9cddd(N;h=NpCVO<',
               'title': u'passphrase 5',
               'url': u'url 5',
               'username': u'username 5',
               'uuid': UUID('568f7151-3c51-498f-2417-c8bb802f97a1')},
              {'binary-data': '',
               'binary-description': u'',
               'creation-time': datetime(2013, 4, 19, 20, 39, 37),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 489459835,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 39, 50),
               'last-modification-time': datetime(2013, 4, 19, 20, 39, 50),
               'notes': u'comment 6',
               'password': u'#4XOkEEcH7-C%ON.YzI<8`9V_8"]Py:N',
               'title': u'passphrase 6',
               'url': u'url 6',
               'username': u'username 6',
               'uuid': UUID('698f7151-cdc7-3271-7da8-025a3f253fd4')},
              {'binary-data': '',
               'binary-description': u'',
               'creation-time': datetime(2013, 4, 19, 20, 38, 55),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 2922083484,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 39, 12),
               'last-modification-time': datetime(2013, 4, 19, 20, 39, 12),
               'notes': u'comment 4',
               'password': u'"fw6,Ll!TcCH3N&+_H>har5--Ja(f17!',
               'title': u'passphrase 4',
               'url': u'url 4',
               'username': u'username 4',
               'uuid': UUID('3f8f7151-ea1e-3f4b-5814-4ad9f6b533e7')},
              {'binary-data': '',
               'binary-description': u'',
               'creation-time': datetime(2013, 4, 19, 20, 38, 22),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 2437480029,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 38, 38),
               'last-modification-time': datetime(2013, 4, 19, 20, 38, 38),
               'notes': u'comment 2',
               'password': u"{wt_Xv'inhmSRlCpi-t}%)s}bt=8x:?^",
               'title': u'passphrase 2',
               'url': u'url 2',
               'username': u'username 2',
               'uuid': UUID('1e8f7151-cf05-ea7b-6bcd-be2bb591d496')},
              {'binary-data': '',
               'binary-description': u'',
               'creation-time': datetime(2013, 4, 19, 20, 37, 39),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 2437480029,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 38, 14),
               'last-modification-time': datetime(2013, 4, 19, 20, 38, 14),
               'notes': u'comment 1',
               'password': u"3d,,~{66JWKw'-3_yx'-cE>'h70hO%bO",
               'title': u'passphrase 1',
               'url': u'url 1',
               'username': u'username 1',
               'uuid': UUID('f38e7151-a29d-81ae-415e-e8f3e39a9592')},
              {'binary-data': '',
               'binary-description': u'',
               'creation-time': datetime(2013, 4, 19, 20, 38, 41),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 2922083484,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 38, 53),
               'last-modification-time': datetime(2013, 4, 19, 20, 38, 53),
               'notes': u'comment 3',
               'password': u"Mx\\_L]}>,B_:$2u3}(XqQ'IT^P-n8~%Q",
               'title': u'passphrase 3',
               'url': u'url 3',
               'username': u'username 3',
               'uuid': UUID('318f7151-42be-0221-dde5-343e7ed49742')},
              {'binary-data': '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
               'binary-description': u'bin-stream',
               'creation-time': datetime(2013, 4, 19, 20, 40, 5),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 2437480029,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 40, 5),
               'last-modification-time': datetime(2013, 4, 19, 20, 40, 5),
               'notes': u'KPX_CUSTOM_ICONS_4',
               'password': u'',
               'title': u'Meta-Info',
               'url': u'$',
               'username': u'SYSTEM',
               'uuid': UUID('00000000-0000-0000-0000-000000000000')},
              {'binary-data': '\x03\x00\x00\x00]\xfeH\x91\x01{\x90,\x1d\x00\x9ct+\xae\x00',
               'binary-description': u'bin-stream',
               'creation-time': datetime(2013, 4, 19, 20, 40, 5),
               'expiration-time': datetime(2999, 12, 28, 23, 59, 59),
               'group': 2437480029,
               'image-id': 0,
               'last-access-time': datetime(2013, 4, 19, 20, 40, 5),
               'last-modification-time': datetime(2013, 4, 19, 20, 40, 5),
               'notes': u'KPX_GROUP_TREE_STATE',
               'password': u'',
               'title': u'Meta-Info',
               'url': u'$',
               'username': u'SYSTEM',
               'uuid': UUID('00000000-0000-0000-0000-000000000000')}]))

Example 97

Project: VizAlerts
Source File: emailaction.py
View license
def send_email(fromaddr, toaddrs, subject, content, ccaddrs=None, bccaddrs=None, inlineattachments=None,
               appendattachments=None):
    """Generic function to send an email. The presumption is that all arguments have been validated prior to the call
        to this function.

    Input arguments are:
        fromaddr    single email address
        toaddr      string of recipient email addresses separated by the list of separators in EMAIL_RECIP_SPLIT_REGEX
        subject     string that is subject of email
        content     body of email, may contain HTML
        ccaddrs     cc recipients, see toaddr
        bccaddrs    bcc recipients, see toaddr
        inlineattachments   List of vizref dicts where each dict has one attachment. The minimum dict has an
                            imagepath key that points to the file to be attached.
        appendattachments   Appended (non-inline attachments). See inlineattachments for details on structure.

    Nothing is returned by this function unless there is an exception.

    """
    try:
        log.logger.info(
            u'sending email: {},{},{},{},{},{},{},{}'.format(config.configs['smtp.serv'], fromaddr, toaddrs, ccaddrs,
                                                             bccaddrs, subject, inlineattachments, appendattachments))
        log.logger.debug(u'email body: {}'.format(content))

        # using mixed type because there can be inline and non-inline attachments
        msg = MIMEMultipart('mixed')
        msg.set_charset('utf-8')
        msg.preamble = subject.encode('utf-8')
        msg['From'] = Header(fromaddr)
        msg['Subject'] = Header(subject.encode('utf-8'), 'UTF-8').encode()

        # Process direct recipients
        toaddrs = re.split(EMAIL_RECIP_SPLIT_REGEX, toaddrs.strip())
        msg['To'] = Header(', '.join(toaddrs))
        allrecips = toaddrs

        # Process indirect recipients
        if ccaddrs:
            ccaddrs = re.split(EMAIL_RECIP_SPLIT_REGEX, ccaddrs.strip())
            msg['CC'] = Header(', '.join(ccaddrs))
            allrecips.extend(ccaddrs)

        if bccaddrs:
            bccaddrs = re.split(EMAIL_RECIP_SPLIT_REGEX, bccaddrs.strip())
            # don't add to header, they are blind carbon-copied
            allrecips.extend(bccaddrs)

        # Create a section for the body and inline attachments
        msgalternative = MIMEMultipart(u'related')
        msg.attach(msgalternative)
        msgalternative.attach(MIMEText(content.encode('utf-8'), 'html', 'utf-8'))

        # Add inline attachments
        if inlineattachments != None:
            for vizref in inlineattachments:
                msgalternative.attach(mimify_file(vizref['imagepath'], inline=True))

        # Add appended attachments from Email Attachments field and prevent dup custom filenames
        #  MC: Feels like this code should be in VizAlert class? Or module? Not sure, leaving it here for now
        appendedfilenames = []
        if appendattachments != None:
            appendattachments = vizalert.merge_pdf_attachments(appendattachments)
            for vizref in appendattachments:
                # if there is no |filename= option set then use the exported imagepath
                if 'filename' not in vizref:
                    msg.attach(mimify_file(vizref['imagepath'], inline=False))
                else:
                    # we need to make sure the custom filename is unique, if so then
                    # use the custom filename
                    if vizref['filename'] not in appendedfilenames:
                        appendedfilenames.append(vizref['filename'])
                        msg.attach(mimify_file(vizref['imagepath'], inline=False, overridename=vizref['filename']))
                    # use the exported imagepath
                    else:
                        msg.attach(mimify_file(vizref['imagepath'], inline=False))
                        log.logger.info(u'Warning: attempted to attach duplicate filename ' + vizref[
                            'filename'] + ', using unique auto-generated name instead.')

        server = smtplib.SMTP(config.configs['smtp.serv'], config.configs['smtp.port'])
        if config.configs['smtp.ssl']:
            server.ehlo()
            server.starttls()
        if config.configs['smtp.user']:
            server.login(str(config.configs['smtp.user']), str(config.configs['smtp.password']))

        # from http://wordeology.com/computer/how-to-send-good-unicode-email-with-python.html
        io = StringIO()
        g = Generator(io, False)  # second argument means "should I mangle From?"
        g.flatten(msg)

        server.sendmail(fromaddr.encode('utf-8'), [addr.encode('utf-8') for addr in allrecips], io.getvalue())
        server.quit()
    except smtplib.SMTPConnectError as e:
        log.logger.error(u'Email failed to send; there was an issue connecting to the SMTP server: {}'.format(e))
        raise e
    except smtplib.SMTPHeloError as e:
        log.logger.error(u'Email failed to send; the SMTP server refused our HELO message: {}'.format(e))
        raise e
    except smtplib.SMTPAuthenticationError as e:
        log.logger.error(u'Email failed to send; there was an issue authenticating to SMTP server: {}'.format(e))
        raise e
    except smtplib.SMTPException as e:
        log.logger.error(u'Email failed to send; there was an issue sending mail via SMTP server: {}'.format(e))
        raise e
    except Exception as e:
        log.logger.error(u'Email failed to send: {}'.format(e))
        raise e
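
The Generator-over-StringIO step near the end is the interesting StringIO use: flattening the message manually (instead of msg.as_string()) lets the caller turn off From-line mangling before handing the raw string to smtplib. In isolation:

from cStringIO import StringIO   # StringIO.StringIO works identically here
from email.generator import Generator
from email.mime.text import MIMEText

msg = MIMEText('hello', 'plain', 'utf-8')
msg['Subject'] = 'demo'

io = StringIO()
g = Generator(io, False)     # mangle_from_=False: leave "From " lines alone
g.flatten(msg)
raw = io.getvalue()          # the string smtplib's sendmail() expects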

Example 98

Project: nzbget-subliminal
Source File: upload.py
View license
    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data
        f = open(filename,'rb')
        content = f.read()
        f.close()
        basename = os.path.basename(filename)
        comment = ''
        if command=='bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action':'file_upload',
            'protcol_version':'1',  # sic: historical misspelling; the upload server expects this exact key
            'name':self.distribution.get_name(),
            'version':self.distribution.get_version(),
            'content':(basename,content),
            'filetype':command,
            'pyversion':pyversion,
            'md5_digest':md5(content).hexdigest(),
            }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75

Example 99

Project: btb
Source File: fetch_mailboxforwarding_mail.py
View license
    def handle(self, *args, **kwargs):
        base_url = "https://www.mailboxforwarding.com/"
        
        if (not hasattr(settings, "MAILBOX_FORWARDING") or 
                not "username" in settings.MAILBOX_FORWARDING or
                not "password" in settings.MAILBOX_FORWARDING):
            print "Requires MAILBOX_FORWARDING settings, e.g.:"
            print 'MAILBOX_FORWARDING = {'
            print '  "username": "[email protected]",'
            print '  "password": "secret",'
            print '}'
            print "exit 1"
            sys.exit(1)

        sess = requests.Session()
        res = sess.post(base_url + "manage/login.php", {
            "action": "login",
            "email": settings.MAILBOX_FORWARDING["username"],
            "password": settings.MAILBOX_FORWARDING["password"],
            "loginsubmit.x": "0",
            "loginsubmit.y": "0"
        })
        # This is a slightly dirty hack -- we're matching a javascript data
        # structure with a regex, converting the quotes to doubles so it resembles
        # JSON, and then loading it as JSON.  This may prove brittle.
        match = re.search(r"Ext\.grid\.dummyData = (\[.*\]\]);", res.text, re.DOTALL)
        if not match:
            raise Exception("Can't find data. Are login creds correct?")
        text = match.group(1)
        text = text.replace('"', '\\"')
        text = text.replace("'", '"')
        data = json.loads(text)

        scans = {}
        packages = {}
        for checkbox, date, envelope, type_status, dl in data:
            details = {}
            match = re.search("Type: <b>([^<]+)</b>.*Status: <b>([^<]+)</b>", type_status)
            if not match:
                raise Exception("Can't match type/status")
            details['kind'] = match.group(1)
            details['status'] = match.group(2)

            if details['kind'] == "Letter" and details['status'] != "Scanned":
                continue

            match = re.search("pdfview.php\?id=(\d+)", dl)
            if match:
                id_ = match.group(1)
            else:
                # TODO: Handle packages correctly
                continue
                #raise Exception("Can't find ID")

            match = re.search("src=\"([^\"]+)\"", envelope)
            if not match:
                raise Exception("Can't match envelope image")
            details['envelope'] = match.group(1)


            if details['status'] == "Scanned":
                scans[id_] = details
            elif details['kind'] != "Letter":
                packages[id_] = details

        uploader = User.objects.get(username="uploader")
        org = Organization.objects.get(pk=1) #TODO: generalize this? 

        new_count = 0
        for id_, details in scans.iteritems():
            source_id = "mailboxforwarding.com-{}".format(id_)
            if Scan.objects.filter(source_id=source_id).exists():
                continue
            new_count += 1

            print "Downloading pdf", source_id
            res = sess.get("{}manage/pdfview.php?id={}".format(base_url, id_))
            in_pdf_fh = StringIO()
            in_pdf_fh.write(res.content)
            in_pdf_fh.seek(0)
            reader = PdfFileReader(in_pdf_fh)

            print "Downloading envelope", details['envelope']
            res = sess.get(details['envelope'])
            in_envelope_fh = StringIO()
            in_envelope_fh.write(res.content)
            in_envelope_fh.seek(0)
            img = Image.open(in_envelope_fh)
            out_envelope_fh = StringIO()
            img.save(out_envelope_fh, "pdf")
            envelope_reader = PdfFileReader(out_envelope_fh)

            writer = PdfFileWriter()
            writer.addPage(envelope_reader.getPage(0))
            for page in range(reader.getNumPages()):
                writer.addPage(reader.getPage(page))

            with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as fh:
                writer.write(fh)
                dest_pdf_name = fh.name

            in_envelope_fh.close()
            out_envelope_fh.close()
            in_pdf_fh.close()

            path = tasks.move_scan_file(filename=dest_pdf_name)
            scan = Scan.objects.create(
                uploader=uploader,
                pdf=os.path.relpath(path, settings.MEDIA_ROOT),
                under_construction=True,
                org=org,
                source_id=source_id
            )
            tasks.split_scan(scan=scan)

        if packages:
            print "Manual action needed on the following at " \
                  "https://www.mailboxforwarding.com/:"
            for id_, details in packages.iteritems():
                new_count += 1
                print details
        print "Examined {} letters, {} new.".format(len(data), new_count)

Example 100

Project: FriendlyTorrent
Source File: track.py
View license
    def get_infopage(self):
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')
            
            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td></tr>\n'
                            % (nf, tc, td, tn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')

            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
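
One last note on the pattern that runs through most of these examples: building a large string by repeated write() calls on a StringIO and snapshotting it once with getvalue() can sidestep quadratic string concatenation, which matters for a page regenerated on every tracker request. The skeleton, with illustrative rows:

from cStringIO import StringIO

s = StringIO()
s.write('<html><body><table>\n')
for name, seeds in [('alpha', 3), ('beta', 5)]:
    s.write('<tr><td>%s</td><td align="right">%i</td></tr>\n' % (name, seeds))
s.write('</table></body></html>\n')
page = s.getvalue()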