sys.stdout.writelines

Here are examples of the Python API sys.stdout.writelines, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
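
Before the project examples, a minimal standalone sketch (not from any project below) illustrates the one behavior that most often trips people up: writelines() adds no newlines or separators of its own, so any line endings must already be present in the strings. And because a str is itself an iterable of one-character strings, passing a single string behaves like write().

import sys

lines = ["first\n", "second\n"]
sys.stdout.writelines(lines)    # writes both lines; no separators are added
sys.stdout.writelines("third")  # a single str also works, equivalent to write("third")
sys.stdout.write("\n")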

17 Examples

Example 1

Project: CDQR Source File: cdqr.py
Function: query_yes_no
def query_yes_no(question, default="yes"):
	if default == "yes":
		prompt = " [Y/n]"
		yes = set(['yes','y', 'ye', ''])
		no = set(['no','n'])
	else:
		prompt = " [y/N]"
		yes = set(['yes','y', 'ye'])
		no = set(['no','n',''])

	while True:
		sys.stdout.writelines(question + prompt+": ")
		choice = input().lower()
		if choice in yes:
			return True
		elif choice in no:
			return False
		else:
			sys.stdout.write("Please respond with 'yes' or 'no'")

Example 2

Project: CDQR Source File: cdqr.py
def query_plaso_location():
	# This prompts the user for a Plaso location and confirms it exists
	# before returning a validated file location
	while True:
		sys.stdout.writelines("Please enter valid location for Plaso directory: ")
		p_path = input()
		# Verify files exist
		l2t_loc = p_path.rstrip('\\')+"\\log2timeline.exe"
		p_loc = p_path.rstrip('\\')+"\\psort.exe"
		if not os.path.isfile(l2t_loc):
			print("ERROR: "+l2t_loc+" does not exist")
		else:
			if not os.path.isfile(p_loc):
				print("ERROR: "+p_loc+" does not exist")
			else:
				return l2t_loc, p_loc

Example 3

Project: CDQR Source File: cdqr.py
def status_marker(myproc):
	counter = 1
	while myproc.poll() is None:
		if counter%2 == 0:
			sys.stdout.writelines("| Still working...\r")
		else:
			sys.stdout.writelines("- Still working...\r")
		sys.stdout.flush()
		counter+=1
		time.sleep(1)

	if myproc.poll() != 0:
		print("ERROR: There was a problem. See log for details in log.")
		mylogfile.writelines("ERROR: There was a problem. See details in log.\n")
		print("\nExiting.......")
		sys.exit(1)

Example 4

Project: Veil-Evasion Source File: logmerge.py
Function: format_output
def format_output(database):
    prevtext = None
    prev = []
    database.append((None, None, None, None, None)) # Sentinel
    for (date, working_file, rev, author, text) in database:
        if text != prevtext:
            if prev:
                print(sep2, end=' ')
                for (p_date, p_working_file, p_rev, p_author) in prev:
                    print(p_date, p_author, p_working_file, p_rev)
                sys.stdout.writelines(prevtext)
            prev = []
        prev.append((date, working_file, rev, author))
        prevtext = text

Example 5

Project: ecogwiki Source File: merge3.py
Function: main
def main(argv):
    # as for diff3 and meld the syntax is "MINE BASE OTHER"
    a = open(argv[1], 'rt').readlines()
    base = open(argv[2], 'rt').readlines()
    b = open(argv[3], 'rt').readlines()

    m3 = Merge3(base, a, b)

    #for sr in m3.find_sync_regions():
    #    print(sr)

    # sys.stdout.writelines(m3.merge_lines(name_a=argv[1], name_b=argv[3]))
    sys.stdout.writelines(m3.merge_annotated())

Example 6

Project: svtools Source File: lsort.py
Function: execute
    def execute(self):
        
        counter = 0
        for vcf_file_name in self.vcf_file_names:
            samples = l_bp.parse_vcf(vcf_file_name, self.vcf_lines, self.vcf_headers)
            for sample in samples:
                self.vcf_headers.append("##SAMPLE=<ID=" + sample + ">\n")
            counter += 1
            if counter > self.batchsize:
                self.vcf_lines.sort(key=l_bp.vcf_line_key)
                self.write_temp_file()
                counter = 0
        # the final batch is not written to a temp file; it is merged from memory below
        self.write_header()

        self.vcf_lines.sort(key=l_bp.vcf_line_key)
        iterables = self.temp_files + [self.vcf_lines]
        sys.stdout.writelines(merge(*iterables))

Example 7

Project: svtools Source File: lsort.py
Function: write_header
    def write_header(self):
        self.vcf_headers.append("##INFO=<ID=SNAME,Number=.,Type=String," + \
            "Description=\"Source sample name\">\n")
        self.vcf_headers.append("##INFO=<ID=ALG,Number=1,Type=String," + \
            "Description=\"Evidence PDF aggregation algorithm\">\n")
        self.vcf_headers.append("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\t" + \
            "VARIOUS\n")
        self.vcf_headers.sort(cmp=l_bp.header_line_cmp)  # Python 2 cmp=; on Python 3, use key=functools.cmp_to_key(l_bp.header_line_cmp)
        sys.stdout.writelines(self.vcf_headers)

Example 8

Project: KhtNotes Source File: merge3.py
Function: main
def main(argv):
    # as for diff3 and meld the syntax is "MINE BASE OTHER"
    a = open(argv[1], 'rt').readlines()
    base = open(argv[2], 'rt').readlines()
    b = open(argv[3], 'rt').readlines()

    m3 = Merge3(base, a, b)

    #for sr in m3.find_sync_regions():
    #    print(sr)

    # sys.stdout.writelines(m3.merge_lines(name_a=argv[1], name_b=argv[3]))
    sys.stdout.writelines(m3.merge())

Example 9

Project: filtered_websocket Source File: stdout_messages.py
Function: filter
    @classmethod
    def filter(cls, web_socket_instance, msg):
        sys.stdout.writelines(
            "--MESSAGE--\n%s => %s\n" % (web_socket_instance.id, msg)
        )
        sys.stdout.flush()

Example 10

Project: filtered_websocket Source File: stdout_pubsub.py
Function: filter
    @classmethod
    def filter(cls, web_socket_instance, data):
        sys.stdout.writelines(
            "--PUBSUB--\n%s\n" % (data)
        )
        sys.stdout.flush()

Example 11

Project: backuppc-archive-s3 Source File: backup-manager.py
def main():
    # check command line options
    parser = optparse.OptionParser(
        usage="usage: %prog [options] [list|delete|script]",
        description="" +
            "Companion maintenance script for BackupPC_archiveHost_s3. " +
            "By default, it assumes the 'list' command, which displays all " +
            "of the backups currently archived on S3.  The 'delete' command " +
            "is used to delete backups.  The 'script' command produces a " +
            "script that can be used to download and restore a backup.")
    parser.add_option("-H", "--host", dest="host",
                      help="Name of backed-up host")
    parser.add_option("-b", "--backup-number", dest="backupnum",
                      help="Backup number")
    parser.add_option("-a", "--age", dest="age",
                      help="Delete backups older than AGE days")
    parser.add_option("-k", "--keep", dest="keep",
                      help="When used with --age, keep this many recent " +
                           "backups (default=1)", default=1)
    parser.add_option("-f", "--filename", dest="filename",
                      help="Output filename for script")
    parser.add_option("-x", "--expire", dest="expire",
                      help="Maximum age of script, default 86400 seconds")
    parser.add_option("-t", "--test", dest="test", action="store_true",
                      help="Test mode; don't actually delete")
    parser.add_option("-u", "--unfinalized", dest="unfinalized",
                      action="store_true", help="Consider unfinalized backups")
    parser.add_option("-s", "--start-backups", dest="start",
                      action="store_true",
                      help="When used with --age, start backups for hosts " +
                           "with fewer than keep+1 backups")
    parser.add_option("-l", "--list", dest="list", action="store_true",
                      help="List stored backups after completing operations")

    (options, args) = parser.parse_args()

    bmgr = BackupManager(secrets.accesskey, secrets.sharedkey)

    if options.backupnum and not options.host:
        parser.error('Must specify --host when specifying --backup-number')

    if options.backupnum:
        options.backupnum = int(options.backupnum)

    if len(args) == 0:
        args.append('list')

    if len(args) > 1:
        parser.error('Too many arguments.')

    if args[0] != 'delete' and options.age:
        parser.error('--age only makes sense with delete')

    if options.start and not (args[0] == 'delete' and options.age):
        parser.error('--start-backups only makes sense with delete and --age')

    if args[0] != 'script' and (options.expire or options.filename):
        parser.error('--expire and --filename only make sense with script')

    if args[0] in ['list', 'script', 'delete']:
        if options.host:
            if options.host not in bmgr.all_backups:
                parser.error('No backups found for host "%s"' % options.host)
        else:
            if len(bmgr.all_backups) == 0:
                parser.error('No buckets found!')
    else:
        parser.error('Invalid option: %s' % args[0])

    if args[0] == 'script':
        if not options.host:
            parser.error('Must specify --host to generate a script for')

        if not options.backupnum and options.unfinalized:
            # assuming highest number
            options.backupnum = max(bmgr.all_backups[options.host].keys())
        elif not options.backupnum:
            # assuming highest finalized number
            options.backupnum = 0
            for backup in bmgr.all_backups[options.host].keys():
                if bmgr.all_backups[options.host][backup]['finalized'] > 0:
                    options.backupnum = max(options.backupnum, backup)
            if options.backupnum == 0:
                parser.error('No finalized backups found!  Try '
                             '--unfinalized if you dare')

        backup = bmgr.all_backups[options.host][options.backupnum]

        if not options.expire:
            options.expire = "86400"

        if options.filename:
            fd = open(options.filename, 'w')
            fd.writelines(make_restore_script(backup,
                          expire=int(options.expire)))
        else:
            sys.stdout.writelines(make_restore_script(backup,
                                  expire=int(options.expire)))
    elif args[0] == 'delete':
        to_ignore = int(options.keep)
        to_delete = []
        if options.host and options.backupnum:
            print("Will delete backup: %s %i (forced)" % (
                   options.host, options.backupnum))
            to_delete.append((options.host, options.backupnum))
        elif options.age:
            to_delete_dict = choose_backups_to_delete(bmgr.backups_by_age,
                                target_count=to_ignore,
                                max_age=int(options.age))
            for hostname, backuplist in to_delete_dict.items():
                for backupstat in backuplist:
                    print("Will delete backup: %s %i (expired at %g days)" % (
                            hostname, backupstat[0], backupstat[1] / 86400.0))
                    to_delete.append((hostname, backupstat[0]))

        else:
            parser.error('Need either an age or a host AND backup number.')

        if len(to_delete) > 0:
            for deletehost, deletebackupnum in to_delete:
                hostbackups = bmgr.all_backups.get(deletehost, {})
                deletebackup = hostbackups.get(deletebackupnum, {})
                deletekeys = deletebackup.get('keys', [])
                finalkey = deletebackup.get('finalkey', None)
                if len(deletekeys) > 0:
                    sys.stdout.write("Deleting backup: %s %d (%d keys)" % (
                            deletehost, deletebackupnum, len(deletekeys)))
                    for key in deletekeys:
                        if options.test:
                            sys.stdout.write('_')
                        else:
                            key.delete()
                            sys.stdout.write('.')
                        sys.stdout.flush()
                    if finalkey is not None:
                        if options.test:
                            sys.stdout.write('+')
                        else:
                            finalkey.delete()
                            sys.stdout.write('!')
                        sys.stdout.flush()
                    sys.stdout.write('\n')

        if options.start:
            for deletehost, deletebackupnum in to_delete:
                bmgr.invalidate_host_cache(deletehost)
            score_iter = choose_host_to_backup(bmgr.backups_by_age,
                                    target_count=int(options.keep) + 1)
            for candidate, score in score_iter:
                if score > 0:
                    sys.stdout.write('Starting archive operation for host: '
                                     '%s (score=%g)\n' % (candidate, score))
                    start_archive([candidate])
                    break
    if args[0] == 'list' or options.list:
        sys.stdout.write('%25s | %5s | %20s | %5s\n' % (
                "Hostname", "Bkup#", "Age", "Files"))
        sys.stdout.write(('-' * 72) + '\n')
        for hostname, backups in bmgr.all_backups.items():
            for backupnum in sorted(backups.keys()):
                filecount = len(backups[backupnum]['keys'])
                datestruct = backups[backupnum]['date']
                if backups[backupnum]['finalized'] > 0:
                    inprogress = ''
                else:
                    inprogress = '*'
                timestamp = time.mktime(datestruct)
                delta = int(time.time() - timestamp + time.timezone)
                if delta < 3600:
                    prettydelta = '%i min ago' % (delta / 60)
                elif delta < 86400:
                    prettydelta = '%i hr ago' % (delta / 3600)
                else:
                    days = int(delta / 60 / 60 / 24)
                    if days == 1:
                        s = ''
                    else:
                        s = 's'
                    prettydelta = '%i day%s ago' % (days, s)

                sys.stdout.write('%25s | %5i | %20s | %5i%s\n' % (
                    hostname, backupnum, prettydelta, filecount, inprogress))
        sys.stdout.write('* == not yet finalized (Age == time of '
                         'last activity)\n')

Example 12

Project: crmsh Source File: ui_history.py
Function: do_diff
    @command.skill_level('administrator')
    @command.completers(compl.join(compl.call(lambda: crm_report().peinputs_list()),
                                   compl.choice(['live'])),
                        compl.join(compl.call(lambda: crm_report().peinputs_list()),
                                   compl.choice(['live'])))
    def do_diff(self, context, t1, t2, *args):
        "usage: diff <pe> <pe> [status] [html]"
        self._init_source()
        opt_l = []
        if not self._common_pe_render_check(context, opt_l, *args):
            return False
        showfun = self._pe_config_plain
        mkhtml = "html" in opt_l
        if "status" in opt_l:
            showfun = self._pe_status_nohdr
        elif mkhtml:
            showfun = self._pe_config_noclr
        s = self._diff(showfun, t1, t2, html=mkhtml)
        if context.previous_level_is("cibconfig"):
            cib_factory.refresh()
        if s is None:
            return False
        if not mkhtml:
            utils.page_string(s)
        else:
            sys.stdout.writelines(s)

Example 13

Project: agdc Source File: tilecompare.py
def _compare_tile_contents(list_both, output):
    """Compare the tile pairs contained in list_both. Additionally, report
    those tiles that are only in Database 1, or only in Database 2.

    Positional arguments: 3 lists as follows:
    1. production_and_test: those corresponding pairs which exist on Database 1
       and Database 2.
    2. production_not_test: the tiles found only on Database 1.
    3. test_not_production: the tiles found only on Database 2.
    Each element of the above lists is a 5-tuple:
    (level_name, tile_class_id on Database 1, tile_class_id on Database 2,
     tile_pathname on Database 1, tile_pathname on Database 2).

    Returns:
    List of tile-path pairs (path1, path2) for which a difference has been
    detected."""
    #pylint:disable=too-many-locals

    # Define a list of tuples (path1, path2) where the contents differ
    rec_num = 0
    difference_pairs = []
    for tile_pair in list_both:
        rec_num += 1
        is_mosaic_vrt = False
        level, tile_class_id1, tile_class_id2, path1, path2 = tile_pair
        output.writelines('RECORD NUMBER %d tile_class_id2=%d level=%s\n'
                          %(rec_num, tile_class_id2, level))
        # For a mosaic tile, the tile entry may not be on the database, so
        # look in mosaic_cache:
        if tile_class_id2 in MOSAIC_CLASS_ID:
            path1 = os.path.join(os.path.dirname(path1), 'mosaic_cache',
                                 os.path.basename(path1))
            # For non-PQA tiles, the benchmark mosaic will be .vrt extension
            if level in ['NBAR', 'ORTHO']:
                path1 = re.match(r'(.+)\.tif$', path1).groups(1)[0] + '.vrt'
                is_mosaic_vrt = True

        # Check the Geotransform, Projection and shape (unless it is a vrt)
        if is_mosaic_vrt:
            data1, data2, msg = (None, None, "")
        else:
            # Skip checking of metadata for a vrt mosaic since we will check
            # with system diff command in _compare_data
            data1, data2, msg = _check_tile_metadata(path1, path2)

        if msg:
            output.writelines(msg)

        # Compare the tile contents
        are_different, msg = _compare_data(level,
                                           tile_class_id1, tile_class_id2,
                                           path1, path2, data1, data2)
        if are_different:
            difference_pairs.append((path1, path2))
        if msg:
            sys.stdout.writelines(msg)
            output.writelines(msg)

    return difference_pairs

Example 14

Project: replicator Source File: fiber.py
Function: del
  def __del__( self ):

    sys.stdout.writelines( self.__chunks )
    if not self.__newline:
      sys.stdout.write( '\n' )

Example 15

Project: filtered_websocket Source File: stdout_rawdata.py
Function: filter
    @classmethod
    def filter(cls, web_socket_instance, data):
        sys.stdout.writelines("--RAWDATA--\n%s\n" % data)
        sys.stdout.flush()

Example 16

Project: pyfaidx Source File: test_FastaRecord.py
    def test_issue_62(self):
        """ Check for pathogenic FastaRecord.long_name behavior in mdshw5/pyfaidx#62 """
        deflines = []
        line_len = None
        with open('data/genes.fasta', 'rb') as fasta_file:
            with open('data/issue_62.fa', 'wb') as fasta_uniform_len:
                for line in fasta_file:
                    if line.startswith(b'>'):
                        deflines.append(line[1:-1].decode('ascii'))
                        fasta_uniform_len.write(line)
                    elif line_len is None:
                        line_len = len(line)
                        fasta_uniform_len.write(line)
                    elif line_len > len(line):
                        fasta_uniform_len.write(line.rstrip() + b'N' * (line_len - len(line)) + b'\n')
                    else:
                        fasta_uniform_len.write(line)
        fasta = Fasta('data/issue_62.fa', as_raw=True)
        long_names = []
        for record in fasta:
            long_names.append(record.long_name)
        try:
            os.remove('data/issue_62.fa')
            os.remove('data/issue_62.fa.fai')
        except EnvironmentError:
            pass
        sys.stdout.writelines(tuple(Differ().compare(deflines, long_names)))
        assert deflines == long_names

Example 17

Project: wikimo_content Source File: sync.py
def compare_site_and_disk(config, diff, site, docs, push, get):
    ''' Does both compare and push/get, since the two are quite similar code-wise'''
    for f in docs:
        full_path = './'+f+'.mediawiki'
        m_ondisk = hashlib.new(config['hashalg'])
        with open(full_path) as fd:
            on_disk = fd.read()
        m_ondisk.update(on_disk.encode('utf-8'))

        m_onsite = hashlib.new(config['hashalg'])
        page = site.Pages[f]
        on_site = page.text().encode('utf-8')+'\n'.encode('utf-8')
        m_onsite.update(on_site)

        if m_ondisk.digest() != m_onsite.digest():
            print("Page {} differ.".format(f))
            if (diff):
                #Just display the diff in the correct order; we default to a push-side diff
                if get:
                    mydiff = difflib.unified_diff(blist2str(on_site.splitlines(1)), blist2str(on_disk.splitlines(1)))
                else:
                    mydiff = difflib.unified_diff(blist2str(on_disk.splitlines(1)), blist2str(on_site.splitlines(1)))

                sys.stdout.writelines(mydiff)

            #Now push or get whatever is needed to sync
            #But never do both push and get at once; that would make no sense
            if get:
                print("Getting {} from site to disk.".format(f))
                with open(full_path, 'w') as fd:
                    fd.write(on_site.decode('utf-8'))
            elif push:
                check_repos_is_current(config)
                print("Pushing {} from disk to site.".format(f))
                page.save(on_disk, summary=u'Automated sync from {}'.format(config['repos']))