Here are examples of the Python API `sys.stdout.buffer`, collected from open source projects. Vote up the examples you find most useful and appropriate.
29 Examples
5
Example 1
Project: pywb Source File: test_indexing.py
def cli_lines(cmds):
    """Run the CLI ``main`` with binary stdout captured, then print a summary.

    Captures output into a BytesIO, splits it into lines, and prints one
    early line, the last line, and the total line count.
    """
    buff = BytesIO()
    # NOTE(review): TextIOWrapper.buffer is normally read-only; this
    # assignment presumably relies on a replaced/mock stdout in the test
    # harness — confirm.
    orig = sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else None
    sys.stdout.buffer = buff
    main(cmds)
    # Restore the original buffer (None when there was no .buffer).
    sys.stdout.buffer = orig
    lines = buff.getvalue().rstrip().split(b'\n')
    # print first, last, num lines
    # NOTE(review): prints lines[1] (the second line), not lines[0] —
    # confirm whether the first output line is deliberately skipped.
    print(lines[1].decode('utf-8'))
    print(lines[-1].decode('utf-8'))
    print('Total: ' + str(len(lines)))
3
Example 2
Project: osbs-client Source File: main.py
def cmd_backup(args, osbs):
    """Create a .tar.bz2 backup of selected OpenShift resources.

    Writes to stdout when args.filename is '-', to args.filename when
    given, otherwise to a generated osbs-backup-...-<timestamp>.tar.bz2.
    Builds are paused for the duration of the dump.
    """
    dirname = time.strftime("osbs-backup-{0}-{1}-%Y-%m-%d-%H%M%S"
                            .format(args.instance, args.namespace))
    if args.filename == '-':
        # Python 3 needs the binary buffer to emit raw tar bytes on stdout.
        outfile = sys.stdout.buffer if PY3 else sys.stdout
    elif args.filename:
        outfile = args.filename
    else:
        outfile = dirname + ".tar.bz2"
    with paused_builds(osbs, quota_name='pause-backup'):
        with TarWriter(outfile, dirname) as t:
            for resource_type in BACKUP_RESOURCES:
                logger.info("dumping %s", resource_type)
                resources = osbs.dump_resource(resource_type)
                t.write_file(resource_type + ".json", json.dumps(resources).encode('ascii'))
    # A stream outfile (stdout) has .write; only report a created file path.
    if not hasattr(outfile, "write"):
        logger.info("backup archive created: %s", outfile)
3
Example 3
def __init__(self, usage='', version=''):
    """Initialize console I/O streams, output encoding and terminal geometry.

    NOTE(review): ``usage`` and ``version`` are accepted but not stored in
    the visible code — confirm subclass/caller use.
    """
    if six.PY3:
        # Binary buffers so bytes can be written directly on Python 3.
        self.output_file = sys.stdout.buffer
        self.error_file = sys.stderr.buffer
    else:
        self.output_file = sys.stdout
        self.error_file = sys.stderr
    # Fall back to utf-8 when the stream reports no encoding (e.g. pipes).
    self.encoding = getattr(self.output_file, 'encoding', 'utf-8') or 'utf-8'
    self.verbosity_level = 0
    # Colors are disabled on Windows and for non-terminal output.
    self.terminal_colors = not sys.platform.startswith('win') and self.is_terminal()
    if self.is_terminal():
        w, _h = getTerminalSize()
        self.terminal_width = w
    else:
        self.terminal_width = 80
    self.name = self.__class__.__name__.lower()
3
Example 4
Project: mochi Source File: main.py
def _pyc_compile(in_file_name, env, out_file_name, show_tokens=False, optimize=-1):
    """Compile a Mochi file into a Python bytecode file.

    The import environment ``env`` (a file located next to this module) is
    translated first and its AST body prepended to the target module's, so
    its bindings are available to the compiled code.  Output goes to the
    binary stdout stream when ``out_file_name`` is falsy.
    """
    if not out_file_name:
        out_file = sys.stdout.buffer
        opened_here = False
    else:
        out_file = open(out_file_name, 'wb')
        opened_here = True
    try:
        target_ast = translator.translate_file(in_file_name, show_tokens=show_tokens)
        import_env_file = Path(__file__).absolute().parents[0] / env
        import_env_ast = translator.translate_file(import_env_file.as_posix())
        # Prepend the environment's statements to the module body.
        target_ast.body = import_env_ast.body + target_ast.body
        output_pyc(compile(target_ast, in_file_name, 'exec', optimize=optimize),
                   buffer=out_file)
    finally:
        # BUG FIX: the original leaked the output file handle on every call
        # (and on translation errors).  Close it here — but never close
        # stdout's buffer, which we did not open.
        if opened_here:
            out_file.close()
3
Example 5
Project: python-hl7 Source File: client.py
def stdout(content):
    """Write *content* plus a trailing newline to standard output.

    On Python 3 bytes must go through ``sys.stdout.buffer`` because the
    text stream rejects them (http://bugs.python.org/issue18512).
    """
    is_binary = six.PY3 and isinstance(content, six.binary_type)
    stream = sys.stdout.buffer if is_binary else sys.stdout
    terminator = b'\n' if is_binary else '\n'
    stream.write(content + terminator)
3
Example 6
def update(self, percent, message=''):
    """Redraw the progress bar at *percent* (0.0-1.0) with an optional message."""
    # Binary stdout on Python 3: terminal control codes are written as bytes.
    out = (sys.stdout.buffer if py3 else sys.stdout)
    if self.cleared:
        # Re-emit the header after a clear() before drawing the bar again.
        out.write(self.header.encode(enc))
        self.cleared = 0
    # 10 columns are reserved for the percentage figure and bar brackets.
    n = int((self.width-10)*percent)
    msg = message.center(self.width)
    # Move to line start, go up one line, clear it, redraw the bar, then
    # clear and write the centered message on the line below.
    msg = (self.term.BOL + self.term.UP + self.term.CLEAR_EOL +
           (self.bar % (100*percent, '='*n, '-'*(self.width-10-n))) +
           self.term.CLEAR_EOL + msg).encode(enc)
    out.write(msg)
    out.flush()
3
Example 7
def clear(self):
    """Erase the two progress-bar lines from the terminal, if drawn."""
    stream = sys.stdout.buffer if py3 else sys.stdout
    if not self.cleared:
        wipe = (self.term.BOL + self.term.CLEAR_EOL +
                self.term.UP + self.term.CLEAR_EOL +
                self.term.UP + self.term.CLEAR_EOL)
        stream.write(wipe.encode(enc))
        self.cleared = 1
    # Flush unconditionally, matching the original behavior.
    stream.flush()
3
Example 8
def prints(*args, **kwargs):  # {{{
    """print()-like helper that writes encoded bytes (Python 2/3 safe).

    Keyword args: ``file`` (default: binary stdout on py3) and ``end``
    (default: ``b'\\n'``).  Unicode arguments are encoded with the target
    stream's encoding, falling back to utf-8.
    """
    f = kwargs.get('file', sys.stdout.buffer if py3 else sys.stdout)
    end = kwargs.get('end', b'\n')
    enc = getattr(f, 'encoding', 'utf-8') or 'utf-8'
    if isinstance(end, unicode):
        end = end.encode(enc)
    for x in args:
        if isinstance(x, unicode):
            x = x.encode(enc)
        f.write(x)
        # NOTE(review): a separator space is written after the *last*
        # argument as well, unlike built-in print() — confirm intended.
        f.write(b' ')
    f.write(end)
    # Only flush when writing to the real binary stdout.
    if py3 and f is sys.stdout.buffer:
        f.flush()
3
Example 9
Project: lglaf Source File: dump-file.py
def open_local_writable(path):
    """Return a binary-mode writable stream for *path*.

    ``'-'`` selects standard output (the binary ``.buffer`` layer when it
    exists, the text stream otherwise — e.g. on Python 2).  Any other path
    is opened as a new file in ``'wb'`` mode; the caller must close it.
    """
    if path == '-':
        # BUG FIX: the original used a bare ``except:`` which would also
        # swallow KeyboardInterrupt/SystemExit.  Only the missing-attribute
        # case is expected here.
        try:
            return sys.stdout.buffer
        except AttributeError:
            return sys.stdout
    else:
        return open(path, "wb")
3
Example 10
Project: django-ca Source File: base.py
def __init__(self, stdout=None, stderr=None, no_color=False):
    """Wrap stdout/stderr in binary writers when the command emits bytes.

    Falls back to Django's default BaseCommand stream handling otherwise.
    """
    if self.binary_output is True and six.PY3 is True:
        # Use the binary buffer layer so raw bytes (e.g. DER) can be written.
        self.stdout = BinaryOutputWrapper(stdout or sys.stdout.buffer)
        self.stderr = BinaryOutputWrapper(stderr or sys.stderr.buffer)
        # Binary output cannot be colorized.
        self.style = no_style()
    else:
        super(BaseCommand, self).__init__(stdout, stderr, no_color=no_color)
0
Example 11
Project: intelhex Source File: compat.py
def get_binary_stdout():
    """Return the binary (bytes) layer of standard output."""
    stream = sys.stdout
    return stream.buffer
0
Example 12
def __init__(self, filenames, out_file=None, write_gzip=False,
             force_read_gzip=None, read_record_ids=None, preserve_block=True,
             out_dir=None, print_progress=False, keep_going=False):
    """Record configuration for the archive reader/writer.

    When no output file is supplied, fall back to binary stdout (or the
    plain text stream on Python 2, which has no ``.buffer``).
    """
    if not out_file:
        out_file = getattr(sys.stdout, 'buffer', sys.stdout)
    # Input selection.
    self.filenames = filenames
    self.read_record_ids = read_record_ids
    self.force_read_gzip = force_read_gzip
    # Output configuration.
    self.out_file = out_file
    self.out_dir = out_dir
    self.write_gzip = write_gzip
    self.preserve_block = preserve_block
    # Runtime state and behavior flags.
    self.current_filename = None
    self.print_progress = print_progress
    self.keep_going = keep_going
    self.check_block_length = False
0
Example 13
Project: petl Source File: sources.py
def _get_stdout_binary():
    """Best-effort lookup of a binary (bytes) stream for stdout.

    Tries, in order: ``sys.stdout.buffer``; an unbuffered append-mode
    reopen of sys.stdout's file descriptor; the same two via
    ``sys.__stdout__``; finally falls back to the text-mode sys.stdout.
    """
    try:
        return sys.stdout.buffer
    except AttributeError:
        pass
    try:
        # No .buffer (e.g. a replaced stream): reopen the underlying
        # descriptor in binary append mode, unbuffered.
        fd = sys.stdout.fileno()
        return os.fdopen(fd, 'ab', 0)
    except Exception:
        pass
    try:
        # sys.stdout may have been rebound; try the original stream.
        return sys.__stdout__.buffer
    except AttributeError:
        pass
    try:
        fd = sys.__stdout__.fileno()
        return os.fdopen(fd, 'ab', 0)
    except Exception:
        pass
    # fallback
    return sys.stdout
0
Example 14
Project: mochi Source File: main.py
def output_code(code):
    """Write the marshal serialization of *code* to binary stdout."""
    import marshal
    stream = sys.stdout.buffer
    marshal.dump(code, stream)
0
Example 15
def main(argv):
    """Convert ARC records (passing WARC records through) to WARC output.

    Output goes to binary stdout unless --output is given; a ``.gz``
    output name implies gzip compression.  Returns 0 on success.
    """
    (options, input_files) = parser.parse_args(args=argv[1:])
    try:  # python3
        out = sys.stdout.buffer
    except AttributeError:  # python2
        out = sys.stdout
    if options.output:
        # Append mode so an existing archive can be extended.
        out = open(options.output, 'ab')
        if options.output.endswith('.gz'):
            options.gzip = True
    if len(input_files) < 1:
        # NOTE(review): "imput" typo in this user-facing message — fix separately.
        parser.error("no imput warc file(s)")
    warcinfo = warcinfo_fields(
        description = options.description,
        operator = options.operator,
        publisher = options.publisher,
        audience = options.audience,
    )
    arc = ArcTransformer(options.output, warcinfo, options.resource, options.response)
    for name in expand_files(input_files):
        fh = MixedRecord.open_archive(filename=name, gzip="auto")
        try:
            for record in fh:
                if isinstance(record, WarcRecord):
                    # Already WARC: emit unchanged.
                    print(' WARC', record.url, file=sys.stderr)
                    warcs = [record]
                else:
                    print('ARC ', record.url, file=sys.stderr)
                    warcs = arc.convert(record)
                for warcrecord in warcs:
                    warcrecord.write_to(out, gzip=options.gzip)
        finally:
            fh.close()
    return 0
0
Example 16
def main(argv):
    """Process WARC records from stdin or the given files to binary stdout.

    Each record is handed to ``process`` along with the output stream and
    the parsed options.  Returns 0.
    """
    (options, input_files) = parser.parse_args(args=argv[1:])
    try:  # python3
        out = sys.stdout.buffer
    except AttributeError:  # python2
        out = sys.stdout
    if len(input_files) < 1:
        # No filenames: read records from stdin (gzip detection disabled).
        fh = WarcRecord.open_archive(file_handle=sys.stdin, gzip=None)
        for record in fh:
            process(record, out, options)
    else:
        for name in expand_files(input_files):
            fh = WarcRecord.open_archive(name, gzip="auto")
            for record in fh:
                process(record, out, options)
            fh.close()
    return 0
0
Example 17
def main(argv):
    """Dump a single WARC record to binary stdout.

    With no arguments, dumps the first record on stdin; otherwise dumps
    the record at an optional byte offset within the named file.
    Returns 0.
    """
    (options, args) = parser.parse_args(args=argv[1:])
    try:  # python3
        out = sys.stdout.buffer
    except AttributeError:  # python2
        out = sys.stdout
    if len(args) < 1:
        # dump the first record on stdin
        with closing(WarcRecord.open_archive(file_handle=sys.stdin, gzip=None)) as fh:
            dump_record(fh, out)
    else:
        # dump a record from the filename, with optional offset
        filename = args[0]
        if len(args) > 1:
            offset = int(args[1])
        else:
            offset = 0
        with closing(WarcRecord.open_archive(filename=filename, gzip="auto")) as fh:
            fh.seek(offset)
            dump_record(fh, out)
    return 0
0
Example 18
def main(argv):
    """grep-like filter over WARC records.

    The first positional argument is a byte regex pattern; the rest are
    input files (stdin when none are given).  Matching records go to
    binary stdout via ``filter_archive``.  Returns 0.
    """
    (options, input_files) = parser.parse_args(args=argv[1:])
    try:  # python3
        out = sys.stdout.buffer
    except AttributeError:  # python2
        out = sys.stdout
    if len(input_files) < 1:
        parser.error("no pattern")
    # First positional arg is the pattern (encoded to bytes); rest are files.
    pattern, input_files = input_files[0].encode(), input_files[1:]
    # NOTE(review): ``invert`` is bound but unused here — presumably
    # consumed via ``options`` inside filter_archive; confirm.
    invert = options.invert
    pattern = re.compile(pattern)
    if not input_files:
        fh = WarcRecord.open_archive(file_handle=sys.stdin, gzip=None)
        filter_archive(fh, options, pattern, out)
    else:
        for name in expand_files(input_files):
            fh = WarcRecord.open_archive(name, gzip="auto")
            filter_archive(fh, options, pattern, out)
            fh.close()
    return 0
0
Example 19
def main(argv):
    """Print an index line per WARC record to binary stdout.

    Columns: filename, offset, warc-type, subject URI, record id,
    content-type, content-length.  Missing fields print as '-'.
    Returns 0.
    """
    (options, input_files) = parser.parse_args(args=argv[1:])
    try:  # python3
        out = sys.stdout.buffer
    except AttributeError:  # python2
        out = sys.stdout
    if len(input_files) < 1:
        # NOTE(review): "imput" typo in this user-facing message — fix separately.
        parser.error("no imput warc file(s)")
    out.write(b'#WARC filename offset warc-type warc-subject-uri warc-record-id content-type content-length\n')
    for name in expand_files(input_files):
        fh = WarcRecord.open_archive(name, gzip="auto")
        try:
            for (offset, record, errors) in fh.read_records(limit=None):
                if record:
                    fields = [name.encode('utf-8'),
                              str(offset).encode('utf-8'),
                              record.type or b'-',
                              record.url or b'-',
                              record.id or b'-',
                              record.content_type or b'-',
                              str(record.content_length).encode('utf-8')]
                    out.write(b' '.join(fields) + b'\n')
                elif errors:
                    pass
                    # ignore
                else:
                    pass
                    # no errors at tail
        finally:
            fh.close()
    return 0
0
Example 20
Project: warctools Source File: warcpayload.py
def dump_payload_from_stream(fh):
    """Stream the payload of the first WARC record in *fh* to binary stdout.

    HTTP response records are unwrapped through FileHTTPResponse so only
    the entity body is emitted; other records are dumped raw in 8 KiB
    chunks.  Parse errors are reported on stderr.
    """
    try:  # python3: bytes must go through the buffer layer
        out = sys.stdout.buffer
    except AttributeError:  # python2
        out = sys.stdout
    for (offset, record, errors) in fh.read_records(limit=1, offsets=False):
        if record:
            if (record.type == WarcRecord.RESPONSE
                    and record.content_type.startswith(b'application/http')):
                # Skip the HTTP headers; yield just the body.
                f = FileHTTPResponse(record.content_file)
                f.begin()
            else:
                f = record.content_file
            buf = f.read(8192)
            while buf != b'':
                out.write(buf)
                buf = f.read(8192)
        elif errors:
            # BUG FIX: the original referenced an undefined ``name`` here,
            # raising NameError whenever a parse error was reported.
            print("warc errors at offset %d" % (offset if offset else 0),
                  file=sys.stderr)
            for e in errors:
                # BUG FIX: error details went to stdout, corrupting the
                # payload stream; route them to stderr like the header line.
                print('\t', e, file=sys.stderr)
0
Example 21
Project: biocode Source File: fastq_simple_stats.py
def main():
    """Report simple quantitative statistics for one or more FASTQ files.

    Prints entry count, total bases, average sequence length and GC
    percentage to stdout (or to -o/--output_file).  Gzipped inputs
    (``.gz``) are supported.  Raises if a record header line does not
    start with '@'.
    """
    parser = argparse.ArgumentParser( description='Provides simple quantitative statistics for a given FASTQ file')

    ## output file to be written
    parser.add_argument('input_files', metavar='N', type=str, nargs='+', help='Path to one or more input files, separated by spaces' )
    parser.add_argument('-o', '--output_file', type=str, required=False, help='Optional path to an output file to be created, else prints on STDOUT' )
    args = parser.parse_args()

    ## open the output file
    if args.output_file is None:
        # UTF-8 writer over binary stdout.
        fout = codecs.getwriter('utf8')(sys.stdout.buffer)
    else:
        fout = open(args.output_file, "w")

    ## values that will be reported
    entry_count = 0
    total_bases = 0
    gc_count = 0
    line_number = 0

    for input_file in args.input_files:
        if input_file.endswith('.gz'):
            fh = gzip.open( input_file, 'rb')
            is_compressed = True
        else:
            # BUG FIX: mode 'rU' was deprecated and removed in Python 3.11;
            # universal newlines are the default for text-mode 'r'.
            fh = open( input_file, 'r' )
            is_compressed = False

        for line in fh:
            if is_compressed:
                line = line.decode()
            line = line.rstrip()
            line_number += 1

            ## every 4th line is the start of a sequence entry (check still)
            if line_number % 4 == 1:
                if line.startswith('@'):
                    entry_count += 1
                else:
                    raise Exception("Error, expected every 4th line to start with @ and this one didn't: {0}".format(line) )
            elif line_number % 4 == 2:
                total_bases += len(line)
                gc_count += line.count('G')
                gc_count += line.count('C')

        # BUG FIX: the original leaked every input file handle.
        fh.close()

    # BUG FIX: guard against empty input to avoid ZeroDivisionError.
    avg_entry_length = total_bases / entry_count if entry_count else 0
    gc_percentage = (gc_count / total_bases) * 100 if total_bases else 0.0

    fout.write("Total sequence entries: {0}\n".format(entry_count))
    fout.write("Total bases: {0}\n".format(total_bases))
    fout.write("Avg sequence length: {0:.1f}\n".format(avg_entry_length))
    fout.write("GC percentage: {0:.1f}\n".format(gc_percentage))

    # BUG FIX: flush/close the output file (not stdout's wrapper) so the
    # report is complete on disk when main() returns.
    if args.output_file is not None:
        fout.close()
0
Example 22
Project: biocode Source File: calculate_feature_coverage.py
def main():
    """Compute per-molecule coverage from evidence alignment files.

    Reads a reference GFF3 (-r, given as PATH:FEATURETYPE) and FASTA (-f),
    builds a per-base coverage array for each molecule, folds in
    pileup/SAM evidence files, and writes
    ``id<TAB>length<TAB>covered_bases`` lines to stdout or -o.
    """
    parser = argparse.ArgumentParser( description='Provides coverage information for features in a GFF3 file')

    ## output file to be written
    parser.add_argument('evidence_files', metavar='N', type=str, nargs='+', help='Path to one or more evidence files, separated by spaces' )
    parser.add_argument('-r', '--reference', type=str, required=True, help='Input path to the reference GFF3 file. So we know what feature type to report on, format should be like FILE:TYPE' )
    parser.add_argument('-f', '--fasta', type=str, required=True, help='Input path to the reference FASTA file.' )
    parser.add_argument('-o', '--output_file', type=str, required=False, help='Optional path to an output file to be created, else prints on STDOUT' )
    args = parser.parse_args()

    ## parse the fasta
    fasta = biocodeutils.fasta_dict_from_file(args.fasta)

    ## open the output file
    if args.output_file is None:
        # UTF-8 writer over binary stdout.
        fout = codecs.getwriter('utf8')(sys.stdout.buffer)
    else:
        fout = open(args.output_file, "w")

    ####################################################
    ## Sanity checks
    allowed_extensions = ['bed', 'gff3', 'pileup', 'sam']
    for ev_file in args.evidence_files:
        valid_ext_found = False
        for ext in allowed_extensions:
            if ev_file.endswith(ext):
                valid_ext_found = True
        if valid_ext_found == False:
            raise Exception("ERROR: Evidence file passed with unsupported file extension: {0}. Supported extensions are {1}".format(ev_file, allowed_extensions))

    ## The input file should be defined as $path:$feattype
    if ':' not in args.reference:
        raise Exception("ERROR: input_file must be like /path/to/some.gff3:mRNA")

    ref_file_parts = args.reference.split(':')
    print("DEBUG: part count: {0}".format(len(ref_file_parts)))

    if ref_file_parts[0].endswith('.gff3'):
        (ref_assemblies, ref_features) = biocodegff.get_gff3_features( ref_file_parts[0] )
    else:
        raise Exception("ERROR: Expected input file (-i) to have a gff3 extension, got {0}".format(ref_file_parts[0]))

    ####################################################
    ## Initialize the coverage arrays
    fasta_cov = dict()
    for seq_id in fasta:
        # create a list of 0s the length of the molecule
        fasta_cov[seq_id] = [0] * len(fasta[seq_id]['s'])

    ####################################################
    ## Now parse the evidence files
    for ev_file in args.evidence_files:
        if ev_file.endswith('pileup'):
            parse_pileup(fasta_cov, ev_file)
        elif ev_file.endswith('sam'):
            parse_sam(fasta_cov, ev_file)
        else:
            print("INFO: ignoring evidence file {0} because code to handle its file type isn't currently implemented".format(ev_file))

    for id in fasta_cov:
        # BUG FIX: the original iterated over coverage *values* but then
        # used each value as an *index* (fasta_cov[id][i]), counting the
        # wrong positions and risking IndexError for depths >= length.
        # Count positions with depth > 0 directly.
        covered_bases = sum(1 for depth in fasta_cov[id] if depth > 0)
        fout.write("{0}\t{1}\t{2}\n".format(id, len(fasta[id]['s']), covered_bases))
0
Example 23
def to_csv(pdf, types, encoding):
    """Write the pdf's objects of the requested *types* to stdout as CSV.

    Column order: a fixed set of geometry columns first, then any
    remaining fields sorted alphabetically.
    """
    rows = []
    seen_fields = set()
    for type_name in types:
        candidates = getattr(pdf, type_name + "s")
        if len(candidates):
            rows += candidates
            seen_fields = seen_fields.union(set(candidates[0].keys()))

    first_columns = [
        "object_type", "pageid",
        "x0", "x1", "y0", "y1",
        "doctop", "top", "bottom",
        "width", "height"
    ]
    extra = sorted(set(seen_fields) - set(first_columns))
    cols = first_columns + list(extra)

    # unicodecsv encodes itself, so target the binary stream on Python 3.
    target = (sys.stdout.buffer if sys.version_info[0] >= 3 else sys.stdout)
    writer = unicodecsv.DictWriter(target,
                                   fieldnames=cols, encoding=encoding)
    writer.writeheader()
    writer.writerows(rows)
0
Example 24
Project: buildozer Source File: __init__.py
def cmd_expect(self, command, **kwargs):
    """Spawn *command* via pexpect with the buildozer environment applied.

    Returns the ``spawnu`` child process.  When output display is enabled,
    pexpect logs through a UTF-8 writer (binary stdout buffer on Python 3).
    ``sensible=True`` hides the command's arguments from the debug log.
    """
    from pexpect import spawnu
    # prepare the environ, based on the system + our own env
    env = copy(environ)
    env.update(self.environ)
    # prepare the process
    kwargs.setdefault('env', env)
    kwargs.setdefault('show_output', self.log_level > 1)
    sensible = kwargs.pop('sensible', False)
    show_output = kwargs.pop('show_output')
    if show_output:
        if IS_PY3:
            kwargs['logfile'] = codecs.getwriter('utf8')(stdout.buffer)
        else:
            kwargs['logfile'] = codecs.getwriter('utf8')(stdout)
    if not sensible:
        self.debug('Run (expect) {0!r}'.format(command))
    else:
        # "sensible" commands only log the program name, not arguments.
        self.debug('Run (expect) {0!r} ...'.format(command.split()[0]))
    self.debug('Cwd {}'.format(kwargs.get('cwd')))
    return spawnu(command, **kwargs)
0
Example 25
def main():
    """Entry-point of the executable.

    Parses CLI options and converts the input SVG (file path, URL or
    stdin) to the chosen surface format, writing to a file or to binary
    stdout.
    """
    # Get command-line options
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('input', default='-', help='input filename or URL')
    parser.add_argument(
        '-v', '--version', action='version', version=__version__)
    parser.add_argument(
        '-f', '--format', help='output format',
        choices=sorted([surface.lower() for surface in SURFACES]))
    parser.add_argument(
        '-d', '--dpi', default=96, type=float,
        help='ratio between 1 inch and 1 pixel')
    parser.add_argument(
        '-W', '--width', default=None, type=float,
        help='width of the parent container in pixels')
    parser.add_argument(
        '-H', '--height', default=None, type=float,
        help='height of the parent container in pixels')
    parser.add_argument(
        '-s', '--scale', default=1, type=float, help='output scaling factor')
    parser.add_argument(
        '-u', '--unsafe', action='store_true',
        help='resolve XML entities and allow very large files '
        '(WARNING: vulnerable to XXE attacks and various DoS)')
    parser.add_argument('-o', '--output', default='-', help='output filename')
    options = parser.parse_args()
    kwargs = {
        'parent_width': options.width, 'parent_height': options.height,
        'dpi': options.dpi, 'scale': options.scale, 'unsafe': options.unsafe}
    # '-' means binary stdout for output and binary stdin for input.
    kwargs['write_to'] = (
        sys.stdout.buffer if options.output == '-' else options.output)
    if options.input == '-':
        kwargs['file_obj'] = sys.stdin.buffer
    else:
        kwargs['url'] = options.input
    # Format priority: explicit -f flag, else output extension, else PDF.
    output_format = (
        options.format or
        os.path.splitext(options.output)[1].lstrip('.') or
        'pdf').upper()
    # NOTE(review): the second .upper() below is redundant — the string is
    # already upper-cased above.
    SURFACES[output_format.upper()].convert(**kwargs)
0
Example 26
def main():
    """Entry point: talk to an LG LAF device and stream command responses.

    Device responses are written as-is to binary stdout; diagnostics go
    through the logger (stderr).  Individual command failures are logged
    and do not abort the remaining commands.
    """
    args = parser.parse_args()
    logging.basicConfig(format='%(name)s: %(levelname)s: %(message)s',
                        level=logging.DEBUG if args.debug else logging.INFO)

    # Binary stdout (output data from device as-is)
    # BUG FIX: the bare ``except:`` also swallowed KeyboardInterrupt and
    # SystemExit; only the missing-attribute case (Python 2) is expected.
    try:
        stdout_bin = sys.stdout.buffer
    except AttributeError:
        stdout_bin = sys.stdout

    if args.serial_path:
        comm = FileCommunication(args.serial_path)
    else:
        comm = autodetect_device()

    with closing(comm):
        if not args.skip_hello:
            try_hello(comm)
            _logger.debug("Hello done, proceeding with commands")
        for command in get_commands(args.command):
            try:
                payload = command_to_payload(command)
                header, response = comm.call(payload)
                # For debugging, print header
                if command[0] == '!':
                    _logger.debug('Header: %s',
                                  ' '.join(repr(header[i:i+4]).replace("\\x00", "\\0")
                                           for i in range(0, len(header), 4)))
                stdout_bin.write(response)
            except Exception as e:
                # ``warning`` is the non-deprecated spelling of ``warn``.
                _logger.warning(e)
                if args.debug:
                    import traceback; traceback.print_exc()
0
Example 27
Project: nlzss Source File: lzss3.py
def main(args=None):
    """Decompress an LZSS-compressed file (or stdin) to binary stdout.

    ``--overlay`` selects DS overlay decompression, which is not supported
    from stdin.  Returns 0 on success, 1 on decompression error, 2 on
    usage/IO error.
    """
    if args is None:
        args = sys.argv[1:]
    if '--overlay' in args:
        args.remove('--overlay')
        overlay = True
    else:
        overlay = False
    if len(args) < 1 or args[0] == '-':
        if overlay:
            # Overlay decompression needs to seek, which stdin cannot do.
            print("Can't decompress overlays from stdin", file=stderr)
            return 2
        # Binary stdin on Python 3; plain stdin on Python 2.
        if hasattr(stdin, 'buffer'):
            f = stdin.buffer
        else:
            f = stdin
    else:
        try:
            f = open(args[0], "rb")
        except IOError as e:
            print(e, file=stderr)
            return 2
    stdout = sys.stdout
    if hasattr(stdout, 'buffer'):
        # grab the underlying binary stream
        stdout = stdout.buffer
    try:
        if overlay:
            decompress_overlay(f, stdout)
        else:
            stdout.write(decompress_file(f))
    except IOError as e:
        if e.errno == EPIPE:
            # don't complain about a broken pipe
            pass
        else:
            raise
    except (DecompressionError,) as e:
        print(e, file=stderr)
        return 1
    return 0
0
Example 28
def _process_run(cache_callable=True):
    """Run this function in a worker process to receive and run tasks.

    It waits for tasks on stdin, and sends the results back via stdout.
    Tasks arrive as pickled ``(data, callable, index)`` tuples; the string
    ``"EXIT"`` terminates the loop.  With *cache_callable* a task may omit
    its callable and reuse a fork of the previously received one.
    """
    # use sys.stdout only for pickled objects, everything else goes to stderr
    # NOTE: .buffer is the binary mode interface for stdin and out in py3k
    try:
        pickle_out = sys.stdout.buffer
    except AttributeError:
        pickle_out = sys.stdout
    try:
        pickle_in = sys.stdin.buffer
    except AttributeError:
        pickle_in = sys.stdin
    # Redirect print()s from task code to stderr so they cannot corrupt
    # the pickle channel on stdout.
    sys.stdout = sys.stderr
    exit_loop = False
    last_callable = None  # cached callable
    while not exit_loop:
        task = None
        try:
            # wait for task to arrive
            task = pickle.load(pickle_in)
            if task == "EXIT":
                exit_loop = True
            else:
                data, task_callable, task_index = task
                if task_callable is None:
                    if last_callable is None:
                        err = ("No callable was provided and no cached "
                               "callable is available.")
                        raise Exception(err)
                    # Reuse a fork of the cached callable.
                    task_callable = last_callable.fork()
                elif cache_callable:
                    # store callable in cache
                    last_callable = task_callable
                    task_callable.setup_environment()
                    task_callable = task_callable.fork()
                else:
                    task_callable.setup_environment()
                result = task_callable(data)
                del task_callable  # free memory
                # protocol=-1: use the highest available pickle protocol.
                pickle.dump(result, pickle_out, protocol=-1)
                pickle_out.flush()
        except Exception as exception:
            # return the exception instead of the result
            if task is None:
                print("unpickling a task caused an exception in a process:")
            else:
                print("task %d caused exception in process:" % task[2])
            print(exception)
            traceback.print_exc()
            sys.stdout.flush()
            sys.exit()
0
Example 29
Project: oslo.rootwrap Source File: daemon.py
def daemon_start(config, filters):
    """Run the rootwrap daemon: announce a Unix socket on stdout and serve.

    Creates a world-traversable temp dir holding a world-writable socket,
    writes the socket path and the server auth key to stdout for the
    parent process, then serves until stopped by SIGINT/SIGTERM.  The
    temp dir is removed on exit.
    """
    temp_dir = tempfile.mkdtemp(prefix='rootwrap-')
    LOG.debug("Created temporary directory %s", temp_dir)
    try:
        # allow everybody to find the socket
        rwxr_xr_x = (stat.S_IRWXU |
                     stat.S_IRGRP | stat.S_IXGRP |
                     stat.S_IROTH | stat.S_IXOTH)
        os.chmod(temp_dir, rwxr_xr_x)
        socket_path = os.path.join(temp_dir, "rootwrap.sock")
        LOG.debug("Will listen on socket %s", socket_path)
        manager_cls = get_manager_class(config, filters)
        manager = manager_cls(address=socket_path)
        server = manager.get_server()
        try:
            # allow everybody to connect to the socket
            rw_rw_rw_ = (stat.S_IRUSR | stat.S_IWUSR |
                         stat.S_IRGRP | stat.S_IWGRP |
                         stat.S_IROTH | stat.S_IWOTH)
            os.chmod(socket_path, rw_rw_rw_)
            try:
                # In Python 3 we have to use buffer to push in bytes directly
                stdout = sys.stdout.buffer
            except AttributeError:
                stdout = sys.stdout
            # Hand the rendezvous info to the parent: path, then auth key.
            stdout.write(socket_path.encode('utf-8'))
            stdout.write(b'\n')
            stdout.write(bytes(server.authkey))
            # Detach from the standard streams once the handshake is done.
            sys.stdin.close()
            sys.stdout.close()
            sys.stderr.close()
            # Gracefully shutdown on INT or TERM signals
            stop = functools.partial(daemon_stop, server)
            signal.signal(signal.SIGTERM, stop)
            signal.signal(signal.SIGINT, stop)
            LOG.info("Starting rootwrap daemon main loop")
            server.serve_forever()
        finally:
            conn = server.listener
            # This will break accept() loop with EOFError if it was not in the
            # main thread (as in Python 3.x)
            conn.close()
            # Closing all currently connected client sockets for reading to
            # break worker threads blocked on recv()
            for cl_conn in conn.get_accepted():
                try:
                    cl_conn.half_close()
                except Exception:
                    # Most likely the socket have already been closed
                    LOG.debug("Failed to close connection")
            LOG.info("Waiting for all client threads to finish.")
            for thread in threading.enumerate():
                if thread.daemon:
                    LOG.debug("Joining thread %s", thread)
                    thread.join()
    finally:
        LOG.debug("Removing temporary directory %s", temp_dir)
        shutil.rmtree(temp_dir)