sys.stdin.readlines

Here are examples of the Python API sys.stdin.readlines taken from open source projects.

75 Examples
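
Before the project examples, a quick orientation (a minimal sketch of my own, not taken from any of the projects below): sys.stdin.readlines() blocks until end-of-file and returns every remaining line of standard input as a list of strings, each still ending in its newline character, which is why most of the examples below strip, split, or join the lines before using them. For very large streams it can be preferable to iterate over sys.stdin directly, since that reads lazily instead of loading everything into memory at once.

import sys

def read_stdin_lines():
    """Minimal sketch: read all of standard input and drop trailing newlines."""
    # readlines() waits for EOF and keeps the '\n' on each element,
    # so strip it off before handing the values on.
    return [line.rstrip('\n') for line in sys.stdin.readlines()]

if __name__ == '__main__':
    for item in read_stdin_lines():
        print(item)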

Example 1

Project: mavelous
Source File: calcdeps.py
View license
def GetInputsFromOptions(options):
  """Generates the inputs from flag options.

  Args:
    options: The flags to calcdeps.
  Returns:
    A list of inputs (strings).
  """
  inputs = options.inputs
  if not inputs:  # Parse stdin
    logging.info('No inputs specified. Reading from stdin...')
    inputs = filter(None, [line.strip('\n') for line in sys.stdin.readlines()])

  logging.info('Scanning files...')
  inputs = ExpandDirectories(inputs)

  return FilterByExcludes(options, inputs)

Example 2

Project: phabricator-tools
Source File: aoncmd_query.py
View license
def _set_options(args, d):
    phlsys_dictutil.set_if_true(d, 'ids', args.ids)
    phlsys_dictutil.set_if_true(d, 'arcanistProjects', args.arcanist_projects)
    phlsys_dictutil.set_if_true(d, 'branches', args.branches)
    phlsys_dictutil.set_if_true(d, 'limit', args.max_results)
    phlsys_dictutil.set_if_true(d, 'offset', args.offset_results)

    if args.ids_stdin:
        ids = [int(i) for i in " ".join(sys.stdin.readlines()).split()]
        d["ids"] = args.ids + ids

    if args.status_type:
        d["status"] = "status-" + args.status_type

Example 3

View license
def post_receive(mailer, subject_prefix, subject_template=None):
    lines = sys.stdin.readlines()
    commits = {}
    for line in lines:
        old_rev, new_rev, ref_name = parse_post_receive_line(line)
        commits[ref_name] = get_commits(old_rev, new_rev)
    process_commits(commits, mailer, subject_prefix, subject_template)

Example 4

Project: subscription-manager
Source File: test_lock.py
View license
def main(args):
    lock_file_path = args[1]
    test_lock = lock.Lock(lock_file_path)

    # could return a useful value, so the thread communicating with
    # it could notice it couldn't get the lock
    res = test_lock.acquire(blocking=False)
    if res is False:
        return 128

    # exit on any stdin input
    for line in sys.stdin.readlines():
        return 1

Example 5

Project: clustershell
Source File: Nodeset.py
View license
def process_stdin(xsetop, xsetcls, autostep):
    """Process standard input and operate on xset."""
    # Build temporary set (stdin accumulator)
    tmpset = xsetcls(autostep=autostep)
    for line in sys.stdin.readlines():
        # Support multi-lines and multi-nodesets per line
        line = line[0:line.find('#')].strip()
        for elem in line.split():
            # Do explicit object creation for RangeSet
            tmpset.update(xsetcls(elem, autostep=autostep))
    # Perform operation on xset
    if tmpset:
        xsetop(tmpset)

Example 6

Project: timed
Source File: client.py
View license
@cmdapp.cmd
def parse(logfile, time_format):
  "parses a stream with text formatted as a Timed logfile and shows a summary"

  records = [server.record_from_txt(line, only_elapsed=True,
    time_format=time_format) for line in sys.stdin.readlines()]

  # TODO: make this code better.
  def output(summary):
    width = max([len(p[0]) for p in summary]) + 3
    print '\n'.join([
      "%s%s%s" % (p[0], ' ' * (width - len(p[0])),
        colored(minutes_to_txt(p[1]), 'red')) for p in summary])

  output(server.summarize(records))

Example 7

View license
    def handle(self, *args, **options):
        lines = sys.stdin.readlines()
        handler = EmailHandler()
        try:
            answer = handler.handle(lines)
            answer.save()
        except CouldNotFindIdentifier:
            pass

Example 8

Project: devassistant
Source File: actions.py
View license
    @classmethod
    def gather_input(cls, received):
        if received == '-':
            # read from stdin
            to_run = []
            for l in sys.stdin.readlines():
                to_run.append(l)
            to_run = ''.join(to_run)
        else:
            to_run = received
        return to_run

Example 9

Project: onion-py
Source File: onion.py
View license
def atlas(m, n):
  fields = 'nickname,fingerprint,last_seen,running,flags,advertised_bandwidth,or_addresses'
  print(fields)
  for line in sys.stdin.readlines():
    l = line.strip().split(",")
    fp = l[2] if len(l) >= 3 else l[0]
    d = m.query('details',lookup=fp, limit=1, type='relay', field=fields)
    if len(d.relays) < 1:
      print('not_found,{},...'.format(fp))
    else:
      r = d.relays[0]
      print(",".join([str(x) for x in [r.nickname,r.fingerprint,r.last_seen,r.running,r.flags,r.bandwidth[3],r.or_addresses[0]]]))

Example 10

Project: mysql-size-estimator
Source File: cli.py
View license
    def _read_table_from_stdin(self):
        data = sys.stdin.readlines()
        table_str = " ".join(data)
        try:
            self.table = self._parser.parse_table(table_str)
        except:
            raise CliSQLParseException("Cannot parse given table")

Example 11

Project: euscan
Source File: scan_metadata.py
View license
    def handle(self, *args, **options):
        set_verbosity_level(logger, options.get("verbosity", 1))

        if options['all'] or options['category']:
            packages = None

        elif len(args):
            packages = [pkg for pkg in args]
        else:
            packages = [pkg[:-1] for pkg in sys.stdin.readlines()]

        scan_metadata(
            packages=packages,
            category=options['category'],
            logger=logger,
            populate=options['populate'],
        )

Example 12

Project: euscan
Source File: scan_portage.py
View license
    def handle(self, *args, **options):
        set_verbosity_level(logger, options.get("verbosity", 1))

        if options['all'] or options['category']:
            packages = None
        elif len(args):
            packages = [pkg for pkg in args]
        else:
            packages = [pkg[:-1] for pkg in sys.stdin.readlines()]

        scan_portage(
            packages=packages,
            category=options['category'],
            no_log=options["no-log"],
            purge_packages=options["purge-packages"],
            purge_versions=options["purge-versions"],
            upstream=options["upstream"],
            logger=logger,
        )

Example 13

Project: euscan
Source File: scan_upstream.py
View license
    def handle(self, *args, **options):
        set_verbosity_level(logger, options.get("verbosity", 1))

        if options['all']:
            packages = None

        elif len(args):
            packages = [pkg for pkg in args]
        else:
            packages = [pkg[:-1] for pkg in sys.stdin.readlines()]

        scan_upstream(
            packages=packages,
            purge_versions=options["purge-versions"],
            logger=logger,
        )

Example 14

Project: misc-scripts
Source File: check_url_list.py
View license
def main():
    urls = {}

    for line in sys.stdin.readlines():
        line = line.strip()
        if line not in urls:
            sys.stderr.write("+ checking URL: %s\n" % line)
            urls[line] = {'code': get_url_nofollow(line), 'count': 1}
            sys.stderr.write("++ %s\n" % str(urls[line]))
        else:
            urls[line]['count'] = urls[line]['count'] + 1

    for url in urls:
        if urls[url]['code'] != 200:
            print "%d\t%d\t%s" % (urls[url]['count'], urls[url]['code'], url)

Example 15

Project: pan-python
Source File: panconf.py
View license
def read_file(path):
    if path == '-':
        lines = sys.stdin.readlines()
    else:
        try:
            f = open(path)
        except IOError as msg:
            print('open %s: %s' % (path, msg), file=sys.stderr)
            sys.exit(1)
        lines = f.readlines()
        f.close()

    return ''.join(lines)

Example 16

Project: pan-python
Source File: panconf.py
View license
def read_file(path):
    if path == '-':
        lines = sys.stdin.readlines()
    else:
        try:
            f = open(path)
        except IOError as msg:
            print('open %s: %s' % (path, msg), file=sys.stderr)
            sys.exit(1)
        lines = f.readlines()
        f.close()

    return ''.join(lines)

Example 17

Project: shinken
Source File: send_nsca.py
View license
def main(hostname, port, encryption, password):
    notifier = NSCANotifier(hostname, port, encryption, password)

    for line in sys.stdin.readlines():
        line = line.rstrip()
        if not line:
            continue
        notif = line.split(opts.delimiter)
        if len(notif) == 3:
            # only host, rc, output
            notif.insert(1, '')  # insert service
        # line consists of host, service, rc, output
        assert len(notif) == 4
        notifier.svc_result(*notif)

Example 18

Project: shinken
Source File: send_nsca.py
View license
def main(hostname, port, encryption, password):
    notifier = NSCANotifier(hostname, port, encryption, password)

    for line in sys.stdin.readlines():
        line = line.rstrip()
        if not line:
            continue
        notif = line.split(opts.delimiter)
        if len(notif) == 3:
            # only host, rc, output
            notif.insert(1, '')  # insert service
        # line consists of host, service, rc, output
        assert len(notif) == 4
        notifier.svc_result(*notif)

Example 19

Project: pockyt
Source File: client.py
View license
    def _get_redirect_input(self):
        for line in sys.stdin.readlines():
            data = line.strip()
            if data:
                info = self._unformat_spec.parse(data)
                self._input.append(info)
            else:
                continue

Example 20

Project: pockyt
Source File: client.py
View license
    def _get_redirect_input(self):
        for line in sys.stdin.readlines():
            data = line.strip()
            if data:
                info = self._unformat_spec.parse(data)
                self._input.append(info)
            else:
                continue

Example 21

Project: fuel-octane
Source File: clean_env.py
View license
def main():
    hosts = [line.rstrip('\n') for line in sys.stdin.readlines()]
    access_data = {
        'user': os.environ['OS_USERNAME'],
        'password': os.environ['OS_PASSWORD'],
        'tenant': os.environ['OS_TENANT_NAME'],
        'auth_url': os.environ['OS_AUTH_URL'],
    }

    cleanup_nova_services(access_data, hosts)
    cleanup_neutron_agents(access_data, hosts)

Example 22

Project: release-tools
Source File: annotate-lp-bugs.py
View license
def main():
    args = _parse_args()

    lp = Launchpad.login_with('openstack-releasing', 'production')

    for line in sys.stdin.readlines():
        bugnum = line.strip()
        _annotate_bug(lp, args.project, bugnum)

Example 23

View license
def main():
    args = _parse_args()

    lp = Launchpad.login_with('openstack-releasing', 'production')

    bugs = [line.strip() for line in sys.stdin.readlines()]
    for bug in _filter_bugs(lp, args.project, args.importance, bugs):
        print(bug)

Example 24

View license
def main():
    args = _parse_args()

    lp = Launchpad.login_with('openstack-releasing', 'production')

    bugs = [line.strip() for line in sys.stdin.readlines()]
    for bug in _filter_bugs(lp, args.project, args.tag, bugs):
        print(bug)

Example 25

Project: release-tools
Source File: lp-tag.py
View license
def main():
    args = _parse_args()
    lp = Launchpad.login_with('openstack-releasing', 'production')
    bugnums = [line.strip() for line in sys.stdin.readlines()]
    for bugnum in bugnums:
        bug = lp.bugs[bugnum]
        tag = args.tag
        tags = bug.tags
        if tag not in tags:
            tags.append(tag)
            bug.tags = tags
            bug.lp_save()

Example 26

View license
def read_file(path):
    if path == '-':
        lines = sys.stdin.readlines()
    else:
        try:
            f = open(path)
        except IOError as msg:
            print('open %s: %s' % (path, msg), file=sys.stderr)
            sys.exit(1)
        lines = f.readlines()
        f.close()

    return ''.join(lines)

Example 27

View license
def read_file(path):
    if path == '-':
        lines = sys.stdin.readlines()
    else:
        try:
            f = open(path)
        except IOError as msg:
            print('open %s: %s' % (path, msg), file=sys.stderr)
            sys.exit(1)
        lines = f.readlines()
        f.close()

    return ''.join(lines)

Example 28

Project: conda-execute
Source File: execute.py
View license
def main():
    parser = argparse.ArgumentParser(description='Execute a script in a temporary conda environment.')
    parser.add_argument('path', nargs='?',
                        help='The script to execute.')
    parser.add_argument('--force-env', '-f', help='Force re-creation of the environment, even if it already exists.', action='store_true')

    quiet_or_verbose = parser.add_mutually_exclusive_group()
    quiet_or_verbose.add_argument('--verbose', '-v', help='Turn on verbose output.', action='store_true')
    quiet_or_verbose.add_argument('--quiet', '-q', help='Prevent any output, other than that of the script being executed.',
                                  action='store_true')
    import sys
    class StdIn(argparse.Action):
        def __call__(self, parser, namespace, values, option_string=None):
            # Values could be None, or an empty list.
            if not values:
                setattr(namespace, self.dest, sys.stdin.readlines())
            else:
                values = [line + '\n' for line in values]
                setattr(namespace, self.dest, values)

    parser.add_argument('--code', '-c', nargs='*', action=StdIn,
                        help='The code to execute.')
    parser.add_argument('remaining_args', help='Remaining arguments are passed through to the called script.',
                        nargs=argparse.REMAINDER)
    args = parser.parse_args()

    log_level = logging.WARN
    if args.verbose:
        log_level = logging.DEBUG
    elif args.quiet:
        log_level = logging.ERROR

    # Configure the logging as desired.
    conda_execute.config.setup_logging(log_level)
    log.debug('Arguments passed: {}'.format(args))

    exit_actions = []

    try:
        if args.code:
            path = _write_code_to_disk(args.code)
            # Queue the temporary file up for cleaning.
            exit_actions.append(lambda: os.remove(path))
        elif args.path:
            # check to see if `args.path` is a remote path, download whatever
            # is at that remote location and stash it as args.code.
            if args.path.startswith('http'):
                # download code from the remote path and write it to disk
                code = requests.get(args.path).content.decode()
                path = _write_code_to_disk(code)
                # Queue the temporary file up for cleaning.
                exit_actions.append(lambda: os.remove(path))
            else:
                path = os.path.abspath(args.path)
        else:
            raise ValueError('Either pass the filename to execute, or pipe with -c.')

        exit_actions.append(cleanup_tmp_envs)
        exit(execute(path, force_env=args.force_env, arguments=args.remaining_args))
    finally:
        for action in exit_actions:
            action()

Example 29

Project: podoc
Source File: cli.py
View license
@click.command(help=PODOC_HELP)
@click.argument('files',
                # TODO: nargs=-1 for multiple files concat
                required=False,
                type=click.Path(exists=True, file_okay=True,
                                dir_okay=True, resolve_path=True))
@click.option('-f', '-r', '--from', '--read', default='markdown',
              help='Source format.')
@click.option('-t', '-w', '--to', '--write', default='ast',
              help='Target format.')
@click.option('-o', '--output',
              help='Output path.')
@click.option('--data-dir',
              help='Output directory.')
@click.option('--no-pandoc', default=False, is_flag=True,
              help='Disable pandoc formats.')
@click.version_option(__version__)
@click.help_option()
def podoc(files=None,
          read=None,
          write=None,
          output=None,
          data_dir=None,
          no_pandoc=False,
          ):
    """Convert a file or a string from one format to another."""
    # Create the Podoc instance.
    podoc = Podoc(with_pandoc=not(no_pandoc))
    # If no files are provided, read from the standard input (like pandoc).
    if not files:
        logger.debug("Reading contents from stdin...")
        contents_s = ''.join(sys.stdin.readlines())
        # From string to object.
        contents = podoc.loads(contents_s, read)
        logger.debug("Converting `%s` from %s to %s (file: `%s`).",
                     _shorten_string(contents_s),
                     read, write, output,
                     )
        out = podoc.convert(contents, source=read, target=write,
                            output=output)
    else:
        # TODO: multiple files
        logger.debug("Converting file `%s` from %s to %s in %s.",
                     files, read, write, output)
        out = podoc.convert(files, source=read, target=write, output=output)
    if output is None:
        click.echo(podoc.dumps(out, write))
        return

Example 30

Project: python-xlib
Source File: genprottest.py
View license
def read_defs():
    global request_defs, reply_defs, struct_defs
    global mini_request_defs, resource_request_defs
    global event_defs

    request_defs = {}
    mini_request_defs = {}
    resource_request_defs = {}
    reply_defs = {}
    struct_defs = {}
    event_defs = {}

    for line in sys.stdin.readlines():
        parts = line.strip().split()

        fields = []
        for f in parts[2:]:
            fields.append(f.split(':'))

        if parts[0] == 'REQUEST':
            request_defs[parts[1]] = fields
        elif parts[0] == 'MINIREQUEST':
            mini_request_defs[parts[1]] = MINI_DEF
        elif parts[0] == 'RESOURCEREQUEST':
            resource_request_defs[parts[1]] = RESOURCE_DEF
        elif parts[0] == 'REPLY':
            reply_defs[parts[1]] = fields
        elif parts[0] == 'STRUCT':
            struct_defs[parts[1]] = fields
        elif parts[0] == 'EVENT':
            event_defs[parts[1]] = fields

Example 31

Project: pandashells
Source File: p_parallel.py
View license
def main():
    msg = "Tool to run shell commands in parallel.  Spawns processes "
    msg += "to concurrently run commands supplied on stdin. "

    msg = textwrap.dedent(
        """
        Read a list of commands from stdin and execute them in parrallel.

        -----------------------------------------------------------------------
        Examples:

            * This line generates commands that will be used in the examples.
                time seq 10 \\
                | p.format -t 'sleep 1; echo done {n}' --names n -i noheader

            * Execute the commands one at a time, no parallelism
                time seq 10 \\
                | p.format -t 'sleep 1; echo done {n}' --names n -i noheader \\
                | p.parallel -n 1

            * Execute all commands in parallel
                time seq 10 \\
                | p.format -t 'sleep 1; echo done {n}' --names n -i noheader \\
                | p.parallel -n 10

            * Suppress stdout from processes and echo commands
                time seq 10 \\
                | p.format -t 'sleep 1; echo done {n}' --names n -i noheader \\
                | p.parallel -n 10 -c -s stdout

            * Make a histogram of how long the individual jobs took
                time seq 100 \\
                | p.format -t 'sleep 1; echo done {n}' --names n -i noheader \\
                | p.parallel -n 50 -v \\
                | grep __job__ \\
                | p.df 'df.dropna()' 'df.duration_sec.hist(bins=20)'
        -----------------------------------------------------------------------
        """
    )

    # read command line arguments
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, description=msg)

    msg = "The number of jobs to run in parallel. If not supplied, will "
    msg += "default to the number of detected cores."
    parser.add_argument('--njobs', '-n', dest='njobs', default=[None],
                        nargs=1, type=int, help=msg)
    parser.add_argument("-v", "--verbose", action="store_true", default=False,
                        help="Enable verbose output")

    parser.add_argument("-c", "--show_commands", action="store_true",
                        default=False, help="Print commands to stdout")

    msg = "Suppress stdout, stderr, or both for all running jobs"
    parser.add_argument("-s", "--suppress",
                        choices=['stdout', 'stderr', 'both'], default=[None],
                        nargs=1, help=msg)

    # parse arguments
    args = parser.parse_args()

    # get the commands from stdin
    cmd_list = sys.stdin.readlines()

    # get suppression vars from args
    suppress_stdout = 'stdout' in args.suppress or 'both' in args.suppress
    suppress_stderr = 'stderr' in args.suppress or 'both' in args.suppress

    # run the commands
    parallel_lib.parallel(
        cmd_list,
        njobs=args.njobs[0],
        verbose=args.verbose,
        suppress_cmd=(not args.show_commands),
        suppress_stdout=suppress_stdout,
        suppress_stderr=suppress_stderr,
        assume_hyperthread=True)

Example 32

Project: twitter
Source File: archiver.py
View license
def main(args=sys.argv[1:]):
    options = {
        'oauth': False,
        'save-dir': ".",
        'api-rate': False,
        'timeline': "",
        'mentions': "",
        'dms': "",
        'favorites': False,
        'follow-redirects': False,
        'redirect-sites': None,
        'isoformat': False,
    }
    try:
        parse_args(args, options)
    except GetoptError as e:
        err("I can't do that, %s." % e)
        raise SystemExit(1)

    # exit if no user given
    # except if asking for API rate, or archive of timeline or mentions
    if not options['extra_args'] and not (options['api-rate'] or
                                          options['timeline'] or
                                          options['mentions'] or
                                          options['dms']):
        print(__doc__)
        return

    # authenticate using OAuth, asking for token if necessary
    if options['oauth']:
        oauth_filename = (os.environ.get('HOME', 
                          os.environ.get('USERPROFILE', '')) 
                          + os.sep
                          + '.twitter-archiver_oauth')
        
        if not os.path.exists(oauth_filename):
            oauth_dance("Twitter-Archiver", CONSUMER_KEY, CONSUMER_SECRET,
                        oauth_filename)
        oauth_token, oauth_token_secret = read_token_file(oauth_filename)
        auth = OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY,
                     CONSUMER_SECRET)
    else:
        auth = NoAuth()

    twitter = Twitter(auth=auth, api_version='1.1', domain='api.twitter.com')

    if options['api-rate']:
        rate_limit_status(twitter)
        return

    global format_text
    if options['follow-redirects'] or options['redirect-sites'] :
        if options['redirect-sites']:
            hosts = parse_host_list(options['redirect-sites'])
        else:
            hosts = None
        format_text = functools.partial(expand_format_text, hosts)
    else:
        format_text = direct_format_text

    # save own timeline or mentions (the user used in OAuth)
    if options['timeline'] or options['mentions']:
        if isinstance(auth, NoAuth):
            err("You must be authenticated to save timeline or mentions.")
            raise SystemExit(1)

        if options['timeline']:
            filename = options['save-dir'] + os.sep + options['timeline']
            print("* Archiving own timeline in %s" % filename)
        elif options['mentions']:
            filename = options['save-dir'] + os.sep + options['mentions']
            print("* Archiving own mentions in %s" % filename)

        tweets = {}
        try:
            tweets = load_tweets(filename)
        except Exception as e:
            err("Error when loading saved tweets: %s - continuing without"
                % str(e))

        try:
            statuses(twitter, "", tweets, options['mentions'], options['favorites'], isoformat=options['isoformat'])
        except KeyboardInterrupt:
            err()
            err("Interrupted")
            raise SystemExit(1)

        save_tweets(filename, tweets)
        if options['timeline']:
            print("Total tweets in own timeline: %i" % len(tweets))
        elif options['mentions']:
            print("Total mentions: %i" % len(tweets))

    if options['dms']:
        if isinstance(auth, NoAuth):
            err("You must be authenticated to save DMs.")
            raise SystemExit(1)

        filename = options['save-dir'] + os.sep + options['dms']
        print("* Archiving own DMs in %s" % filename)

        dms = {}
        try:
            dms = load_tweets(filename)
        except Exception as e:
            err("Error when loading saved DMs: %s - continuing without"
                % str(e))

        try:
            statuses(twitter, "", dms, received_dms=True, isoformat=options['isoformat'])
            statuses(twitter, "", dms, received_dms=False, isoformat=options['isoformat'])
        except KeyboardInterrupt:
            err()
            err("Interrupted")
            raise SystemExit(1)

        save_tweets(filename, dms)
        print("Total DMs sent and received: %i" % len(dms))


    # read users from command-line or stdin
    users = options['extra_args']
    if len(users) == 1 and users[0] == "-":
        users = [line.strip() for line in sys.stdin.readlines()]

    # save tweets for every user
    total, total_new = 0, 0
    for user in users:
        filename = options['save-dir'] + os.sep + user
        if options['favorites']:
            filename = filename + "-favorites"
        print("* Archiving %s tweets in %s" % (user, filename))

        tweets = {}
        try:
            tweets = load_tweets(filename)
        except Exception as e:
            err("Error when loading saved tweets: %s - continuing without"
                % str(e))

        new = 0
        before = len(tweets)
        try:
            statuses(twitter, user, tweets, options['mentions'], options['favorites'], isoformat=options['isoformat'])
        except KeyboardInterrupt:
            err()
            err("Interrupted")
            raise SystemExit(1)

        save_tweets(filename, tweets)
        total += len(tweets)
        new = len(tweets) - before
        total_new += new
        print("Total tweets for %s: %i (%i new)" % (user, len(tweets), new))

    print("Total: %i tweets (%i new) for %i users"
          % (total, total_new, len(users)))

Example 33

Project: utter-va
Source File: archiver.py
View license
def main(args=sys.argv[1:]):
    options = {
        'oauth': False,
        'save-dir': ".",
        'api-rate': False,
        'timeline': "",
        'mentions': "",
        'dms': "",
        'favorites': False,
        'follow-redirects': False,
        'redirect-sites': None,
        'isoformat': False,
    }
    try:
        parse_args(args, options)
    except GetoptError as e:
        err("I can't do that, %s." % e)
        raise SystemExit(1)

    # exit if no user given
    # except if asking for API rate, or archive of timeline or mentions
    if not options['extra_args'] and not (options['api-rate'] or
                                          options['timeline'] or
                                          options['mentions'] or
                                          options['dms']):
        print(__doc__)
        return

    # authenticate using OAuth, asking for token if necessary
    if options['oauth']:
        oauth_filename = (os.environ.get('HOME', 
                          os.environ.get('USERPROFILE', '')) 
                          + os.sep
                          + '.twitter-archiver_oauth')
        
        if not os.path.exists(oauth_filename):
            oauth_dance("Twitter-Archiver", CONSUMER_KEY, CONSUMER_SECRET,
                        oauth_filename)
        oauth_token, oauth_token_secret = read_token_file(oauth_filename)
        auth = OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY,
                     CONSUMER_SECRET)
    else:
        auth = NoAuth()

    twitter = Twitter(auth=auth, api_version='1.1', domain='api.twitter.com')

    if options['api-rate']:
        rate_limit_status(twitter)
        return

    global format_text
    if options['follow-redirects'] or options['redirect-sites'] :
        if options['redirect-sites']:
            hosts = parse_host_list(options['redirect-sites'])
        else:
            hosts = None
        format_text = functools.partial(expand_format_text, hosts)
    else:
        format_text = direct_format_text

    # save own timeline or mentions (the user used in OAuth)
    if options['timeline'] or options['mentions']:
        if isinstance(auth, NoAuth):
            err("You must be authenticated to save timeline or mentions.")
            raise SystemExit(1)

        if options['timeline']:
            filename = options['save-dir'] + os.sep + options['timeline']
            print("* Archiving own timeline in %s" % filename)
        elif options['mentions']:
            filename = options['save-dir'] + os.sep + options['mentions']
            print("* Archiving own mentions in %s" % filename)

        tweets = {}
        try:
            tweets = load_tweets(filename)
        except Exception as e:
            err("Error when loading saved tweets: %s - continuing without"
                % str(e))

        try:
            statuses(twitter, "", tweets, options['mentions'], options['favorites'], isoformat=options['isoformat'])
        except KeyboardInterrupt:
            err()
            err("Interrupted")
            raise SystemExit(1)

        save_tweets(filename, tweets)
        if options['timeline']:
            print("Total tweets in own timeline: %i" % len(tweets))
        elif options['mentions']:
            print("Total mentions: %i" % len(tweets))

    if options['dms']:
        if isinstance(auth, NoAuth):
            err("You must be authenticated to save DMs.")
            raise SystemExit(1)

        filename = options['save-dir'] + os.sep + options['dms']
        print("* Archiving own DMs in %s" % filename)

        dms = {}
        try:
            dms = load_tweets(filename)
        except Exception as e:
            err("Error when loading saved DMs: %s - continuing without"
                % str(e))

        try:
            statuses(twitter, "", dms, received_dms=True, isoformat=options['isoformat'])
            statuses(twitter, "", dms, received_dms=False, isoformat=options['isoformat'])
        except KeyboardInterrupt:
            err()
            err("Interrupted")
            raise SystemExit(1)

        save_tweets(filename, dms)
        print("Total DMs sent and received: %i" % len(dms))


    # read users from command-line or stdin
    users = options['extra_args']
    if len(users) == 1 and users[0] == "-":
        users = [line.strip() for line in sys.stdin.readlines()]

    # save tweets for every user
    total, total_new = 0, 0
    for user in users:
        filename = options['save-dir'] + os.sep + user
        if options['favorites']:
            filename = filename + "-favorites"
        print("* Archiving %s tweets in %s" % (user, filename))

        tweets = {}
        try:
            tweets = load_tweets(filename)
        except Exception as e:
            err("Error when loading saved tweets: %s - continuing without"
                % str(e))

        new = 0
        before = len(tweets)
        try:
            statuses(twitter, user, tweets, options['mentions'], options['favorites'], isoformat=options['isoformat'])
        except KeyboardInterrupt:
            err()
            err("Interrupted")
            raise SystemExit(1)

        save_tweets(filename, tweets)
        total += len(tweets)
        new = len(tweets) - before
        total_new += new
        print("Total tweets for %s: %i (%i new)" % (user, len(tweets), new))

    print("Total: %i tweets (%i new) for %i users"
          % (total, total_new, len(users)))

Example 34

Project: pyjip
Source File: jip_bash.py
View license
def main():
    args = parse_args(__doc__, options_first=False)
    pipeline = jip.Pipeline()
    bash = pipeline.job(
        args['--name'] if args['--name'] else 'bash'
    ).run('bash')
    if not args['--cmd']:
        args['--cmd'] = "\n".join(sys.stdin.readlines())

    bash.input = [sys.stdin if a == 'stdin' else a
                  for a in args['--input']]
    bash.output = [sys.stdout if a == 'stdout' else a
                   for a in args['--output']]
    bash.outfile = [a for a in args['--outfile']]
    bash.cmd = args['--cmd']
    if not args['--cmd']:
        print >>sys.stderr, "No Command specified!"
        sys.exit(1)

    if args['--dry'] or args['--show']:
        jip.cli.dry(pipeline, [],
                    dry=args['--dry'],
                    show=args['--show'])
        return

    profile = jip.profiles.get(name='default'
                               if not args['--profile']
                               else args['--profile'])
    profile.load_args(args)

    jobs = jip.jobs.create_jobs(pipeline, [], keep=args['--keep'],
                                profile=profile,
                                profiler=args['--with-profiler'])

    force = args['--force']
    if not args["--submit"]:
        # assign job ids
        for i, j in enumerate(jobs):
            j.id = i + 1
        for exe in jip.jobs.create_executions(jobs):
            if exe.completed and not force:
                print >>sys.stderr, colorize("Skipping", YELLOW), exe.name
            else:
                success = jip.jobs.run_job(exe.job)
                if not success:
                    print >>sys.stderr, colorize(exe.job.state, RED)
                    sys.exit(1)
    else:
        try:
            #####################################################
            # Iterate the executions and submit
            #####################################################
            for exe in jip.jobs.create_executions(jobs, save=True,
                                                  check_outputs=not force,
                                                  check_queued=not force):
                if exe.completed and not force:
                    print colorize("Skipping %s" % exe.name, YELLOW)
                else:
                    if jip.jobs.submit_job(exe.job, force=force):
                        print "Submitted %s with remote id %s" % (
                            exe.job.id, exe.job.job_id
                        )
        except Exception as err:
            log.debug("Submission error: %s", err, exc_info=True)
            print >>sys.stderr, colorize("Error while submitting job:", RED), \
                colorize(str(err), RED)
            ##################################################
            # delete all submitted jobs
            ##################################################
            jip.jobs.delete(jobs, clean_logs=True)

Example 35

Project: pyjip
Source File: jip_pipe.py
View license
def main():
    args = parse_args(__doc__, options_first=False)
    pipeline = jip.Pipeline()
    if not args['--cmd']:
        args['--cmd'] = "\n".join(sys.stdin.readlines())
    if not args['--cmd']:
        print >>sys.stderr, "No Command specified!"
        sys.exit(1)

    @jip.pipeline()
    def embedded_pipeline():
        """Embedded pipeline to run a custom pipeline script

        usage:
            embedded [-i <input>] [-I <inputs>...]

        Inputs:
            -i, --input <input>       Single input file
                                      [default: stdin]
            -I, --inputs <inputs>...  List of input files
        """
        return "\n".join(args['--cmd'])
    pipeline.job(
        args['--name'] if args['--name'] else 'pipeline'
    ).run('embedded_pipeline',
          input=[sys.stdin if a == 'stdin' else a for a in args['--input']],
          inputs=args['--inputs'])

    if args['--dry'] or args['--show']:
        jip.cli.dry(pipeline, [],
                    dry=args['--dry'],
                    show=args['--show'])
        return

    profile = jip.profiles.get(name='default'
                               if not args['--profile']
                               else args['--profile'])
    profile.load_args(args)

    jobs = jip.jobs.create_jobs(pipeline, [], keep=args['--keep'],
                                profile=profile,
                                profiler=args['--with-profiler'])

    force = args['--force']
    if not args["--submit"]:
        # assign job ids
        for i, j in enumerate(jobs):
            j.id = i + 1
        for exe in jip.jobs.create_executions(jobs):
            if exe.completed and not force:
                print >>sys.stderr, colorize("Skipping", YELLOW), exe.name
            else:
                success = jip.jobs.run_job(exe.job)
                if not success:
                    print >>sys.stderr, colorize(exe.job.state, RED)
                    sys.exit(1)
    else:
        try:
            #####################################################
            # Iterate the executions and submit
            #####################################################
            for exe in jip.jobs.create_executions(jobs, save=True,
                                                  check_outputs=not force,
                                                  check_queued=not force):
                if exe.completed and not force:
                    print colorize("Skipping %s" % exe.name, YELLOW)
                else:
                    if jip.jobs.submit_job(exe.job, force=force):
                        print "Submitted %s with remote id %s" % (
                            exe.job.id, exe.job.job_id
                        )
        except Exception as err:
            log.debug("Submission error: %s", err, exc_info=True)
            print >>sys.stderr, colorize("Error while submitting job:", RED), \
                colorize(str(err), RED)
            ##################################################
            # delete all submitted jobs
            ##################################################
            jip.jobs.delete(jobs, clean_logs=True)

Example 36

Project: translate
Source File: mozfunny2prop.py
View license
def main(argv=None):
    import sys
    lines = sys.stdin.readlines()
    for line in funny2prop(lines):
        sys.stdout.write(line)

Example 37

Project: translate
Source File: pydiff.py
View license
    def writediff(self, outfile):
        """writes the actual diff to the given file"""
        validfiles = True
        if os.path.exists(self.fromfile):
            with open(self.fromfile, 'U') as fh:
                self.from_lines = fh.readlines()
            fromfiledate = os.stat(self.fromfile).st_mtime
        elif self.fromfile == "-":
            self.from_lines = sys.stdin.readlines()
            fromfiledate = time.time()
        elif self.options.new_file or self.options.unidirectional_new_file:
            self.from_lines = []
            fromfiledate = 0
        else:
            outfile.write("%s: No such file or directory\n" % self.fromfile)
            validfiles = False
        if os.path.exists(self.tofile):
            with open(self.tofile, 'U') as fh:
                self.to_lines = fh.readlines()
            tofiledate = os.stat(self.tofile).st_mtime
        elif self.tofile == "-":
            self.to_lines = sys.stdin.readlines()
            tofiledate = time.time()
        elif self.options.new_file:
            self.to_lines = []
            tofiledate = 0
        else:
            outfile.write("%s: No such file or directory\n" % self.tofile)
            validfiles = False
        if not validfiles:
            return
        fromfiledate = time.ctime(fromfiledate)
        tofiledate = time.ctime(tofiledate)
        compare_from_lines = self.from_lines
        compare_to_lines = self.to_lines
        if self.options.ignore_case:
            compare_from_lines = [line.lower() for line in compare_from_lines]
            compare_to_lines = [line.lower() for line in compare_to_lines]
        matcher = difflib.SequenceMatcher(None, compare_from_lines, compare_to_lines)
        groups = matcher.get_grouped_opcodes(self.options.unified_lines)
        started = False
        fromstring = '--- %s\t%s%s' % (self.fromfile, fromfiledate, lineterm)
        tostring = '+++ %s\t%s%s' % (self.tofile, tofiledate, lineterm)

        for group in groups:
            hunk = "".join([line for line in self.unified_diff(group)])
            if self.options.fromcontains:
                if self.options.ignore_case_contains:
                    hunk_from_lines = "".join([line.lower() for line in self.get_from_lines(group)])
                else:
                    hunk_from_lines = "".join(self.get_from_lines(group))
                for accelerator in self.options.accelchars:
                    hunk_from_lines = hunk_from_lines.replace(accelerator, "")
                if self.options.fromcontains not in hunk_from_lines:
                    continue
            if self.options.tocontains:
                if self.options.ignore_case_contains:
                    hunk_to_lines = "".join([line.lower() for line in self.get_to_lines(group)])
                else:
                    hunk_to_lines = "".join(self.get_to_lines(group))
                for accelerator in self.options.accelchars:
                    hunk_to_lines = hunk_to_lines.replace(accelerator, "")
                if self.options.tocontains not in hunk_to_lines:
                    continue
            if self.options.contains:
                if self.options.ignore_case_contains:
                    hunk_lines = "".join([line.lower() for line in self.get_from_lines(group) + self.get_to_lines(group)])
                else:
                    hunk_lines = "".join(self.get_from_lines(group) + self.get_to_lines(group))
                for accelerator in self.options.accelchars:
                    hunk_lines = hunk_lines.replace(accelerator, "")
                if self.options.contains not in hunk_lines:
                    continue
            if not started:
                outfile.write(fromstring)
                outfile.write(tostring)
                started = True
            outfile.write(hunk)
        if not started and self.options.report_identical_files:
            outfile.write("Files %s and %s are identical\n" %
                          (self.fromfile, self.tofile))

Example 38

Project: TrustRouter
Source File: ndiff.py
View license
def restore(which):
    restored = difflib.restore(sys.stdin.readlines(), which)
    sys.stdout.writelines(restored)

Example 39

Project: urwid
Source File: old_str_util.py
View license
def process_east_asian_width():
    import sys
    out = []
    last = None
    for line in sys.stdin.readlines():
        if line[:1] == "#": continue
        line = line.strip()
        hex,rest = line.split(";",1)
        wid,rest = rest.split(" # ",1)
        word1 = rest.split(" ",1)[0]

        if "." in hex:
            hex = hex.split("..")[1]
        num = int(hex, 16)

        if word1 in ("COMBINING","MODIFIER","<control>"):
            l = 0
        elif wid in ("W", "F"):
            l = 2
        else:
            l = 1

        if last is None:
            out.append((0, l))
            last = l

        if last == l:
            out[-1] = (num, l)
        else:
            out.append( (num, l) )
            last = l

    print("widths = [")
    for o in out[1:]:  # treat control characters same as ascii
        print("\t%r," % (o,))
    print("]")

Example 40

Project: Veil-Evasion
Source File: ndiff.py
View license
def restore(which):
    restored = difflib.restore(sys.stdin.readlines(), which)
    sys.stdout.writelines(restored)

Example 41

Project: stash
Source File: xargs.py
View license
def main(args):
    ap = argparse.ArgumentParser()
    ap.add_argument('-n',
                    nargs='?',
                    metavar='number',
                    type=int,
                    help='maximum number of arguments taken from standard input for each invocation of utility')

    ap.add_argument('-I',
                    dest='replstr',
                    nargs='?',
                    help='replacement string')

    ap.add_argument('utility',
                    nargs='?',
                    default='echo',
                    help='utility to invoke')

    ap.add_argument('args_to_pass',
                    metavar='arguments',
                    nargs=argparse.REMAINDER,
                    help='arguments to the utility')

    ns = ap.parse_args(args)

    lines = [line.strip() for line in sys.stdin.readlines()]
    n = ns.n if ns.n else len(lines)
    if ns.replstr:
        n = 1

    while lines:
        rest = ' '.join(lines[:n])
        lines = lines[n:]
        args_to_pass = ' '.join(ns.args_to_pass)

        if rest.strip():

            if ns.replstr:
                args_to_pass = args_to_pass.replace(ns.replstr, rest)
                rest = ''

            cmdline = '%s %s %s' % (ns.utility,
                                    args_to_pass,
                                    rest)

            _stash(cmdline)

Example 42

Project: cgat
Source File: PdbTools.py
View license
def ConvertSequence2StructuralAlignment( src1, src2, source=None, format="plain", check_residues = 1):
    """calculate a structural alignment from two pdb files.
    """

    ca1 = GetPdbCoordinates( src1, renumber = 1)

    if len(ca1) == 0:
        raise "no coordinates found for %s" % src1

    ca2 = GetPdbCoordinates( src2, renumber = 1 )

    if len(ca2) == 0:
        raise "no coordinates found for %s" % src2

    if string.lower(format) not in ("plain",):
        raise "unknown alignment format %s" % format

    if source:
        lines = open(source, "r").readlines()
    else:
        lines = sys.stdin.readlines()

    ## replace gap characters
    lines = map(lambda x: re.sub( "\s", "", string.replace(x, ".", "-")), lines)
    if not lines:
        raise ValueError, "alignment is empty"

    lali = len(lines[0])

    current1 = 0
    current2 = 0

    index1 = 0
    index2 = 0

    output = []

    alignment = []

    for x in range(0, lali):

        res1 = lines[0][x]
        res2 = lines[1][x]

        if res1 != "-": current1+=1
        if res2 != "-": current2+=1

        try:
            while (ca1[index1][0] < current1): index1 += 1
            while (ca2[index2][0] < current2): index2 += 1                    
        except IndexError:
            break

        if res1 == "-" or res2 == "-":
            continue

        (i1, aa1, x1, y1, z1) = ca1[index1]
        (i2, aa2, x2, y2, z2) = ca2[index2]        

        if check_residues:
            if aa1 != res1:
                sys.stderr.write("# mismatch in 1:%s at residue alignment %i(%s) -> structure %i(%s)\n" %\
                                 (source, current1, res1, index1, aa1))
            if aa2 != res2:
                sys.stderr.write("# mismatch in 2:%s at residue %i(%s) -> %i(%s)\n" %\
                                 (source, current2, res2, index2, aa2))

        alignment.append( (x1, y1, z1, x2, y2, z2, 1) )

    return alignment

Example 43

Project: write-it
Source File: handleemail.py
View license
    def handle(self, *args, **options):
        lines = sys.stdin.readlines()
        if settings.INCOMING_EMAIL_LOGGING == 'ALL':
            if not settings.ADMINS:
                return
            text_content = "New incomming email"
            subject = "New incomming email"

            mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
                text_content,  # content
                settings.DEFAULT_FROM_EMAIL,  # From
                [a[1] for a in settings.ADMINS]  # To
                )
            mail.attach('mail.txt', ''.join(lines), 'text/plain')
            mail.send()

        handler = EmailHandler(answer_class=AnswerForManageCommand)
        try:
            answer = handler.handle(lines)
            answer.send_back()
        except CouldNotFindIdentifier:
            pass
        except:
            tb = traceback.format_exc()
            text_content = "Error the traceback was:\n" + tb
            #mail_admins('Error handling incoming email', html_message, html_message=html_message)
            subject = "Error handling incoming email"
            mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
                text_content,  # content
                settings.DEFAULT_FROM_EMAIL,  # From
                [a[1] for a in settings.ADMINS],  # To
                )
            mail.attach('mail.txt', ''.join(lines), 'text/plain')
            mail.send()

Example 44

Project: fbpush
Source File: main.py
View license
def curses_push(args):
    """
    """
    devices = get_device()
    if len(devices) == 0:
        print('No device is selected.')
        return

    logger.debug('%d devices selected' % len(devices))
    configlets = set()
    has_error = False

    if config.stdin:
        lines = sys.stdin.readlines()
        configlets.add(Configlet(config.STDIN_FNAME, lines))
    else:
        for arg in args:
            configlets.update(get_configlets(arg))

    # some error checking for configlets
    for configlet in configlets:
        # warn empty file
        if configlet.is_empty:
            print('WARNING: file %s is empty' % configlet.name)
            continue
        # Configlet error checking:(1) check vendor tag and (2) unicode
        # check vendor tag
        if configlet.tags.get('vendor') is None:
            print('Configlet %s needs to have vendor tag' % configlet.name)
            has_error = True
        # check for unicode
        for line in configlet.lines:
            try:
                line.decode('ascii')
            except UnicodeDecodeError:
                print('configlets %s contains unicode at line\n-%s' %
                      (configlet.name, line))
                has_error = True

    if has_error:
        return

    if config.gen_md5:
        for configlet in configlets:
            print('%s %s' % (configlet.name, configlet.gen_md5))
        return

    work_to_do = False
    dry_run_needed = False

    for device in devices:

        device.configlets = []

        for configlet in configlets:
            if configlet.valid_for(device) and not configlet.is_empty:
                device.configlets.append(configlet)
                work_to_do = True
                if device.vendor in config.DRY_RUN_VENDORS:
                    dry_run_needed = True

    ndevice = 0
    # dict{device_name:diff}
    all_diffs = {}
    if work_to_do:
        for device in devices:
            if not device.configlets:
                continue

            ndevice += 1
            if device.vendor != 'juniper':
                if config.atomic:
                    print('atomic is only for juniper, but %s is %s'
                          % (device.name, device.vendor))
                    return

            if device.vendor not in config.DRY_RUN_VENDORS:
                if config.dry_run:
                    print('dry_run is not supported for %s (%s)'
                          % (device.name, device.vendor))
                    return

    try:
        PrintReminder().print_reminder(
                config.dry_run, ndevice, config.diff_dir)
    except Exception as ex:
        print('Cannot read reminder from configerator %s' % str(ex))

    if work_to_do:
        # create diff dir if not exist
        if config.diff_dir:
            try:
                os.makedirs(config.diff_dir)
            except:
                pass

        # force users must dry_run before real push
        if (dry_run_needed and
                not config.dry_run and
                not is_dry_run_prev(configlets) and
                not config.clowntown):
            PrintReminder.yellow(''.join([
                '\nYour previous "fbpush" is not dry_run for \n%s\n'
                % args, 'Or confilget(s) are modified? ',
                'Please dry_run and review the diff again.',
                ]))
            return

        if config.clowntown:
            PrintReminder.red(
                    'You by pass dry_run check, '
                    'I believe you have a legit reason')

        if config.stdin:
            # release previous piped program fd
            # otherwise curse lib will fail
            fd = open('/dev/tty')
            os.dup2(fd.fileno(), 0)

        if not config.username:
            config.username = getpass.getuser()
        while not config.password:
            config.password = getpass.getpass()
            if not config.password:
                print('Password cannot be empty!')

        connections = []
        try:
            stdscr = curses.initscr()
            screen = push.Screen(connections, stdscr)
            screen.fsm = FSM_PushCurses(init_state=FSM_PushCurses.ST_START)
            reactor.addReader(screen)

            start_time = int(time.time())

            # one single diff file
            if config.diff_dir and not config.multiple_diffs:
                diff_fh = open(os.path.join(
                               config.diff_dir,
                               config.FBPUSH_SINGLE_FILENAME), 'w', 0)
            for device in devices:
                if not device.configlets:
                    continue

                # multiple diff files
                if config.diff_dir and config.multiple_diffs:
                    diff_fh = open(os.path.join(
                        config.diff_dir, device.name + '.diff'), 'w', 0)

                if device.vendor == 'juniper':
                    jnx = push.JunoscriptRpc(
                        screen,
                        connections,
                        device,
                        config.username,
                        config.password,
                        start_time,
                        all_diffs,
                        diff_fh)
                    jnx.addConfiglets(device.configlets)
                    jnx.fsm = FSM_PushJnx(init_state=FSM_PushJnx.ST_START)

                    if config.diff_dir:
                        jnx.fsm.addDiffing(jnx)
                    else:  # bypass diffing
                        jnx.fsm.addCommits(jnx)

                    reactor.connectSSL(
                        # juniper is not connect thru fcr
                        # need to specify ip here
                        device.ip,
                        config.XNM_SSL_PORT,
                        jnx,
                        ssl.ClientContextFactory())
                    connections.append(jnx)

            logger.info('Launching reactor with %d connetions',
                        len(connections))
            reactor.run()
        finally:
            curses.nocbreak()
            stdscr.keypad(0)
            stdscr.standout()
            curses.echo()
            curses.endwin()

            # write md5 of all files (args) into a file,
            # to indicate user has dry_run
            if config.dry_run:
                update_dry_run_hash(configlets)

            for connection in connections:
                if not connection.diff_fh.closed:
                    connection.diff_fh.close()

            return all_diffs
    else:
        print('Nothing to do. No match between configlets and devices')
        config.parser.print_help()

Example 45

Project: PathPicker
Source File: processInput.py
View license
def getLineObjs(flags):
    inputLines = sys.stdin.readlines()
    return getLineObjsFromLines(inputLines,
                                validateFileExists=not flags.getDisableFileChecks(),
                                allInput=flags.getAllInput())

Example 46

Project: fedmsg
Source File: announce.py
View license
    def run(self):
        # This specifies that a special certificate should be used to sign this
        # message.  At the sysadmin level, you are responsible for taking care
        # of two things:
        #   1) That the announce cert is readable only by appropriate persons.
        #   2) That the routing_policy is setup so that "announce.announcement"
        #      messages are valid only if signed by such a certificate.
        self.config['cert_prefix'] = "announce"

        # This just specifies that we should be talking to the fedmsg-relay.
        self.config['active'] = True
        self.config['name'] = 'relay_inbound'
        fedmsg.init(**self.config)

        # Read in and setup our message.  Include --link, even if it is None.
        message = "\n".join(map(str.strip, sys.stdin.readlines()))
        msg = dict(message=message, link=self.config['link'])

        # Fire!
        fedmsg.publish(modname="announce", topic="announcement", msg=msg)

Example 47

Project: datafari
Source File: ndiff.py
View license
def restore(which):
    restored = difflib.restore(sys.stdin.readlines(), which)
    sys.stdout.writelines(restored)

Example 48

Project: bashplotlib
Source File: histogram.py
View license
def main():

    parser = optparse.OptionParser(usage=hist['usage'])

    parser.add_option(
        '-f', '--file', help='a file containing a column of numbers', default=None, dest='f')
    parser.add_option('-t', '--title', help='title for the chart', default="", dest='t')
    parser.add_option(
        '-b', '--bins', help='number of bins in the histogram', type='int', default=None, dest='b')
    parser.add_option('-w', '--binwidth', help='width of bins in the histogram',
                      type='float', default=None, dest='binwidth')
    parser.add_option('-s', '--height', help='height of the histogram (in lines)',
                      type='int', default=None, dest='h')
    parser.add_option('-p', '--pch', help='shape of each bar', default='o', dest='p')
    parser.add_option('-x', '--xlab', help='label bins on x-axis',
                      default=None, action="store_true", dest='x')
    parser.add_option('-c', '--colour', help='colour of the plot (%s)' %
                      colour_help, default='default', dest='colour')
    parser.add_option('-d', '--demo', help='run demos', action='store_true', dest='demo')
    parser.add_option('-n', '--nosummary', help='hide summary',
                      action='store_false', dest='showSummary', default=True)
    parser.add_option('-r', '--regular',
                      help='use regular y-scale (0 - maximum y value), instead of truncated y-scale (minimum y-value - maximum y-value)',
                      default=False, action="store_true", dest='regular')

    opts, args = parser.parse_args()

    if opts.f is None:
        if len(args) > 0:
            opts.f = args[0]
        elif opts.demo is None or opts.demo is False:
            opts.f = sys.stdin.readlines()

    if opts.demo:
        run_demo()
    elif opts.f:
        plot_hist(opts.f, opts.h, opts.b, opts.binwidth, opts.p, opts.colour,
                  opts.t, opts.x, opts.showSummary, opts.regular)
    else:
        print("nothing to plot!")

Example 49

Project: bashplotlib
Source File: scatterplot.py
View license
def main():

    parser = optparse.OptionParser(usage=scatter['usage'])

    parser.add_option('-f', '--file', help='a csv w/ x and y coordinates', default=None, dest='f')
    parser.add_option('-t', '--title', help='title for the chart', default="", dest='t')
    parser.add_option('-x', help='x coordinates', default=None, dest='x')
    parser.add_option('-y', help='y coordinates', default=None, dest='y')
    parser.add_option('-s', '--size', help='y coordinates', default=20, dest='size', type='int')
    parser.add_option('-p', '--pch', help='shape of point', default="x", dest='pch')
    parser.add_option('-c', '--colour', help='colour of the plot (%s)' %
                      colour_help, default='default', dest='colour')

    opts, args = parser.parse_args()

    if opts.f is None and (opts.x is None or opts.y is None):
        opts.f = sys.stdin.readlines()

    if opts.f or (opts.x and opts.y):
        plot_scatter(opts.f, opts.x, opts.y, opts.size, opts.pch, opts.colour, opts.t)
    else:
        print("nothing to plot!")

Example 50

Project: brenda
Source File: config.py
View license
    def __init__(self, config_file, env_prefix=None, default_stdin=False, use_s3cfg=True):
        # load and parse config file
        if config_file:
            with open(config_file) as f:
                for line in f.readlines():
                    self._process_line(line)
        elif default_stdin:
            for line in sys.stdin.readlines():
                self._process_line(line)

        # load environmental vars
        self._load_from_env(env_prefix)

        # get access_key and secret_key from ~/.s3cfg (it it exists)
        if use_s3cfg:
            for k, s3k in (('AWS_ACCESS_KEY', 'access_key'), ('AWS_SECRET_KEY', 'secret_key')):
                if not self.get(k):
                    v = self._s3cfg_get(s3k)
                    if v:
                        self[k] = v