logging.getLogger

Here are examples of the Python API logging.getLogger, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
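
For reference, this is a minimal, self-contained sketch of the API itself (not taken from any of the projects below): logging.getLogger(name) returns a cached logger for that name, and calling it with no argument (or '') returns the root logger.

import logging

# One-time configuration of the root logger
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

# getLogger() returns the same object for the same name, so every module
# can call it independently and still share one configuration
log = logging.getLogger(__name__)
log.info("message from a named logger")

# With no argument, getLogger() returns the root logger
logging.getLogger().warning("message from the root logger")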

200 Examples

Example 1

Project: omnivore
Source File: application.py
def run(plugins=[], use_eggs=True, egg_path=[], image_path=[], startup_task="", application_name="", debug_log=False):
    """Start the application
    
    :param plugins: list of user plugins
    :param use_eggs: if True, search for setuptools plugins and plugins in local eggs
    :param egg_path: list of user-specified paths to search for more plugins
    :param image_path: list of user-specified paths to search for images
    :param startup_task: task factory identifier for the task shown in the initial window
    :param application_name: application name to use instead of the default "Omnivore"
    :param debug_log: if True, save a debug log to a file
    """
    # Enthought library imports.
    from envisage.api import PluginManager
    from envisage.core_plugin import CorePlugin
    
    # Local imports.
    from omnivore.framework.plugin import OmnivoreTasksPlugin, OmnivoreMainPlugin
    from omnivore.file_type.plugin import FileTypePlugin
    from omnivore import get_image_path
    from omnivore.utils.jobs import get_global_job_manager
    
    # Include standard plugins
    core_plugins = [ CorePlugin(), OmnivoreTasksPlugin(), OmnivoreMainPlugin(), FileTypePlugin() ]
    if sys.platform == "darwin":
        from omnivore.framework.osx_plugin import OSXMenuBarPlugin
        core_plugins.append(OSXMenuBarPlugin())
    
    import omnivore.file_type.recognizers
    core_plugins.extend(omnivore.file_type.recognizers.plugins)
    
    import omnivore.plugins
    core_plugins.extend(omnivore.plugins.plugins)
    
    # Add the user's plugins
    core_plugins.extend(plugins)
    
    # Check basic command line args
    default_parser = argparse.ArgumentParser(description="Default Parser")
    default_parser.add_argument("--no-eggs", dest="use_eggs", action="store_false", default=True, help="Do not load plugins from python eggs")
    options, extra_args = default_parser.parse_known_args()

    # The default is to use the specified plugins as well as any found
    # through setuptools and any local eggs (if an egg_path is specified).
    # Egg/setuptool plugin searching is turned off by the use_eggs parameter.
    default = PluginManager(
        plugins = core_plugins,
    )
    if use_eggs and options.use_eggs:
        from pkg_resources import Environment, working_set
        from envisage.api import EggPluginManager
        from envisage.composite_plugin_manager import CompositePluginManager
        
        # Find all additional eggs and add them to the working set
        environment = Environment(egg_path)
        distributions, errors = working_set.find_plugins(environment)
        if len(errors) > 0:
            raise SystemError('cannot add eggs %s' % errors)
        logger = logging.getLogger()
        logger.debug('added eggs %s', distributions)
        # map() is lazy on Python 3, so iterate to actually add the eggs
        for distribution in distributions:
            working_set.add(distribution)

        # The plugin manager specifies which eggs to include and ignores all others
        egg = EggPluginManager(
            include = [
                'omnivore.tasks',
            ]
        )
        
        plugin_manager = CompositePluginManager(
            plugin_managers=[default, egg]
        )
    else:
        plugin_manager = default

    # Add omnivore icons after all image paths to allow user icon themes to take
    # precedence
    from pyface.resource_manager import resource_manager
    import os
    image_paths = image_path[:]
    image_paths.append(get_image_path("icons"))
    resource_manager.extra_paths.extend(image_paths)

    kwargs = {}
    if startup_task:
        kwargs['startup_task'] = startup_task
    if application_name:
        kwargs['name'] = application_name
    app = FrameworkApplication(plugin_manager=plugin_manager, command_line_args=extra_args, **kwargs)
    
    # Create a debugging log
    if debug_log:
        filename = app.get_log_file_name("debug")
        handler = logging.FileHandler(filename)
        logger = logging.getLogger('')
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
    
    # Turn off omnivore log debug messages by default
    log = logging.getLogger("omnivore")
    log.setLevel(logging.INFO)

    app.run()
    
    job_manager = get_global_job_manager()
    if job_manager is not None:
        job_manager.shutdown()
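
A condensed sketch of the logging pattern in Example 1: a FileHandler is attached to the root logger for a full DEBUG trace, while one chatty package logger is pinned at INFO. The file name and package name below are placeholders, not Omnivore's real values.

import logging

def enable_debug_log(filename="debug.log", quiet_package="myapp"):
    handler = logging.FileHandler(filename)
    root = logging.getLogger('')  # '' and no argument both mean the root logger
    root.addHandler(handler)
    root.setLevel(logging.DEBUG)
    # Named loggers inherit from the root, so one line quiets a whole
    # package without touching the file handler
    logging.getLogger(quiet_package).setLevel(logging.INFO)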

Example 2

Project: piston
Source File: __main__.py
def main():
    global args

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="Command line tool to interact with the Steem network"
    )

    """
        Default settings for all tools
    """
    parser.add_argument(
        '--node',
        type=str,
        default=config["node"],
        help='Websocket URL for public Steem API (default: "wss://this.piston.rocks/")'
    )
    parser.add_argument(
        '--rpcuser',
        type=str,
        default=config["rpcuser"],
        help='Websocket user if authentication is required'
    )
    parser.add_argument(
        '--rpcpassword',
        type=str,
        default=config["rpcpassword"],
        help='Websocket password if authentication is required'
    )
    parser.add_argument(
        '--nobroadcast', '-d',
        action='store_true',
        help='Do not broadcast anything'
    )
    parser.add_argument(
        '--nowallet', '-p',
        action='store_true',
        help='Do not load the wallet'
    )
    parser.add_argument(
        '--unsigned', '-x',
        action='store_true',
        help='Do not try to sign the transaction'
    )
    parser.add_argument(
        '--expires', '-e',
        default=30,
        help='Expiration time in seconds (defaults to 30)'
    )
    parser.add_argument(
        '--verbose', '-v',
        type=int,
        default=3,
        help='Verbosity'
    )
    parser.add_argument('--version', action='version',
                        version='%(prog)s {version}'.format(version=__VERSION__))

    subparsers = parser.add_subparsers(help='sub-command help')

    """
        Command "set"
    """
    setconfig = subparsers.add_parser('set', help='Set configuration')
    setconfig.add_argument(
        'key',
        type=str,
        choices=availableConfigurationKeys,
        help='Configuration key'
    )
    setconfig.add_argument(
        'value',
        type=str,
        help='Configuration value'
    )
    setconfig.set_defaults(command="set")

    """
        Command "config"
    """
    configconfig = subparsers.add_parser('config', help='Show local configuration')
    configconfig.set_defaults(command="config")

    """
        Command "info"
    """
    parser_info = subparsers.add_parser('info', help='Show info about piston and Steem')
    parser_info.set_defaults(command="info")

    """
        Command "changewalletpassphrase"
    """
    changepasswordconfig = subparsers.add_parser('changewalletpassphrase', help='Change wallet password')
    changepasswordconfig.set_defaults(command="changewalletpassphrase")

    """
        Command "addkey"
    """
    addkey = subparsers.add_parser('addkey', help='Add a new key to the wallet')
    addkey.add_argument(
        'wifkeys',
        nargs='*',
        type=str,
        help='the private key in wallet import format (wif)'
    )
    addkey.set_defaults(command="addkey")

    """
        Command "delkey"
    """
    delkey = subparsers.add_parser('delkey', help='Delete keys from the wallet')
    delkey.add_argument(
        'pub',
        nargs='*',
        type=str,
        help='the public key to delete from the wallet'
    )
    delkey.set_defaults(command="delkey")

    """
        Command "getkey"
    """
    getkey = subparsers.add_parser('getkey', help='Dump the private key of a public key from the wallet')
    getkey.add_argument(
        'pub',
        type=str,
        help='the public key for which to show the private key'
    )
    getkey.set_defaults(command="getkey")

    """
        Command "listkeys"
    """
    listkeys = subparsers.add_parser('listkeys', help='List available keys in your wallet')
    listkeys.set_defaults(command="listkeys")

    """
        Command "listaccounts"
    """
    listaccounts = subparsers.add_parser('listaccounts', help='List available accounts in your wallet')
    listaccounts.set_defaults(command="listaccounts")

    """
        Command "list"
    """
    parser_list = subparsers.add_parser('list', help='List posts on Steem')
    parser_list.set_defaults(command="list")
    parser_list.add_argument(
        '--start',
        type=str,
        help='Start list from this identifier (pagination)'
    )
    parser_list.add_argument(
        '--category',
        type=str,
        help='Only posts in this category'
    )
    parser_list.add_argument(
        '--sort',
        type=str,
        default=config["list_sorting"],
        choices=["trending", "created", "active", "cashout", "payout", "votes", "children", "hot"],
        help='Sort posts'
    )
    parser_list.add_argument(
        '--limit',
        type=int,
        default=config["limit"],
        help='Limit posts by number'
    )
    parser_list.add_argument(
        '--columns',
        type=str,
        nargs="+",
        help='Display custom columns'
    )

    """
        Command "categories"
    """
    parser_categories = subparsers.add_parser('categories', help='Show categories')
    parser_categories.set_defaults(command="categories")
    parser_categories.add_argument(
        '--sort',
        type=str,
        default=config["categories_sorting"],
        choices=["trending", "best", "active", "recent"],
        help='Sort categories'
    )
    parser_categories.add_argument(
        'category',
        nargs="?",
        type=str,
        help='Only categories used by this author'
    )
    parser_categories.add_argument(
        '--limit',
        type=int,
        default=config["limit"],
        help='Limit categories by number'
    )

    """
        Command "read"
    """
    parser_read = subparsers.add_parser('read', help='Read a post on Steem')
    parser_read.set_defaults(command="read")
    parser_read.add_argument(
        'post',
        type=str,
        help='@author/permlink-identifier of the post to read (e.g. @xeroc/python-steem-0-1)'
    )
    parser_read.add_argument(
        '--full',
        action='store_true',
        help='Show full header information (YAML formatted)'
    )
    parser_read.add_argument(
        '--comments',
        action='store_true',
        help='Also show all comments'
    )
    parser_read.add_argument(
        '--parents',
        type=int,
        default=0,
        help='Show x parents for the reply'
    )
    parser_read.add_argument(
        '--format',
        type=str,
        default=config["format"],
        help='Format post',
        choices=["markdown", "raw"],
    )

    """
        Command "post"
    """
    parser_post = subparsers.add_parser('post', help='Post something new')
    parser_post.set_defaults(command="post")
    parser_post.add_argument(
        '--author',
        type=str,
        required=False,
        default=config["default_author"],
        help='Publish post as this user (requires the key to be installed in the wallet)'
    )
    parser_post.add_argument(
        '--permlink',
        type=str,
        required=False,
        help='The permlink (together with the author identifies the post uniquely)'
    )
    parser_post.add_argument(
        '--category',
        default=config["post_category"],
        type=str,
        help='Specify category'
    )
    parser_post.add_argument(
        '--tags',
        default=[],
        help='Specify tags',
        nargs='*',
    )
    parser_post.add_argument(
        '--title',
        type=str,
        required=False,
        help='Title of the post'
    )
    parser_post.add_argument(
        '--file',
        type=str,
        default=None,
        help='Filename to open. If not present, or "-", stdin will be used'
    )

    """
        Command "reply"
    """
    reply = subparsers.add_parser('reply', help='Reply to an existing post')
    reply.set_defaults(command="reply")
    reply.add_argument(
        'replyto',
        type=str,
        help='@author/permlink-identifier of the post to reply to (e.g. @xeroc/python-steem-0-1)'
    )
    reply.add_argument(
        '--author',
        type=str,
        required=False,
        default=config["default_author"],
        help='Publish post as this user (requires the key to be installed in the wallet)'
    )
    reply.add_argument(
        '--permlink',
        type=str,
        required=False,
        help='The permlink (together with the author identifies the post uniquely)'
    )
    reply.add_argument(
        '--title',
        type=str,
        required=False,
        help='Title of the post'
    )
    reply.add_argument(
        '--file',
        type=str,
        required=False,
        help='Send file as response. If "-", read from stdin'
    )

    """
        Command "edit"
    """
    parser_edit = subparsers.add_parser('edit', help='Edit an existing post')
    parser_edit.set_defaults(command="edit")
    parser_edit.add_argument(
        'post',
        type=str,
        help='@author/permlink-identifier of the post to edit (e.g. @xeroc/python-steem-0-1)'
    )
    parser_edit.add_argument(
        '--author',
        type=str,
        required=False,
        default=config["default_author"],
        help='Post an edit as another author'
    )
    parser_edit.add_argument(
        '--file',
        type=str,
        required=False,
        help='Patch with content of this file'
    )
    parser_edit.add_argument(
        '--replace',
        action='store_true',
        help="Don't patch but replace original post (will make you lose votes)"
    )

    """
        Command "upvote"
    """
    parser_upvote = subparsers.add_parser('upvote', help='Upvote a post')
    parser_upvote.set_defaults(command="upvote")
    parser_upvote.add_argument(
        'post',
        type=str,
        help='@author/permlink-identifier of the post to upvote (e.g. @xeroc/python-steem-0-1)'
    )
    parser_upvote.add_argument(
        '--voter',
        type=str,
        required=False,
        default=config["default_voter"],
        help='The voter account name'
    )
    parser_upvote.add_argument(
        '--weight',
        type=float,
        default=config["default_vote_weight"],
        required=False,
        help='Actual weight (from 0.1 to 100.0)'
    )

    """
        Command "downvote"
    """
    parser_downvote = subparsers.add_parser('downvote', help='Downvote a post')
    parser_downvote.set_defaults(command="downvote")
    parser_downvote.add_argument(
        '--voter',
        type=str,
        default=config["default_voter"],
        help='The voter account name'
    )
    parser_downvote.add_argument(
        'post',
        type=str,
        help='@author/permlink-identifier of the post to downvote (e.g. @xeroc/python-steem-0-1)'
    )
    parser_downvote.add_argument(
        '--weight',
        type=float,
        default=config["default_vote_weight"],
        required=False,
        help='Actual weight (from 0.1 to 100.0)'
    )

    """
        Command "replies"
    """
    replies = subparsers.add_parser('replies', help='Show recent replies to your posts')
    replies.set_defaults(command="replies")
    replies.add_argument(
        '--author',
        type=str,
        required=False,
        default=config["default_author"],
        help='Show replies to this author'
    )
    replies.add_argument(
        '--limit',
        type=int,
        default=config["limit"],
        help='Limit posts by number'
    )

    """
        Command "transfer"
    """
    parser_transfer = subparsers.add_parser('transfer', help='Transfer STEEM')
    parser_transfer.set_defaults(command="transfer")
    parser_transfer.add_argument(
        'to',
        type=str,
        help='Recipient'
    )
    parser_transfer.add_argument(
        'amount',
        type=float,
        help='Amount to transfer'
    )
    parser_transfer.add_argument(
        'asset',
        type=str,
        choices=["STEEM", "SBD"],
        help='Asset to transfer (i.e. STEEM or SBD)'
    )
    parser_transfer.add_argument(
        'memo',
        type=str,
        nargs="?",
        default="",
        help='Optional memo'
    )
    parser_transfer.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_author"],
        help='Transfer from this account'
    )

    """
        Command "powerup"
    """
    parser_powerup = subparsers.add_parser('powerup', help='Power up (vest STEEM as STEEM POWER)')
    parser_powerup.set_defaults(command="powerup")
    parser_powerup.add_argument(
        'amount',
        type=str,
        help='Amount of STEEM to power up'
    )
    parser_powerup.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_author"],
        help='Powerup from this account'
    )
    parser_powerup.add_argument(
        '--to',
        type=str,
        required=False,
        default=config["default_author"],
        help='Powerup this account'
    )

    """
        Command "powerdown"
    """
    parser_powerdown = subparsers.add_parser('powerdown', help='Power down (start withdrawing STEEM from STEEM POWER)')
    parser_powerdown.set_defaults(command="powerdown")
    parser_powerdown.add_argument(
        'amount',
        type=str,
        help='Amount of VESTS to powerdown'
    )
    parser_powerdown.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_author"],
        help='Powerdown from this account'
    )

    """
        Command "powerdownroute"
    """
    parser_powerdownroute = subparsers.add_parser('powerdownroute', help='Setup a powerdown route')
    parser_powerdownroute.set_defaults(command="powerdownroute")
    parser_powerdownroute.add_argument(
        'to',
        type=str,
        default=config["default_author"],
        help='The account receiving either VESTS/SteemPower or STEEM.'
    )
    parser_powerdownroute.add_argument(
        '--percentage',
        type=float,
        default=100,
        help='The percentage of the withdrawal that goes to the "to" account'
    )
    parser_powerdownroute.add_argument(
        '--account',
        type=str,
        default=config["default_author"],
        help='The account which is powering down'
    )
    parser_powerdownroute.add_argument(
        '--auto_vest',
        action='store_true',
        help=('Set to true if the from account should receive the VESTS as '
              'VESTS, or false if it should receive them as STEEM.')
    )

    """
        Command "convert"
    """
    parser_convert = subparsers.add_parser('convert', help='Convert STEEMDollars to Steem (takes a week to settle)')
    parser_convert.set_defaults(command="convert")
    parser_convert.add_argument(
        'amount',
        type=float,
        help='Amount of SBD to convert'
    )
    parser_convert.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_author"],
        help='Convert from this account'
    )

    """
        Command "balance"
    """
    parser_balance = subparsers.add_parser('balance', help='Show the balance of one or more accounts')
    parser_balance.set_defaults(command="balance")
    parser_balance.add_argument(
        'account',
        type=str,
        nargs="*",
        default=config["default_author"],
        help='Balance of these accounts (multiple accounts allowed)'
    )

    """
        Command "history"
    """
    parser_history = subparsers.add_parser('history', help='Show the history of an account')
    parser_history.set_defaults(command="history")
    parser_history.add_argument(
        'account',
        type=str,
        nargs="?",
        default=config["default_author"],
        help='History of this account'
    )
    parser_history.add_argument(
        '--limit',
        type=int,
        default=config["limit"],
        help='Limit number of entries'
    )
    parser_history.add_argument(
        '--memos',
        action='store_true',
        help='Show (decode) memos'
    )
    parser_history.add_argument(
        '--first',
        type=int,
        default=99999999999999,
        help='Transaction number (#) of the last transaction to show.'
    )
    parser_history.add_argument(
        '--types',
        type=str,
        nargs="*",
        default=[],
        help='Show only these operation types'
    )

    """
        Command "interest"
    """
    interest = subparsers.add_parser('interest', help='Get information about interest payment')
    interest.set_defaults(command="interest")
    interest.add_argument(
        'account',
        type=str,
        nargs="*",
        default=config["default_author"],
        help='Inspect these accounts'
    )

    """
        Command "permissions"
    """
    parser_permissions = subparsers.add_parser('permissions', help='Show permissions of an account')
    parser_permissions.set_defaults(command="permissions")
    parser_permissions.add_argument(
        'account',
        type=str,
        nargs="?",
        default=config["default_author"],
        help='Account to show permissions for'
    )

    """
        Command "allow"
    """
    parser_allow = subparsers.add_parser('allow', help='Allow an account/key to interact with your account')
    parser_allow.set_defaults(command="allow")
    parser_allow.add_argument(
        '--account',
        type=str,
        nargs="?",
        default=config["default_author"],
        help='The account to allow action for'
    )
    parser_allow.add_argument(
        'foreign_account',
        type=str,
        nargs="?",
        help='The account or key that will be allowed to interact as your account'
    )
    parser_allow.add_argument(
        '--permission',
        type=str,
        default="posting",
        choices=["owner", "posting", "active"],
        help=('The permission to grant (defaults to "posting")')
    )
    parser_allow.add_argument(
        '--weight',
        type=int,
        default=None,
        help=('The weight to use instead of the (full) threshold. '
              'If the weight is smaller than the threshold, '
              'additional signatures are required')
    )
    parser_allow.add_argument(
        '--threshold',
        type=int,
        default=None,
        help=('The permission\'s threshold that needs to be reached '
              'by signatures to be able to interact')
    )

    """
        Command "disallow"
    """
    parser_disallow = subparsers.add_parser('disallow', help='Remove the allowance of an account/key to interact with your account')
    parser_disallow.set_defaults(command="disallow")
    parser_disallow.add_argument(
        '--account',
        type=str,
        nargs="?",
        default=config["default_author"],
        help='The account to disallow action for'
    )
    parser_disallow.add_argument(
        'foreign_account',
        type=str,
        help='The account or key whose allowance to interact as your account will be removed'
    )
    parser_disallow.add_argument(
        '--permission',
        type=str,
        default="posting",
        choices=["owner", "posting", "active"],
        help=('The permission to remove (defaults to "posting")')
    )
    parser_disallow.add_argument(
        '--threshold',
        type=int,
        default=None,
        help=('The permission\'s threshold that needs to be reached '
              'by signatures to be able to interact')
    )

    """
        Command "newaccount"
    """
    parser_newaccount = subparsers.add_parser('newaccount', help='Create a new account')
    parser_newaccount.set_defaults(command="newaccount")
    parser_newaccount.add_argument(
        'accountname',
        type=str,
        help='New account name'
    )
    parser_newaccount.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_author"],
        help='Account that pays the fee'
    )

    """
        Command "importaccount"
    """
    parser_importaccount = subparsers.add_parser('importaccount', help='Import an account using a passphrase')
    parser_importaccount.set_defaults(command="importaccount")
    parser_importaccount.add_argument(
        'account',
        type=str,
        help='Account name'
    )

    """
        Command "updateMemoKey"
    """
    parser_updateMemoKey = subparsers.add_parser('updatememokey', help='Update an account\'s memo key')
    parser_updateMemoKey.set_defaults(command="updatememokey")
    parser_updateMemoKey.add_argument(
        '--account',
        type=str,
        nargs="?",
        default=config["default_author"],
        help='The account to update the memo key for'
    )
    parser_updateMemoKey.add_argument(
        '--key',
        type=str,
        default=None,
        help='The new memo key'
    )

    """
        Command "sign"
    """
    parser_sign = subparsers.add_parser('sign', help='Sign a provided transaction with available and required keys')
    parser_sign.set_defaults(command="sign")
    parser_sign.add_argument(
        '--file',
        type=str,
        required=False,
        help='Load transaction from file. If "-", read from stdin (defaults to "-")'
    )

    """
        Command "broadcast"
    """
    parser_broadcast = subparsers.add_parser('broadcast', help='broadcast a signed transaction')
    parser_broadcast.set_defaults(command="broadcast")
    parser_broadcast.add_argument(
        '--file',
        type=str,
        required=False,
        help='Load transaction from file. If "-", read from stdin (defaults to "-")'
    )

    """
        Command "web"
    """
    webconfig = subparsers.add_parser('web', help='Launch web version of piston')
    webconfig.set_defaults(command="web")
    webconfig.add_argument(
        '--port',
        type=int,
        default=config["web:port"],
        help='Port to open for internal web requests'
    )
    webconfig.add_argument(
        '--host',
        type=str,
        default=config["web:host"],
        help='Host address to listen to'
    )

    """
        Command "orderbook"
    """
    orderbook = subparsers.add_parser('orderbook', help='Obtain orderbook of the internal market')
    orderbook.set_defaults(command="orderbook")
    orderbook.add_argument(
        '--chart',
        action='store_true',
        help="Enable charting (requires matplotlib)"
    )

    """
        Command "buy"
    """
    parser_buy = subparsers.add_parser('buy', help='Buy STEEM or SBD from the internal market')
    parser_buy.set_defaults(command="buy")
    parser_buy.add_argument(
        'amount',
        type=float,
        help='Amount to buy'
    )
    parser_buy.add_argument(
        'asset',
        type=str,
        choices=["STEEM", "SBD"],
        help='Asset to buy (i.e. STEEM or SBD)'
    )
    parser_buy.add_argument(
        'price',
        type=float,
        help='Limit buy price (denoted in SBD per STEEM)'
    )
    parser_buy.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_account"],
        help='Buy with this account (defaults to "default_account")'
    )

    """
        Command "sell"
    """
    parser_sell = subparsers.add_parser('sell', help='Sell STEEM or SBD from the internal market')
    parser_sell.set_defaults(command="sell")
    parser_sell.add_argument(
        'amount',
        type=float,
        help='Amount to sell'
    )
    parser_sell.add_argument(
        'asset',
        type=str,
        choices=["STEEM", "SBD"],
        help='Asset to sell (i.e. STEEM or SBD)'
    )
    parser_sell.add_argument(
        'price',
        type=float,
        help='Limit sell price (denoted in SBD per STEEM)'
    )
    parser_sell.add_argument(
        '--account',
        type=str,
        required=False,
        default=config["default_account"],
        help='Sell from this account (defaults to "default_account")'
    )

    """
        Parse Arguments
    """
    args = parser.parse_args()

    # Logging
    log = logging.getLogger(__name__)
    verbosity = ["critical",
                 "error",
                 "warn",
                 "info",
                 "debug"][int(min(args.verbose, 4))]
    log.setLevel(getattr(logging, verbosity.upper()))
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch = logging.StreamHandler()
    ch.setLevel(getattr(logging, verbosity.upper()))
    ch.setFormatter(formatter)
    log.addHandler(ch)

    # GrapheneAPI logging
    if args.verbose > 4:
        verbosity = ["critical",
                     "error",
                     "warn",
                     "info",
                     "debug"][int(min((args.verbose - 4), 4))]
        gphlog = logging.getLogger("graphenebase")
        gphlog.setLevel(getattr(logging, verbosity.upper()))
        gphlog.addHandler(ch)
    if args.verbose > 8:
        verbosity = ["critical",
                     "error",
                     "warn",
                     "info",
                     "debug"][int(min((args.verbose - 8), 4))]
        gphlog = logging.getLogger("grapheneapi")
        gphlog.setLevel(getattr(logging, verbosity.upper()))
        gphlog.addHandler(ch)

    if not hasattr(args, "command"):
        parser.print_help()
        sys.exit(2)

    # We don't require RPC for these commands
    rpc_not_required = [
        "set",
        "config",
        "web",
        ""]
    if args.command not in rpc_not_required and args.command:
        options = {
            "node": args.node,
            "rpcuser": args.rpcuser,
            "rpcpassword": args.rpcpassword,
            "nobroadcast": args.nobroadcast,
            "unsigned": args.unsigned,
            "expires": args.expires
        }

        # preload wallet with empty keys
        if args.nowallet:
            options.update({"wif": []})

        # Signing only requires the wallet, no connection
        # essential for offline/coldstorage signing
        if args.command == "sign":
            options.update({"offline": True})

        steem = SteemConnector(**options).getSteem()

    if args.command == "set":
        if (args.key in ["default_author",
                         "default_voter",
                         "default_account"] and
                args.value[0] == "@"):
            args.value = args.value[1:]
        config[args.key] = args.value

    elif args.command == "config":
        t = PrettyTable(["Key", "Value"])
        t.align = "l"
        for key in config:
            if key in availableConfigurationKeys:  # hide internal config data
                t.add_row([key, config[key]])
        print(t)

    elif args.command == "info":
        t = PrettyTable(["Key", "Value"])
        t.align = "l"
        info = steem.rpc.get_dynamic_global_properties()
        median_price = steem.rpc.get_current_median_history_price()
        steem_per_mvest = (
            float(info["total_vesting_fund_steem"].split(" ")[0]) /
            (float(info["total_vesting_shares"].split(" ")[0]) / 1e6)
        )
        price = (
            float(median_price["base"].split(" ")[0]) /
            float(median_price["quote"].split(" ")[0])
        )
        for key in info:
            t.add_row([key, info[key]])
        t.add_row(["steem per mvest", steem_per_mvest])
        t.add_row(["internal price", price])
        print(t)

    elif args.command == "changewalletpassphrase":
        steem.wallet.changePassphrase()

    elif args.command == "addkey":
        pub = None
        if len(args.wifkeys):
            for wifkey in args.wifkeys:
                pub = (steem.wallet.addPrivateKey(wifkey))
                if pub:
                    print(pub)
        else:
            import getpass
            wifkey = ""
            while True:
                wifkey = getpass.getpass('Private Key (wif) [Enter to quit]:')
                if not wifkey:
                    break
                pub = (steem.wallet.addPrivateKey(wifkey))
                if pub:
                    print(pub)

        if pub:
            name = steem.wallet.getAccountFromPublicKey(pub)
            print("Setting new default user: %s" % name)
            print("You can change these settings with:")
            print("    piston set default_author x")
            print("    piston set default_voter x")
            config["default_author"] = name
            config["default_voter"] = name

    elif args.command == "delkey":
        if confirm(
            "Are you sure you want to delete keys from your wallet?\n"
            "This step is IRREVERSIBLE! If you don't have a backup, "
            "You may lose access to your account!"
        ):
            for pub in args.pub:
                steem.wallet.removePrivateKeyFromPublicKey(pub)

    elif args.command == "getkey":
        print(steem.wallet.getPrivateKeyForPublicKey(args.pub))

    elif args.command == "listkeys":
        t = PrettyTable(["Available Key"])
        t.align = "l"
        for key in steem.wallet.getPublicKeys():
            t.add_row([key])
        print(t)

    elif args.command == "listaccounts":
        t = PrettyTable(["Name", "Type", "Available Key"])
        t.align = "l"
        for account in steem.wallet.getAccounts():
            t.add_row([
                account["name"] or "n/a",
                account["type"] or "n/a",
                account["pubkey"]
            ])
        print(t)

    elif args.command == "reply":
        from textwrap import indent
        parent = steem.get_content(args.replyto)
        if parent["id"] == "0.0.0":
            print("Can't find post %s" % args.replyto)
            return

        reply_message = indent(parent["body"], "> ")

        post = frontmatter.Post(reply_message, **{
            "title": args.title if args.title else "Re: " + parent["title"],
            "author": args.author if args.author else "required",
            "replyto": args.replyto,
        })

        meta, json_meta, message = yaml_parse_file(args, initial_content=post)

        for required in ["author", "title"]:
            if (required not in meta or
                    not meta[required] or
                    meta[required] == "required"):
                print("'%s' required!" % required)
                # TODO, instead of terminating here, send the user back
                # to the EDITOR
                return

        pprint(steem.reply(
            meta["replyto"],
            message,
            title=meta["title"],
            author=meta["author"],
            meta=json_meta,
        ))

    elif args.command == "post" or args.command == "yaml":
        initmeta = {
            "title": args.title if args.title else "required",
            "author": args.author if args.author else "required",
            "category": args.category if args.category else "required",
        }
        if args.tags:
            initmeta["tags"] = args.tags
        post = frontmatter.Post("", **initmeta)

        meta, json_meta, body = yaml_parse_file(args, initial_content=post)

        if not body:
            print("Empty body! Not posting!")
            return

        for required in ["author", "title", "category"]:
            if (required not in meta or
                    not meta[required] or
                    meta[required] == "required"):
                print("'%s' required!" % required)
                # TODO, instead of terminating here, send the user back
                # to the EDITOR
                return

        pprint(steem.post(
            meta["title"],
            body,
            author=meta["author"],
            category=meta["category"],
            meta=json_meta,
        ))

    elif args.command == "edit":
        original_post = steem.get_content(args.post)

        edited_message = None
        if original_post["id"] == "0.0.0":
            print("Can't find post %s" % args.post)
            return

        post = frontmatter.Post(original_post["body"], **{
            "title": original_post["title"] + " (immutable)",
            "author": original_post["author"] + " (immutable)",
            "tags": original_post["_tags"]
        })

        meta, json_meta, edited_message = yaml_parse_file(args, initial_content=post)
        pprint(steem.edit(
            args.post,
            edited_message,
            replace=args.replace,
            meta=json_meta,
        ))

    elif args.command == "upvote" or args.command == "downvote":
        post = Post(steem, args.post)
        if args.command == "downvote":
            weight = -float(args.weight)
        else:
            weight = +float(args.weight)
        if not args.voter:
            print("Not voter provided!")
            return
        pprint(post.vote(weight, voter=args.voter))

    elif args.command == "read":
        post_author, post_permlink = resolveIdentifier(args.post)

        if args.parents:
            # FIXME inconsistency, use @author/permlink instead!
            dump_recursive_parents(
                steem.rpc,
                post_author,
                post_permlink,
                args.parents,
                format=args.format
            )

        if not args.comments and not args.parents:
            post = steem.get_content(args.post)

            if post["id"] == "0.0.0":
                print("Can't find post %s" % args.post)
                return
            if args.format == "markdown":
                body = markdownify(post["body"])
            else:
                body = post["body"]

            if args.full:
                meta = {}
                for key in post:
                    if key in ["steem", "body"]:
                        continue
                    meta[key] = post[key]
                yaml = frontmatter.Post(body, **meta)
                print(frontmatter.dumps(yaml))
            else:
                print(body)

        if args.comments:
            dump_recursive_comments(
                steem.rpc,
                post_author,
                post_permlink,
                format=args.format
            )

    elif args.command == "categories":
        categories = steem.get_categories(
            sort=args.sort,
            begin=args.category,
            limit=args.limit
        )
        t = PrettyTable(["name", "discussions", "payouts"])
        t.align = "l"
        for category in categories:
            t.add_row([
                category["name"],
                category["discussions"],
                category["total_payouts"],
            ])
        print(t)

    elif args.command == "list":
        list_posts(
            steem.get_posts(
                limit=args.limit,
                sort=args.sort,
                category=args.category,
                start=args.start
            ),
            args.columns
        )

    elif args.command == "replies":
        if not args.author:
            print("Please specify an author via --author\n "
                  "or define your default author with:\n"
                  "   piston set default_author x")
        else:
            discussions = steem.get_replies(args.author)
            list_posts(discussions[0:args.limit])

    elif args.command == "transfer":
        pprint(steem.transfer(
            args.to,
            args.amount,
            args.asset,
            memo=args.memo,
            account=args.account
        ))

    elif args.command == "powerup":
        pprint(steem.transfer_to_vesting(
            args.amount,
            account=args.account,
            to=args.to
        ))

    elif args.command == "powerdown":
        pprint(steem.withdraw_vesting(
            args.amount,
            account=args.account,
        ))

    elif args.command == "convert":
        pprint(steem.convert(
            args.amount,
            account=args.account,
        ))

    elif args.command == "powerdownroute":
        pprint(steem.set_withdraw_vesting_route(
            args.to,
            percentage=args.percentage,
            account=args.account,
            auto_vest=args.auto_vest
        ))

    elif args.command == "balance":
        t = PrettyTable(["Account", "STEEM", "SBD", "VESTS", "VESTS (in STEEM)"])
        t.align = "r"
        if isinstance(args.account, str):
            args.account = [args.account]
        for a in args.account:
            b = steem.get_balances(a)
            t.add_row([
                a,
                b["balance"],
                b["sbd_balance"],
                b["vesting_shares"],
                b["vesting_shares_steem"]
            ])
        print(t)

    elif args.command == "history":
        t = PrettyTable(["#", "time/block", "Operation", "Details"])
        t.align = "r"
        if isinstance(args.account, str):
            args.account = [args.account]
        if isinstance(args.types, str):
            args.types = [args.types]

        for a in args.account:
            for b in steem.rpc.account_history(
                a,
                args.first,
                limit=args.limit,
                only_ops=args.types
            ):
                t.add_row([
                    b[0],
                    "%s (%s)" % (b[1]["timestamp"], b[1]["block"]),
                    b[1]["op"][0],
                    format_operation_details(b[1]["op"], memos=args.memos),
                ])
        print(t)

    elif args.command == "interest":
        t = PrettyTable(["Account",
                         "Last Interest Payment",
                         "Next Payment",
                         "Interest rate",
                         "Interest"])
        t.align = "r"
        if isinstance(args.account, str):
            args.account = [args.account]
        for a in args.account:
            i = steem.interest(a)

            t.add_row([
                a,
                i["last_payment"],
                "in %s" % strfage(i["next_payment_duration"]),
                "%.1f%%" % i["interest_rate"],
                "%.3f SBD" % i["interest"],
            ])
        print(t)

    elif args.command == "permissions":
        account = steem.rpc.get_account(args.account)
        print_permissions(account)

    elif args.command == "allow":
        if not args.foreign_account:
            from steembase.account import PasswordKey
            pwd = get_terminal(text="Password for Key Derivation: ", confirm=True)
            args.foreign_account = format(PasswordKey(args.account, pwd, args.permission).get_public(), "STM")
        pprint(steem.allow(
            args.foreign_account,
            weight=args.weight,
            account=args.account,
            permission=args.permission,
            threshold=args.threshold
        ))

    elif args.command == "disallow":
        pprint(steem.disallow(
            args.foreign_account,
            account=args.account,
            permission=args.permission,
            threshold=args.threshold
        ))

    elif args.command == "updatememokey":
        if not args.key:
            # Loop until both match
            from steembase.account import PasswordKey
            pw = get_terminal(text="Password for Memo Key: ", confirm=True, allowedempty=False)
            memo_key = PasswordKey(args.account, pw, "memo")
            args.key = format(memo_key.get_public_key(), "STM")
            memo_privkey = memo_key.get_private_key()
            # Add the key to the wallet
            if not args.nobroadcast:
                steem.wallet.addPrivateKey(memo_privkey)
        pprint(steem.update_memo_key(
            args.key,
            account=args.account
        ))

    elif args.command == "newaccount":
        import getpass
        while True:
            pw = getpass.getpass("New Account Passphrase: ")
            if not pw:
                print("You cannot choose an empty password!")
                continue
            else:
                pwck = getpass.getpass(
                    "Confirm New Account Passphrase: "
                )
                if pw == pwck:
                    break
                else:
                    print("Given Passphrases do not match!")
        pprint(steem.create_account(
            args.accountname,
            creator=args.account,
            password=pw,
        ))

    elif args.command == "importaccount":
        from steembase.account import PasswordKey
        import getpass
        password = getpass.getpass("Account Passphrase: ")

        posting_key = PasswordKey(args.account, password, role="posting")
        active_key  = PasswordKey(args.account, password, role="active")
        memo_key    = PasswordKey(args.account, password, role="memo")
        posting_pubkey = format(posting_key.get_public_key(), "STM")
        active_pubkey  = format(active_key.get_public_key(), "STM")
        memo_pubkey    = format(memo_key.get_public_key(), "STM")

        account = steem.rpc.get_account(args.account)

        imported = False
        if active_pubkey in [x[0] for x in account["active"]["key_auths"]]:
            active_privkey = active_key.get_private_key()
            steem.wallet.addPrivateKey(active_privkey)
            imported = True

        if posting_pubkey in [x[0] for x in account["posting"]["key_auths"]]:
            posting_privkey = posting_key.get_private_key()
            steem.wallet.addPrivateKey(posting_privkey)
            imported = True

        if memo_pubkey == account["memo_key"]:
            memo_privkey = memo_key.get_private_key()
            steem.wallet.addPrivateKey(memo_privkey)
            imported = True

        if not imported:
            print("No keys matched! Invalid password?")

    elif args.command == "sign":
        if args.file and args.file != "-":
            if not os.path.isfile(args.file):
                raise Exception("File %s does not exist!" % args.file)
            with open(args.file) as fp:
                tx = fp.read()
        else:
            tx = sys.stdin.read()
        tx = eval(tx)  # evaluates the raw input as Python; only feed it trusted transactions
        pprint(steem.sign(tx))

    elif args.command == "broadcast":
        if args.file and args.file != "-":
            if not os.path.isfile(args.file):
                raise Exception("File %s does not exist!" % args.file)
            with open(args.file) as fp:
                tx = fp.read()
        else:
            tx = sys.stdin.read()
        tx = eval(tx)
        steem.broadcast(tx)

    elif args.command == "web":
        SteemConnector(node=args.node,
                       rpcuser=args.rpcuser,
                       rpcpassword=args.rpcpassword,
                       nobroadcast=args.nobroadcast,
                       num_retries=1)
        from . import web
        web.run(port=args.port, host=args.host)

    elif args.command == "orderbook":
        if args.chart:
            try:
                import numpy
                import Gnuplot
                from itertools import accumulate
            except ImportError:
                print("To use --chart, you need gnuplot and gnuplot-py installed")
                sys.exit(1)
        orderbook = steem.dex().returnOrderBook()

        if args.chart:
            g = Gnuplot.Gnuplot()
            g.title("Steem internal market - SBD:STEEM")
            g.xlabel("price")
            g.ylabel("volume")
            g("""
                set style data line
                set term xterm
                set border 15
            """)
            xbids = [x["price"] for x in orderbook["bids"]]
            ybids = list(accumulate([x["sbd"] for x in orderbook["bids"]]))
            dbids = Gnuplot.Data(xbids, ybids, with_="lines")
            xasks = [x["price"] for x in orderbook["asks"]]
            yasks = list(accumulate([x["sbd"] for x in orderbook["asks"]]))
            dasks = Gnuplot.Data(xasks, yasks, with_="lines")
            g("set terminal dumb")
            g.plot(dbids, dasks)  # terminal "dumb" renders the plot as ASCII on stdout

        t = PrettyTable(["bid SBD", "sum bids SBD", "bid STEEM", "sum bids STEEM",
                         "bid price", "+", "ask price",
                         "ask STEEM", "sum asks steem", "ask SBD", "sum asks SBD"])
        t.align = "r"
        bidssteem = 0
        bidssbd = 0
        askssteem = 0
        askssbd = 0
        for i, o in enumerate(orderbook["asks"]):
            bidssbd += orderbook["bids"][i]["sbd"]
            bidssteem += orderbook["bids"][i]["steem"]
            askssbd += orderbook["asks"][i]["sbd"]
            askssteem += orderbook["asks"][i]["steem"]
            t.add_row([
                "%.3f Ṩ" % orderbook["bids"][i]["sbd"],
                "%.3f ∑" % bidssbd,
                "%.3f ȿ" % orderbook["bids"][i]["steem"],
                "%.3f ∑" % bidssteem,
                "%.3f Ṩ/ȿ" % orderbook["bids"][i]["price"],
                "|",
                "%.3f Ṩ/ȿ" % orderbook["asks"][i]["price"],
                "%.3f ȿ" % orderbook["asks"][i]["steem"],
                "%.3f ∑" % askssteem,
                "%.3f Ṩ" % orderbook["asks"][i]["sbd"],
                "%.3f ∑" % askssbd])
        print(t)

    elif args.command == "buy":
        if args.asset == "SBD":
            price = 1.0 / args.price
        else:
            price = args.price
        pprint(steem.buy(
            args.amount,
            args.asset,
            price,
            account=args.account
        ))

    elif args.command == "sell":
        if args.asset == "SBD":
            price = 1.0 / args.price
        else:
            price = args.price
        pprint(steem.sell(
            args.amount,
            args.asset,
            price,
            account=args.account
        ))

    else:
        print("No valid command given")

Example 3

Project: crazyflie-clients-python
Source File: gui.py
def main():
    """
    Check starting conditions and start GUI.

    First, check command line arguments and start loggers. Set log levels. Try
    all imports and exit verbosely if a library is not found. Disable outputs
    to stdout and start the GUI.
    """

    # Set ERROR level for PyQt4 logger
    qtlogger = logging.getLogger('PyQt4')
    qtlogger.setLevel(logging.ERROR)

    parser = argparse.ArgumentParser(
        description="cfclient - Crazyflie graphical control client")
    parser.add_argument('--debug', '-d', nargs=1, default='info', type=str,
                        help="set debug level "
                             "[minimal, info, debug, debugfile]")
    args = parser.parse_args()
    debug = args.debug

    cflogger = logging.getLogger('')

    # Set correct logging functionality according to the command line
    if ("debugfile" in debug):
        logging.basicConfig(level=logging.DEBUG)
        # Add extra format options for file logger (thread and time)
        formatter = logging.Formatter('%(asctime)s:%(threadName)s:'
                                      '%(name)s:%(levelname)s:%(message)s')
        filename = "debug-%s.log" % datetime.datetime.now()
        filehandler = logging.FileHandler(filename)
        filehandler.setLevel(logging.DEBUG)
        filehandler.setFormatter(formatter)
        cflogger.addHandler(filehandler)
    elif ("debug" in debug):
        logging.basicConfig(level=logging.DEBUG)
    elif ("minimal" in debug):
        logging.basicConfig(level=logging.WARNING)
    elif ("info" in debug):
        logging.basicConfig(level=logging.INFO)

    logger = logging.getLogger(__name__)

    logger.debug("Using config path {}".format(cfclient.config_path))
    logger.debug("sys.path={}".format(sys.path))

    # Try all the imports used in the project here to control what happens if one is missing
    try:
        import usb  # noqa
    except ImportError:
        logger.critical("No pyusb installation found, exiting!")
        sys.exit(1)

    if not sys.platform.startswith('linux'):
        try:
            import sdl2  # noqa
        except ImportError:
            logger.critical("No pysdl2 installation found, exiting!")
            sys.exit(1)

    try:
        import PyQt4  # noqa
    except ImportError:
        logger.critical("No PyQT4 installation found, exiting!")
        sys.exit(1)

    # Disable printouts from STL
    if os.name == 'posix':
        stdout = os.dup(1)
        os.dup2(os.open('/dev/null', os.O_WRONLY), 1)
        sys.stdout = os.fdopen(stdout, 'w')
        logger.info("Disabling STL printouts")

    if os.name == 'nt':
        stdout = os.dup(1)
        os.dup2(os.open('NUL', os.O_WRONLY), 1)
        sys.stdout = os.fdopen(stdout, 'w')
        logger.info("Disabling STL printouts")

    if sys.platform == 'darwin':
        try:
            import Foundation
            bundle = Foundation.NSBundle.mainBundle()
            if bundle:
                info = (bundle.localizedInfoDictionary() or
                        bundle.infoDictionary())
                if info:
                    info['CFBundleName'] = 'Crazyflie'
        except ImportError:
            logger.info("Foundation not found. Menu will show python as "
                        "application name")

    # Start up the main user-interface
    from .ui.main import MainUI
    from PyQt4.QtGui import QApplication, QIcon

    app = QApplication(sys.argv)

    app.setWindowIcon(QIcon(cfclient.module_path + "/icon-256.png"))
    # Make sure the right icon is set in Windows 7+ taskbar
    if os.name == 'nt':
        import ctypes

        try:
            myappid = 'mycompany.myproduct.subproduct.version'
            ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
                myappid)
        except Exception:
            pass

    main_window = MainUI()
    main_window.show()
    sys.exit(app.exec_())
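
The first thing Example 3 does is raise the PyQt4 logger to ERROR, before any other configuration. The same one-liner works for any chatty third-party library, since getLogger(name) hands back the library's own logger object. Only "PyQt4" comes from the example; the other names are illustrative:

import logging

for noisy in ("PyQt4", "urllib3", "PIL"):
    logging.getLogger(noisy).setLevel(logging.ERROR)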

Example 4

def gather_isrcs(disc, backend, device):
    """read the disc in the device with the backend and extract the ISRCs
    """
    backend_output = []
    devnull = open(os.devnull, "w")

    if backend == "libdiscid":
        pattern = r'[A-Z]{2}[A-Z0-9]{3}\d{2}\d{5}'
        for track in disc.tracks:
            if track.isrc:
                match = re.match(pattern, track.isrc)
                if match is None:
                    print("no valid ISRC: %s" % track.isrc)
                else:
                    backend_output.append((track.number, track.isrc))

    # redundant to "libdiscid", but this might be handy for prerelease testing
    elif backend == "discisrc":
        pattern = \
            r'Track\s+([0-9]+)\s+:\s+([A-Z]{2})-?([A-Z0-9]{3})-?(\d{2})-?(\d{5})'
        try:
            if sys.platform == "darwin":
                device = get_real_mac_device(device)
            proc = Popen([backend, device], stdout=PIPE)
            isrcout = proc.stdout
        except OSError as err:
            backend_error(err)
        for line in isrcout:
            line = decode(line)  # explicitly decode from pipe
            ext_logger = logging.getLogger("discisrc")
            ext_logger.debug(line.rstrip())    # rstrip newline
            if line.startswith("Track") and len(line) > 12:
                match = re.search(pattern, line)
                if match is None:
                    print("can't find ISRC in: %s" % line)
                    continue
                track_number = int(match.group(1))
                isrc = ("%s%s%s%s" % (match.group(2), match.group(3),
                                      match.group(4), match.group(5)))
                backend_output.append((track_number, isrc))

    # media_info is a preview version of mediatools, both are for Windows
    # this does some kind of raw read
    elif backend in ["mediatools", "media_info"]:
        pattern = \
            r'ISRC\s+([0-9]+)\s+([A-Z]{2})-?([A-Z0-9]{3})-?(\d{2})-?(\d{5})'
        if backend == "mediatools":
            args = [backend, "drive", device, "isrc"]
        else:
            args = [backend, device]
        try:
            proc = Popen(args, stdout=PIPE)
            isrcout = proc.stdout
        except OSError as err:
            backend_error(err)
        for line in isrcout:
            line = decode(line)  # explicitly decode from pipe
            ext_logger = logging.getLogger("mediatools")
            ext_logger.debug(line.rstrip())    # rstrip newline
            if line.startswith("ISRC") and not line.startswith("ISRCS"):
                match = re.search(pattern, line)
                if match is None:
                    print("can't find ISRC in: %s" % line)
                    continue
                track_number = int(match.group(1))
                isrc = ("%s%s%s%s" % (match.group(2), match.group(3),
                                      match.group(4), match.group(5)))
                backend_output.append((track_number, isrc))

    # cdrdao will create a temp file and we delete it afterwards
    # cdrdao is also available for windows
    # this will also fetch ISRCs from CD-TEXT
    elif backend == "cdrdao":
        # no byte pattern, file is opened as unicode
        pattern = r'[A-Z]{2}[A-Z0-9]{3}\d{2}\d{5}'
        tmpname = "cdrdao-%s.toc" % datetime.now()
        tmpname = tmpname.replace(":", "-")     # : is invalid on windows
        tmpfile = os.path.join(tempfile.gettempdir(), tmpname)
        logger.info("Saving toc in %s..", tmpfile)
        if os.name == "nt":
            if device != discid.get_default_device():
                logger.warning("cdrdao uses the default device")
            args = [backend, "read-toc", "--fast-toc", "-v", "0", tmpfile]
        else:
            args = [backend, "read-toc", "--fast-toc", "--device", device,
                "-v", "0", tmpfile]
        try:
            if options.debug:
                proc = Popen(args, stdout=devnull)
            else:
                proc = Popen(args, stdout=devnull, stderr=devnull)
            if proc.wait() != 0:
                print_error("%s returned with %i" % (backend, proc.returncode))
                sys.exit(1)
        except OSError as err:
            backend_error(err)
        else:
            # that file seems to be opened in Unicode mode in Python 3
            ext_logger = logging.getLogger("cdrdao")
            with open(tmpfile, "r") as toc:
                track_number = None
                for line in toc:
                    ext_logger.debug(line.rstrip())  # strip the trailing newline
                    words = line.split()
                    if words:
                        if words[0] == "//":
                            track_number = int(words[2])
                        elif words[0] == "ISRC" and track_number is not None:
                            isrc = "".join(words[1:]).strip('"- ')
                            match = re.match(pattern, isrc)
                            if match is None:
                                print("no valid ISRC: %s" % isrc)
                            else:
                                backend_output.append((track_number, isrc))
                                # safeguard against missing trackNumber lines
                                # or duplicated ISRC tags (like in CD-Text)
                                track_number = None
        finally:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass

    devnull.close()
    return backend_output
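
The example above creates a separate named logger per backend ("discisrc", "mediatools", "cdrdao") so the raw output of each external tool can be traced independently. A minimal sketch of how a caller might surface that debug output, assuming only the stdlib logging module:

import logging

# Route the per-backend debug output from the example above to the
# console; the names are the ones the example passes to getLogger.
logging.basicConfig(level=logging.DEBUG, format="%(name)s: %(message)s")

for name in ("discisrc", "mediatools", "cdrdao"):
    backend_logger = logging.getLogger(name)
    backend_logger.debug("logger %s is active", name)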

Example 5

Project: cocotb
Source File: test_array.py
View license
@cocotb.test()
def test_read_write(dut):
    """Test handle inheritance"""
    tlog = logging.getLogger("cocotb.test")

    cocotb.fork(Clock(dut.clk, 1000).start())

    yield Timer(1000)

    tlog.info("Checking Generics/Parameters:")
    _check_logic(tlog, dut.param_logic    , 1)
    _check_logic(tlog, dut.param_logic_vec, 0xDA)

    if cocotb.LANGUAGE in ["vhdl"]:
        _check_int (tlog, dut.param_bool, 1)
        _check_int (tlog, dut.param_int , 6)
        _check_real(tlog, dut.param_real, 3.14)
        _check_int (tlog, dut.param_char, ord('p'))
        _check_str (tlog, dut.param_str , "ARRAYMOD")

        if not cocotb.SIM_NAME.lower().startswith(("riviera")):
            _check_logic(tlog, dut.param_rec.a        , 0)
            _check_logic(tlog, dut.param_rec.b[0]     , 0)
            _check_logic(tlog, dut.param_rec.b[1]     , 0)
            _check_logic(tlog, dut.param_rec.b[2]     , 0)
            _check_logic(tlog, dut.param_cmplx[0].a   , 0)
            _check_logic(tlog, dut.param_cmplx[0].b[0], 0)
            _check_logic(tlog, dut.param_cmplx[0].b[1], 0)
            _check_logic(tlog, dut.param_cmplx[0].b[2], 0)
            _check_logic(tlog, dut.param_cmplx[1].a   , 0)
            _check_logic(tlog, dut.param_cmplx[1].b[0], 0)
            _check_logic(tlog, dut.param_cmplx[1].b[1], 0)
            _check_logic(tlog, dut.param_cmplx[1].b[2], 0)

    tlog.info("Checking Constants:")
    _check_logic(tlog, dut.const_logic    , 0)
    _check_logic(tlog, dut.const_logic_vec, 0x3D)

    if cocotb.LANGUAGE in ["vhdl"]:
        _check_int (tlog, dut.const_bool, 0)
        _check_int (tlog, dut.const_int , 12)
        _check_real(tlog, dut.const_real, 6.28)
        _check_int (tlog, dut.const_char, ord('c'))
        _check_str (tlog, dut.const_str , "MODARRAY")

        if not cocotb.SIM_NAME.lower().startswith(("riviera")):
            _check_logic(tlog, dut.const_rec.a        , 1)
            _check_logic(tlog, dut.const_rec.b[0]     , 0xFF)
            _check_logic(tlog, dut.const_rec.b[1]     , 0xFF)
            _check_logic(tlog, dut.const_rec.b[2]     , 0xFF)
            _check_logic(tlog, dut.const_cmplx[1].a   , 1)
            _check_logic(tlog, dut.const_cmplx[1].b[0], 0xFF)
            _check_logic(tlog, dut.const_cmplx[1].b[1], 0xFF)
            _check_logic(tlog, dut.const_cmplx[1].b[2], 0xFF)
            _check_logic(tlog, dut.const_cmplx[2].a   , 1)
            _check_logic(tlog, dut.const_cmplx[2].b[0], 0xFF)
            _check_logic(tlog, dut.const_cmplx[2].b[1], 0xFF)
            _check_logic(tlog, dut.const_cmplx[2].b[2], 0xFF)

    dut.select_in         = 2

    yield Timer(1000)

    tlog.info("Writing the signals!!!")
    dut.sig_logic         = 1
    dut.sig_logic_vec     = 0xCC
    if cocotb.LANGUAGE in ["vhdl"]:
        dut.sig_bool          = 1
        dut.sig_int           = 5000
        dut.sig_real          = 22.54
        dut.sig_char          = ord('Z')
        dut.sig_str           = "Testing"
        dut.sig_rec.a         = 1
        dut.sig_rec.b[0]      = 0x01
        dut.sig_rec.b[1]      = 0x23
        dut.sig_rec.b[2]      = 0x45
        dut.sig_cmplx[0].a    = 0
        dut.sig_cmplx[0].b[0] = 0x67
        dut.sig_cmplx[0].b[1] = 0x89
        dut.sig_cmplx[0].b[2] = 0xAB
        dut.sig_cmplx[1].a    = 1
        dut.sig_cmplx[1].b[0] = 0xCD
        dut.sig_cmplx[1].b[1] = 0xEF
        dut.sig_cmplx[1].b[2] = 0x55

    yield Timer(1000)

    tlog.info("Checking writes:")
    _check_logic(tlog, dut.port_logic_out    , 1)
    _check_logic(tlog, dut.port_logic_vec_out, 0xCC)

    if cocotb.LANGUAGE in ["vhdl"]:
        _check_int (tlog, dut.port_bool_out, 1)
        _check_int (tlog, dut.port_int_out , 5000)
        _check_real(tlog, dut.port_real_out, 22.54)
        _check_int (tlog, dut.port_char_out, ord('Z'))
        _check_str (tlog, dut.port_str_out , "Testing")

        _check_logic(tlog, dut.port_rec_out.a        , 1)
        _check_logic(tlog, dut.port_rec_out.b[0]     , 0x01)
        _check_logic(tlog, dut.port_rec_out.b[1]     , 0x23)
        _check_logic(tlog, dut.port_rec_out.b[2]     , 0x45)
        _check_logic(tlog, dut.port_cmplx_out[0].a   , 0)
        _check_logic(tlog, dut.port_cmplx_out[0].b[0], 0x67)
        _check_logic(tlog, dut.port_cmplx_out[0].b[1], 0x89)
        _check_logic(tlog, dut.port_cmplx_out[0].b[2], 0xAB)
        _check_logic(tlog, dut.port_cmplx_out[1].a   , 1)
        _check_logic(tlog, dut.port_cmplx_out[1].b[0], 0xCD)
        _check_logic(tlog, dut.port_cmplx_out[1].b[1], 0xEF)
        _check_logic(tlog, dut.port_cmplx_out[1].b[2], 0x55)

    tlog.info("Writing a few signal sub-indices!!!")
    dut.sig_logic_vec[2]     = 0
    if cocotb.LANGUAGE in ["vhdl"] or not cocotb.SIM_NAME.lower().startswith(("ncsim")):
        dut.sig_t6[1][3][2]      = 1
        dut.sig_t6[0][2][7]      = 0

    if cocotb.LANGUAGE in ["vhdl"]:
        dut.sig_str[2]           = ord('E')
        dut.sig_rec.b[1][7]      = 1
        dut.sig_cmplx[1].b[1][0] = 0

    yield Timer(1000)

    tlog.info("Checking writes (2):")
    _check_logic(tlog, dut.port_logic_vec_out, 0xC8)
    if cocotb.LANGUAGE in ["vhdl"] or not cocotb.SIM_NAME.lower().startswith(("ncsim")):
        _check_logic(tlog, dut.sig_t6[1][3][2], 1)
        _check_logic(tlog, dut.sig_t6[0][2][7], 0)

    if cocotb.LANGUAGE in ["vhdl"]:
        _check_str (tlog, dut.port_str_out , "TEsting")

        _check_logic(tlog, dut.port_rec_out.b[1]     , 0xA3)
        _check_logic(tlog, dut.port_cmplx_out[1].b[1], 0xEE)
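
Here logging.getLogger("cocotb.test") relies on the dotted logger hierarchy: records logged to "cocotb.test" propagate to handlers attached to the "cocotb" parent. A minimal sketch of that behaviour with plain stdlib logging, no simulator required:

import logging

# "cocotb.test" is a child of "cocotb"; attach one handler to the parent
# and the child's records reach it via propagation.
parent = logging.getLogger("cocotb")
parent.setLevel(logging.INFO)
parent.addHandler(logging.StreamHandler())

tlog = logging.getLogger("cocotb.test")
tlog.info("Checking Generics/Parameters:")  # emitted via the parent's handler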

Example 6

Project: powerline
Source File: __init__.py
View license
def check(paths=None, debug=False, echoerr=echoerr, require_ext=None):
	'''Check configuration sanity

	:param list paths:
		Paths from which configuration should be loaded.
	:param bool debug:
		Determines whether some information useful for debugging linter should 
		be output.
	:param function echoerr:
		Function that will be used to echo the error(s). Should accept four 
		optional keyword parameters: ``problem`` and ``problem_mark``, and 
		``context`` and ``context_mark``.
	:param str require_ext:
		Require configuration for some extension to be present.

	:return:
		``False`` if user configuration seems to be completely sane and ``True`` 
		if some problems were found.
	'''
	hadproblem = False

	register_common_names()
	search_paths = paths or get_config_paths()
	find_config_files = generate_config_finder(lambda: search_paths)

	logger = logging.getLogger('powerline-lint')
	logger.setLevel(logging.DEBUG if debug else logging.ERROR)
	logger.addHandler(logging.StreamHandler())

	ee = EchoErr(echoerr, logger)

	if require_ext:
		used_main_spec = main_spec.copy()
		try:
			used_main_spec['ext'][require_ext].required()
		except KeyError:
			used_main_spec['ext'][require_ext] = ext_spec()
	else:
		used_main_spec = main_spec

	lhadproblem = [False]
	load_json_config = generate_json_config_loader(lhadproblem)

	config_loader = ConfigLoader(run_once=True, load=load_json_config)

	lists = {
		'colorschemes': set(),
		'themes': set(),
		'exts': set(),
	}
	found_dir = {
		'themes': False,
		'colorschemes': False,
	}
	config_paths = defaultdict(lambda: defaultdict(dict))
	loaded_configs = defaultdict(lambda: defaultdict(dict))
	for d in chain(
		find_all_ext_config_files(search_paths, 'colorschemes'),
		find_all_ext_config_files(search_paths, 'themes'),
	):
		if d['error']:
			hadproblem = True
			ee(problem=d['error'])
			continue
		if d['hadproblem']:
			hadproblem = True
		if d['ext']:
			found_dir[d['type']] = True
			lists['exts'].add(d['ext'])
			if d['name'] == '__main__':
				pass
			elif d['name'].startswith('__') or d['name'].endswith('__'):
				hadproblem = True
				ee(problem='File name is not supposed to start or end with “__”: {0}'.format(
					d['path']))
			else:
				lists[d['type']].add(d['name'])
			config_paths[d['type']][d['ext']][d['name']] = d['path']
			loaded_configs[d['type']][d['ext']][d['name']] = d['config']
		else:
			config_paths[d['type']][d['name']] = d['path']
			loaded_configs[d['type']][d['name']] = d['config']

	for typ in ('themes', 'colorschemes'):
		if not found_dir[typ]:
			hadproblem = True
			ee(problem='Subdirectory {0} was not found in paths {1}'.format(typ, ', '.join(search_paths)))

	diff = set(config_paths['colorschemes']) - set(config_paths['themes'])
	if diff:
		hadproblem = True
		for ext in diff:
			typ = 'colorschemes' if ext in config_paths['themes'] else 'themes'
			if not config_paths['top_' + typ] or typ == 'themes':
				ee(problem='{0} extension {1} not present in {2}'.format(
					ext,
					'configuration' if (
						ext in loaded_configs['themes'] and ext in loaded_configs['colorschemes']
					) else 'directory',
					typ,
				))

	try:
		main_config = load_config('config', find_config_files, config_loader)
	except IOError:
		main_config = {}
		ee(problem='Configuration file not found: config.json')
		hadproblem = True
	except MarkedError as e:
		main_config = {}
		ee(problem=str(e))
		hadproblem = True
	else:
		if used_main_spec.match(
			main_config,
			data={'configs': config_paths, 'lists': lists},
			context=Context(main_config),
			echoerr=ee
		)[1]:
			hadproblem = True

	import_paths = [os.path.expanduser(path) for path in main_config.get('common', {}).get('paths', [])]

	try:
		colors_config = load_config('colors', find_config_files, config_loader)
	except IOError:
		colors_config = {}
		ee(problem='Configuration file not found: colors.json')
		hadproblem = True
	except MarkedError as e:
		colors_config = {}
		ee(problem=str(e))
		hadproblem = True
	else:
		if colors_spec.match(colors_config, context=Context(colors_config), echoerr=ee)[1]:
			hadproblem = True

	if lhadproblem[0]:
		hadproblem = True

	top_colorscheme_configs = dict(loaded_configs['top_colorschemes'])
	data = {
		'ext': None,
		'top_colorscheme_configs': top_colorscheme_configs,
		'ext_colorscheme_configs': {},
		'colors_config': colors_config
	}
	for colorscheme, config in loaded_configs['top_colorschemes'].items():
		data['colorscheme'] = colorscheme
		if top_colorscheme_spec.match(config, context=Context(config), data=data, echoerr=ee)[1]:
			hadproblem = True

	ext_colorscheme_configs = dict2(loaded_configs['colorschemes'])
	for ext, econfigs in ext_colorscheme_configs.items():
		data = {
			'ext': ext,
			'top_colorscheme_configs': top_colorscheme_configs,
			'ext_colorscheme_configs': ext_colorscheme_configs,
			'colors_config': colors_config,
		}
		for colorscheme, config in econfigs.items():
			data['colorscheme'] = colorscheme
			if ext == 'vim':
				spec = vim_colorscheme_spec
			elif ext == 'shell':
				spec = shell_colorscheme_spec
			else:
				spec = colorscheme_spec
			if spec.match(config, context=Context(config), data=data, echoerr=ee)[1]:
				hadproblem = True

	colorscheme_configs = {}
	for ext in lists['exts']:
		colorscheme_configs[ext] = {}
		for colorscheme in lists['colorschemes']:
			econfigs = ext_colorscheme_configs[ext]
			ecconfigs = econfigs.get(colorscheme)
			mconfigs = (
				top_colorscheme_configs.get(colorscheme),
				econfigs.get('__main__'),
				ecconfigs,
			)
			if not (mconfigs[0] or mconfigs[2]):
				continue
			config = None
			for mconfig in mconfigs:
				if not mconfig:
					continue
				if config:
					config = mergedicts_copy(config, mconfig)
				else:
					config = mconfig
			colorscheme_configs[ext][colorscheme] = config

	theme_configs = dict2(loaded_configs['themes'])
	top_theme_configs = dict(loaded_configs['top_themes'])
	for ext, configs in theme_configs.items():
		data = {
			'ext': ext,
			'colorscheme_configs': colorscheme_configs,
			'import_paths': import_paths,
			'main_config': main_config,
			'top_themes': top_theme_configs,
			'ext_theme_configs': configs,
			'colors_config': colors_config
		}
		for theme, config in configs.items():
			data['theme'] = theme
			if theme == '__main__':
				data['theme_type'] = 'main'
				spec = main_theme_spec
			else:
				data['theme_type'] = 'regular'
				spec = theme_spec
			if spec.match(config, context=Context(config), data=data, echoerr=ee)[1]:
				hadproblem = True

	for top_theme, config in top_theme_configs.items():
		data = {
			'ext': None,
			'colorscheme_configs': colorscheme_configs,
			'import_paths': import_paths,
			'main_config': main_config,
			'theme_configs': theme_configs,
			'ext_theme_configs': None,
			'colors_config': colors_config
		}
		data['theme_type'] = 'top'
		data['theme'] = top_theme
		if top_theme_spec.match(config, context=Context(config), data=data, echoerr=ee)[1]:
			hadproblem = True

	return hadproblem
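
The linter configures its logger once: a fixed name, a level switched by the debug flag, and a bare StreamHandler. A standalone sketch of the same pattern; the handler guard is an addition to avoid stacking handlers when the function is called twice, not part of the original:

import logging

def make_lint_logger(debug=False):
    logger = logging.getLogger('powerline-lint')
    logger.setLevel(logging.DEBUG if debug else logging.ERROR)
    if not logger.handlers:  # added guard, not in the original
        logger.addHandler(logging.StreamHandler())
    return logger

make_lint_logger(debug=True).debug('only visible with debug=True')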

Example 7

Project: commissaire-mvp
Source File: clusterexec.py
View license
def clusterexec(store_manager, cluster_name, command, kwargs={}):
    """
    Remote executes a shell commands across a cluster.

    :param store_manager: Proxy object for remote stores
    :type store_manager: commissaire.store.StoreHandlerManager
    :param cluster_name: Name of the cluster to act on
    :type cluster_name: str
    :param command: Top-level command to execute
    :type command: str
    :param kwargs: Keyword arguments for the command
    :type kwargs: dict
    """
    logger = logging.getLogger('clusterexec')

    # TODO: This is a hack and should really be done elsewhere
    command_args = ()
    if command == 'upgrade':
        finished_hosts_key = 'upgraded'
        model_instance = ClusterUpgrade.new(
            name=cluster_name,
            status='in_process',
            started_at=datetime.datetime.utcnow().isoformat(),
            upgraded=[],
            in_process=[],
        )
    elif command == 'restart':
        finished_hosts_key = 'restarted'
        model_instance = ClusterRestart.new(
            name=cluster_name,
            status='in_process',
            started_at=datetime.datetime.utcnow().isoformat(),
            restarted=[],
            in_process=[],
        )
    elif command == 'deploy':
        finished_hosts_key = 'deployed'
        version = kwargs.get('version', '')
        command_args = (version,)
        model_instance = ClusterDeploy.new(
            name=cluster_name,
            status='in_process',
            started_at=datetime.datetime.utcnow().isoformat(),
            version=version,
            deployed=[],
            in_process=[],
        )

    end_status = 'finished'

    try:
        # Set the initial status in the store
        logger.info('Setting initial status.')
        logger.debug('Status={0}'.format(model_instance.to_json()))
        store_manager.save(model_instance)
    except Exception as error:
        logger.error(
            'Unable to save initial state for "{0}" clusterexec due to '
            '{1}: {2}'.format(cluster_name, type(error), error))
        return

    # Collect all host addresses in the cluster
    try:
        cluster = store_manager.get(Cluster.new(
            name=cluster_name, status='', hostset=[]))
    except Exception as error:
        logger.warn(
            'Unable to continue for cluster "{0}" due to '
            '{1}: {2}. Returning...'.format(cluster_name, type(error), error))
        return

    if cluster.hostset:
        logger.debug(
            '{0} hosts in cluster "{1}"'.format(
                len(cluster.hostset), cluster_name))
    else:
        logger.warn('No hosts in cluster "{0}"'.format(cluster_name))

    # TODO: Find better way to do this
    try:
        hosts = store_manager.list(Hosts(hosts=[]))
    except Exception as error:
        logger.warn(
            'No hosts in the cluster. Error: {0}. Exiting clusterexec'.format(
                error))
        return

    for host in hosts.hosts:
        if host.address not in cluster.hostset:
            logger.debug(
                'Skipping {0} as it is not in this cluster.'.format(
                    host.address))
            continue  # Move on to the next one
        oscmd = get_oscmd(host.os)

        # command_list is only used for logging
        command_list = getattr(oscmd, command)(*command_args)
        logger.info('Executing {0} on {1}...'.format(
            command_list, host.address))

        model_instance.in_process.append(host.address)
        try:
            store_manager.save(model_instance)
        except Exception as error:
            logger.error(
                'Unable to save in_process state for "{0}" clusterexec due to '
                '{1}: {2}'.format(cluster_name, type(error), error))
            return

        key = TemporarySSHKey(host, logger)
        key.create()

        try:
            transport = ansibleapi.Transport(host.remote_user)
            exe = getattr(transport, command)
            result, facts = exe(
                host.address, key.path, oscmd, kwargs)
        # XXX: ansibleapi explicitly raises Exception()
        except Exception as ex:
            # If there was a failure set the end_status and break out
            end_status = 'failed'
            logger.error('Clusterexec {0} for {1} failed: {2}: {3}'.format(
                command, host.address, type(ex), ex))
            break
        finally:
            try:
                key.remove()
                logger.debug('Removed temporary key file {0}'.format(key.path))
            except:
                logger.warn(
                    'Unable to remove the temporary key file: {0}'.format(
                        key.path))

        # Set the finished hosts
        new_finished_hosts = getattr(
            model_instance, finished_hosts_key) + [host.address]
        setattr(
            model_instance,
            finished_hosts_key,
            new_finished_hosts)
        try:
            idx = model_instance.in_process.index(host.address)
            model_instance.in_process.pop(idx)
        except ValueError:
            logger.warn('Host {0} was not in_process for {1} {2}'.format(
                host.address, command, cluster_name))
        try:
            store_manager.save(model_instance)
            logger.info('Finished executing {0} for {1} in {2}'.format(
                command, host.address, cluster_name))
        except Exception as error:
            logger.error(
                'Unable to save cluster state for "{0}" clusterexec due to '
                '{1}: {2}'.format(cluster_name, type(error), error))
            return

    # Final set of command result
    model_instance.finished_at = datetime.datetime.utcnow().isoformat()
    model_instance.status = end_status

    logger.info('Cluster {0} final {1} status: {2}'.format(
        cluster_name, command, model_instance.to_json()))

    try:
        store_manager.save(model_instance)
    except Exception as error:
        logger.error(
            'Unable to save final state for "{0}" clusterexec due to '
            '{1}: {2}'.format(cluster_name, type(error), error))

    logger.info('Clusterexec stopping')
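
Throughout clusterexec the same reporting shape recurs: a task-named logger plus the exception type and message on every failure path. A reduced sketch of that shape, with a stand-in exception in place of a real store failure:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('clusterexec')

try:
    raise IOError('store unavailable')  # stand-in for store_manager.save()
except Exception as error:
    logger.error(
        'Unable to save initial state for "%s" clusterexec due to %s: %s',
        'example-cluster', type(error), error)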

Example 8

Project: PyPXE
Source File: server.py
View license
def main():
    global SETTINGS, args
    try:
        # warn the user that they are starting PyPXE as non-root user
        if os.getuid() != 0:
            print '\nWARNING: Not root. Servers will probably fail to bind.\n'

        # configure
        args = parse_cli_arguments()
        if args.JSON_CONFIG: # load from configuration file if specified
            try:
                config_file = open(args.JSON_CONFIG, 'rb')
            except IOError:
                sys.exit('Failed to open {0}'.format(args.JSON_CONFIG))
            try:
                loaded_config = json.load(config_file)
                config_file.close()
            except ValueError:
                sys.exit('{0} does not contain valid JSON'.format(args.JSON_CONFIG))
            for setting in loaded_config:
                if type(loaded_config[setting]) is unicode:
                    loaded_config[setting] = loaded_config[setting].encode('ascii')
            SETTINGS.update(loaded_config) # update settings with JSON config
            args = parse_cli_arguments() # re-parse, CLI options take precedence

        # ideally this would be in dhcp itself, but the chroot below *probably*
        # breaks the ability to open the config file.
        if args.STATIC_CONFIG:
            try:
                static_config = open(args.STATIC_CONFIG, 'rb')
            except IOError:
                sys.exit("Failed to open {0}".format(args.STATIC_CONFIG))
            try:
                loaded_statics = json.load(static_config)
                static_config.close()
            except ValueError:
                sys.exit("{0} does not contain valid json".format(args.STATIC_CONFIG))
        else:
            loaded_statics = dict()

        # setup main logger
        sys_logger = logging.getLogger('PyPXE')
        if args.SYSLOG_SERVER:
            handler = logging.handlers.SysLogHandler(address = (args.SYSLOG_SERVER, int(args.SYSLOG_PORT)))
        else:
            handler = logging.StreamHandler()
        formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(name)s %(message)s')
        handler.setFormatter(formatter)
        sys_logger.addHandler(handler)
        sys_logger.setLevel(logging.INFO)

        # pass warning to user regarding starting HTTP server without iPXE
        if args.USE_HTTP and not args.USE_IPXE and not args.USE_DHCP:
            sys_logger.warning('HTTP selected but iPXE disabled. PXE ROM must support HTTP requests.')

        # if the argument was passed to enable ProxyDHCP then enable the DHCP server
        if args.DHCP_MODE_PROXY:
            args.USE_DHCP = True

        # if the network boot file name was not specified in the argument,
        # set it based on what services were enabled/disabled
        if args.NETBOOT_FILE == '':
            if not args.USE_IPXE:
                args.NETBOOT_FILE = 'pxelinux.0'
            elif not args.USE_HTTP:
                args.NETBOOT_FILE = 'boot.ipxe'
            else:
                args.NETBOOT_FILE = 'boot.http.ipxe'

        if args.NBD_WRITE and not args.NBD_COW:
            sys_logger.warning('NBD Write enabled but copy-on-write is not. Multiple clients may cause corruption')

        if args.NBD_COW_IN_MEM or args.NBD_COPY_TO_RAM:
            sys_logger.warning('NBD cowinmem and copytoram can cause high RAM usage')

        if args.NBD_COW and not args.NBD_WRITE:
            # cow implies write
            args.NBD_WRITE = True

        # make a list of running threads for each service
        running_services = []

        # configure/start TFTP server
        if args.USE_TFTP:

            # setup TFTP logger
            tftp_logger = sys_logger.getChild('TFTP')
            sys_logger.info('Starting TFTP server...')

            # setup the thread
            tftp_server = tftp.TFTPD(mode_debug = do_debug('tftp'), mode_verbose = do_verbose('tftp'), logger = tftp_logger, netboot_directory = args.NETBOOT_DIR)
            tftpd = threading.Thread(target = tftp_server.listen)
            tftpd.daemon = True
            tftpd.start()
            running_services.append(tftpd)

        # configure/start DHCP server
        if args.USE_DHCP:

            # setup DHCP logger
            dhcp_logger = sys_logger.getChild('DHCP')
            if args.DHCP_MODE_PROXY:
                sys_logger.info('Starting DHCP server in ProxyDHCP mode...')
            else:
                sys_logger.info('Starting DHCP server...')

            # setup the thread
            dhcp_server = dhcp.DHCPD(
                ip = args.DHCP_SERVER_IP,
                port = args.DHCP_SERVER_PORT,
                offer_from = args.DHCP_OFFER_BEGIN,
                offer_to = args.DHCP_OFFER_END,
                subnet_mask = args.DHCP_SUBNET,
                router = args.DHCP_ROUTER,
                dns_server = args.DHCP_DNS,
                broadcast = args.DHCP_BROADCAST,
                file_server = args.DHCP_FILESERVER,
                file_name = args.NETBOOT_FILE,
                use_ipxe = args.USE_IPXE,
                use_http = args.USE_HTTP,
                mode_proxy = args.DHCP_MODE_PROXY,
                mode_debug = do_debug('dhcp'),
                mode_verbose = do_verbose('dhcp'),
                whitelist = args.DHCP_WHITELIST,
                static_config = loaded_statics,
                logger = dhcp_logger,
                saveleases = args.LEASES_FILE)
            dhcpd = threading.Thread(target = dhcp_server.listen)
            dhcpd.daemon = True
            dhcpd.start()
            running_services.append(dhcpd)

        # configure/start HTTP server
        if args.USE_HTTP:

            # setup HTTP logger
            http_logger = sys_logger.getChild('HTTP')
            sys_logger.info('Starting HTTP server...')

            # setup the thread
            http_server = http.HTTPD(mode_debug = do_debug('http'), mode_verbose = do_verbose('http'), logger = http_logger, netboot_directory = args.NETBOOT_DIR)
            httpd = threading.Thread(target = http_server.listen)
            httpd.daemon = True
            httpd.start()
            running_services.append(httpd)

        # configure/start NBD server
        if args.NBD_BLOCK_DEVICE:
            # setup NBD logger
            nbd_logger = sys_logger.getChild('NBD')
            sys_logger.info('Starting NBD server...')
            nbd_server = nbd.NBD(
                block_device = args.NBD_BLOCK_DEVICE,
                write = args.NBD_WRITE,
                cow = args.NBD_COW,
                in_mem = args.NBD_COW_IN_MEM,
                copy_to_ram = args.NBD_COPY_TO_RAM,
                ip = args.NBD_SERVER_IP,
                port = args.NBD_PORT,
                mode_debug = do_debug('nbd'),
                mode_verbose = do_verbose('nbd'),
                logger = nbd_logger,
                netboot_directory = args.NETBOOT_DIR)
            nbdd = threading.Thread(target = nbd_server.listen)
            nbdd.daemon = True
            nbdd.start()
            running_services.append(nbdd)

        sys_logger.info('PyPXE successfully initialized and running!')

        while map(lambda x: x.isAlive(), running_services):
            sleep(1)

    except KeyboardInterrupt:
        sys.exit('\nShutting down PyPXE...\n')
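
sys_logger.getChild('TFTP') is shorthand for logging.getLogger('PyPXE.TFTP'): every service logger hangs off the single configured parent, so one handler and formatter cover them all. A minimal sketch of that layout:

import logging

sys_logger = logging.getLogger('PyPXE')
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    '%(asctime)s [%(levelname)s] %(name)s %(message)s'))
sys_logger.addHandler(handler)
sys_logger.setLevel(logging.INFO)

tftp_logger = sys_logger.getChild('TFTP')  # == logging.getLogger('PyPXE.TFTP')
tftp_logger.info('Starting TFTP server...')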

Example 9

Project: pupil
Source File: main.py
View license
def launcher():
    """Starts eye processes. Hosts the IPC Backbone and Logging functions.

    Reacts to notifications:
       ``launcher_process.should_stop``: Stops the launcher process
       ``eye_process.should_start``: Starts the eye process
    """


    #Reliable msg dispatch to the IPC via push bridge.
    def pull_pub(ipc_pub_url,pull):
        ctx = zmq.Context.instance()
        pub = ctx.socket(zmq.PUB)
        pub.connect(ipc_pub_url)

        while True:
            m = pull.recv_multipart()
            pub.send_multipart(m)


    #The delay proxy handles delayed notifications.
    def delay_proxy(ipc_pub_url,ipc_sub_url):
        ctx = zmq.Context.instance()
        sub = zmq_tools.Msg_Receiver(ctx,ipc_sub_url,('delayed_notify',))
        pub = zmq_tools.Msg_Dispatcher(ctx,ipc_pub_url)
        poller = zmq.Poller()
        poller.register(sub.socket, zmq.POLLIN)
        waiting_notifications = {}

        while True:
            if poller.poll(timeout=250):
                #Recv new delayed notification and store it.
                topic,n = sub.recv()
                n['_notify_time_'] = time()+n['delay']
                waiting_notifications[n['subject']] = n
            #When a notification's time has come, pop it from the dict and send it as a notification
            for n in list(waiting_notifications.values()):  # copy: entries are deleted while iterating
                if n['_notify_time_'] < time():
                    del n['_notify_time_']
                    del n['delay']
                    del waiting_notifications[n['subject']]
                    pub.notify(n)


    #Recv log records from other processes.
    def log_loop(ipc_sub_url,log_level_debug):
        import logging
        #Get the root logger
        logger = logging.getLogger()
        #set log level
        if log_level_debug:
            logger.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.INFO)
        #Stream to file
        fh = logging.FileHandler(os.path.join(user_dir,'capture.log'),mode='w')
        fh.setFormatter(logging.Formatter('%(asctime)s - %(processName)s - [%(levelname)s] %(name)s: %(message)s'))
        logger.addHandler(fh)
        #Stream to console.
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter('%(processName)s - [%(levelname)s] %(name)s: %(message)s'))
        logger.addHandler(ch)
        # IPC setup to receive log messages. Use zmq_tools.ZMQ_handler to send messages to here.
        sub = zmq_tools.Msg_Receiver(zmq_ctx,ipc_sub_url,topics=("logging",))
        while True:
            topic,msg = sub.recv()
            record = logging.makeLogRecord(msg)
            logger.handle(record)


    ## IPC
    timebase = Value(c_double,0)
    eyes_are_alive = Value(c_bool,0),Value(c_bool,0)

    zmq_ctx = zmq.Context()

    #Let the OS choose the IP and PORT
    ipc_pub_url = 'tcp://*:*'
    ipc_sub_url = 'tcp://*:*'
    ipc_push_url = 'tcp://*:*'

    # Binding IPC Backbone Sockets to URLs.
    # They are used in the threads started below.
    # Using them in the main thread is not allowed.
    xsub_socket = zmq_ctx.socket(zmq.XSUB)
    xsub_socket.bind(ipc_pub_url)
    ipc_pub_url = xsub_socket.last_endpoint.replace("0.0.0.0","127.0.0.1")

    xpub_socket = zmq_ctx.socket(zmq.XPUB)
    xpub_socket.bind(ipc_sub_url)
    ipc_sub_url = xpub_socket.last_endpoint.replace("0.0.0.0","127.0.0.1")

    pull_socket = zmq_ctx.socket(zmq.PULL)
    pull_socket.bind(ipc_push_url)
    ipc_push_url = pull_socket.last_endpoint.replace("0.0.0.0","127.0.0.1")


    # Starting communication threads:
    # A ZMQ Proxy Device serves as our IPC Backbone
    ipc_backbone_thread = Thread(target=zmq.proxy, args=(xsub_socket,xpub_socket))
    ipc_backbone_thread.setDaemon(True)
    ipc_backbone_thread.start()

    pull_pub = Thread(target=pull_pub, args=(ipc_pub_url,pull_socket))
    pull_pub.setDaemon(True)
    pull_pub.start()

    log_thread = Thread(target=log_loop, args=(ipc_sub_url, 'debug' in sys.argv))
    log_thread.setDaemon(True)
    log_thread.start()

    delay_thread = Thread(target=delay_proxy, args=(ipc_push_url,ipc_sub_url))
    delay_thread.setDaemon(True)
    delay_thread.start()

    del xsub_socket,xpub_socket,pull_socket
    sleep(0.2)

    topics = (  'notify.eye_process.',
                'notify.launcher_process.',
                'notify.meta.should_doc')
    cmd_sub = zmq_tools.Msg_Receiver(zmq_ctx,ipc_sub_url,topics=topics )
    cmd_push = zmq_tools.Msg_Dispatcher(zmq_ctx,ipc_push_url)

    if app == 'service':
        Process(target=service,
                      name= 'service',
                      args=(timebase,
                            eyes_are_alive,
                            ipc_pub_url,
                            ipc_sub_url,
                            ipc_push_url,
                            user_dir,
                            app_version
                            )).start()
    else:
        Process(target=world,
                      name= 'world',
                      args=(timebase,
                            eyes_are_alive,
                            ipc_pub_url,
                            ipc_sub_url,
                            ipc_push_url,
                            user_dir,
                            app_version,
                            )).start()

    with Prevent_Idle_Sleep():
        while True:
            #block and listen for relevant messages.
            topic,n = cmd_sub.recv()
            if "notify.eye_process.should_start" in topic:
                eye_id = n['eye_id']
                if not eyes_are_alive[eye_id].value:
                    Process(target=eye,
                                name='eye%s'%eye_id,
                                args=(timebase,
                                    eyes_are_alive[eye_id],
                                    ipc_pub_url,
                                    ipc_sub_url,
                                    ipc_push_url,
                                    user_dir,
                                    app_version,
                                    eye_id
                                    )).start()
            elif "notify.launcher_process.should_stop" in topic:
                break
            elif "notify.meta.should_doc" in topic:
                cmd_push.notify({
                    'subject':'meta.doc',
                    'actor':'launcher',
                    'doc':launcher.__doc__})

        for p in active_children(): p.join()
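
log_loop rebuilds records with logging.makeLogRecord from dicts received over the IPC subscriber and hands them to the root logger. The round trip can be shown without ZMQ; the msg dict below is a hand-rolled stand-in for a sub.recv() payload:

import logging

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())

msg = {'name': 'eye0', 'levelno': logging.INFO, 'levelname': 'INFO',
       'msg': 'eye process started'}  # stand-in for a sub.recv() payload
record = logging.makeLogRecord(msg)
logger.handle(record)  # dispatched as if 'eye0' had logged it locally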

Example 10

Project: PyClassLessons
Source File: base.py
View license
    def get_response(self, request):
        "Returns an HttpResponse object for the given HttpRequest"

        # Setup default url resolver for this thread, this code is outside
        # the try/except so we don't get a spurious "unbound local
        # variable" exception in the event an exception is raised before
        # resolver is set
        urlconf = settings.ROOT_URLCONF
        urlresolvers.set_urlconf(urlconf)
        resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
        try:
            response = None
            # Apply request middleware
            for middleware_method in self._request_middleware:
                response = middleware_method(request)
                if response:
                    break

            if response is None:
                if hasattr(request, 'urlconf'):
                    # Reset url resolver with a custom urlconf.
                    urlconf = request.urlconf
                    urlresolvers.set_urlconf(urlconf)
                    resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)

                resolver_match = resolver.resolve(request.path_info)
                callback, callback_args, callback_kwargs = resolver_match
                request.resolver_match = resolver_match

                # Apply view middleware
                for middleware_method in self._view_middleware:
                    response = middleware_method(request, callback, callback_args, callback_kwargs)
                    if response:
                        break

            if response is None:
                wrapped_callback = self.make_view_atomic(callback)
                try:
                    response = wrapped_callback(request, *callback_args, **callback_kwargs)
                except Exception as e:
                    # If the view raised an exception, run it through exception
                    # middleware, and if the exception middleware returns a
                    # response, use that. Otherwise, reraise the exception.
                    for middleware_method in self._exception_middleware:
                        response = middleware_method(request, e)
                        if response:
                            break
                    if response is None:
                        raise

            # Complain if the view returned None (a common error).
            if response is None:
                if isinstance(callback, types.FunctionType):    # FBV
                    view_name = callback.__name__
                else:                                           # CBV
                    view_name = callback.__class__.__name__ + '.__call__'
                raise ValueError("The view %s.%s didn't return an HttpResponse object. It returned None instead."
                                 % (callback.__module__, view_name))

            # If the response supports deferred rendering, apply template
            # response middleware and then render the response
            if hasattr(response, 'render') and callable(response.render):
                for middleware_method in self._template_response_middleware:
                    response = middleware_method(request, response)
                response = response.render()

        except http.Http404 as e:
            logger.warning('Not Found: %s', request.path,
                        extra={
                            'status_code': 404,
                            'request': request
                        })
            if settings.DEBUG:
                response = debug.technical_404_response(request, e)
            else:
                try:
                    callback, param_dict = resolver.resolve404()
                    response = callback(request, **param_dict)
                except:
                    signals.got_request_exception.send(sender=self.__class__, request=request)
                    response = self.handle_uncaught_exception(request, resolver, sys.exc_info())

        except PermissionDenied:
            logger.warning(
                'Forbidden (Permission denied): %s', request.path,
                extra={
                    'status_code': 403,
                    'request': request
                })
            try:
                callback, param_dict = resolver.resolve403()
                response = callback(request, **param_dict)
            except:
                signals.got_request_exception.send(
                    sender=self.__class__, request=request)
                response = self.handle_uncaught_exception(request,
                    resolver, sys.exc_info())

        except SuspiciousOperation as e:
            # The request logger receives events for any problematic request
            # The security logger receives events for all SuspiciousOperations
            security_logger = logging.getLogger('django.security.%s' %
                            e.__class__.__name__)
            security_logger.error(
                force_text(e),
                extra={
                    'status_code': 400,
                    'request': request
                })

            try:
                callback, param_dict = resolver.resolve400()
                response = callback(request, **param_dict)
            except:
                signals.got_request_exception.send(
                    sender=self.__class__, request=request)
                response = self.handle_uncaught_exception(request,
                    resolver, sys.exc_info())

        except SystemExit:
            # Allow sys.exit() to actually exit. See tickets #1023 and #4701
            raise

        except:  # Handle everything else.
            # Get the exception info now, in case another exception is thrown later.
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())

        try:
            # Apply response middleware, regardless of the response
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
            response = self.apply_response_fixes(request, response)
        except:  # Any exception should be gathered and handled
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())

        response._closable_objects.append(request)

        return response
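
The security path composes the logger name at runtime ('django.security.<ExceptionClass>'), so one handler attached to the 'django.security' parent catches every SuspiciousOperation subtype. A sketch with a stand-in exception class:

import logging

parent = logging.getLogger('django.security')
parent.setLevel(logging.ERROR)
parent.addHandler(logging.StreamHandler())

class DisallowedHost(Exception):  # stand-in for a SuspiciousOperation subclass
    pass

e = DisallowedHost('bad Host header')
security_logger = logging.getLogger('django.security.%s' % e.__class__.__name__)
security_logger.error(str(e))  # propagates to the parent's handler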

Example 11

Project: python-beaver
Source File: config.py
View license
    def __init__(self, args, logger=None):
        self._logger = logger or logging.getLogger(__name__)
        self._logger.debug('Processing beaver portion of config file %s' % args.config)

        self._section_defaults = {
            'add_field': '',
            'add_field_env': '',
            'debug': '0',
            'discover_interval': '15',
            'encoding': 'utf_8',

            # should be a python regex of files to remove
            'exclude': '',
            'format': '',

            # throw out empty lines instead of shipping them
            'ignore_empty': '0',

            # allow ignoring copytruncate results
            'ignore_truncate': '0',

            # buffered tokenization
            # we string-escape the delimiter later so that we can put escaped characters in our config file
            'delimiter': '\n',
            'size_limit': '',

            # multiline events support. Default is disabled
            'multiline_regex_after': '',
            'multiline_regex_before': '',

            'message_format': '',
            'sincedb_write_interval': '15',
            'stat_interval': '1',
            'start_position': 'end',
            'tags': '',
            'tail_lines': '0',
            'type': '',
            # Redis specific namespace
            'redis_namespace': ''
        }

        self._main_defaults = {
            'kafka_client_id': os.environ.get('KAFKA_CLIENT_ID', 'beaver-kafka'),
            'kafka_hosts': os.environ.get('KAFKA_HOSTS', 'localhost:9092'),
            'kafka_async': os.environ.get('KAFKA_ASYNC', True),
            'kafka_topic': os.environ.get('KAFKA_TOPIC', 'logstash-topic'),
            'kafka_key': os.environ.get('KAFKA_KEY'),
            'kafka_codec': os.environ.get('KAFKA_CODEC'),
            'kafka_ack_timeout': os.environ.get('KAFKA_ACK_TIMEOUT', 2000),
            'kafka_batch_n': os.environ.get('KAFKA_BATCH_N', 10),
            'kafka_batch_t': os.environ.get('KAFKA_BATCH_T', 10),
            'kafka_round_robin': os.environ.get('KAFKA_ROUND_ROBIN', False),
            'mqtt_clientid': 'paho',
            'mqtt_host': 'localhost',
            'mqtt_port': '1883',
            'mqtt_topic': '/logstash',
            'mqtt_keepalive': '60',
            'rabbitmq_host': os.environ.get('RABBITMQ_HOST', 'localhost'),
            'rabbitmq_port': os.environ.get('RABBITMQ_PORT', '5672'),
            'rabbitmq_ssl': '0',
            'rabbitmq_ssl_key': '',
            'rabbitmq_ssl_cert': '',
            'rabbitmq_ssl_cacert': '',
            'rabbitmq_vhost': os.environ.get('RABBITMQ_VHOST', '/'),
            'rabbitmq_username': os.environ.get('RABBITMQ_USERNAME', 'guest'),
            'rabbitmq_password': os.environ.get('RABBITMQ_PASSWORD', 'guest'),
            'rabbitmq_queue': os.environ.get('RABBITMQ_QUEUE', 'logstash-queue'),
            'rabbitmq_exchange_type': os.environ.get('RABBITMQ_EXCHANGE_TYPE', 'direct'),
            'rabbitmq_exchange_durable': os.environ.get('RABBITMQ_EXCHANGE_DURABLE', '0'),
            'rabbitmq_queue_durable': os.environ.get('RABBITMQ_QUEUE_DURABLE', '0'),
            'rabbitmq_ha_queue': os.environ.get('RABBITMQ_HA_QUEUE', '0'),
            'rabbitmq_key': os.environ.get('RABBITMQ_KEY', 'logstash-key'),
            'rabbitmq_exchange': os.environ.get('RABBITMQ_EXCHANGE', 'logstash-exchange'),
            'rabbitmq_timeout': '1',
            'rabbitmq_delivery_mode': 1,
            'redis_url': os.environ.get('REDIS_URL', 'redis://localhost:6379/0'),
            'redis_namespace': os.environ.get('REDIS_NAMESPACE', 'logstash:beaver'),
            'redis_data_type': os.environ.get('REDIS_DATA_TYPE', 'list'),
            'redis_password': '',
            'sns_aws_access_key': '',
            'sns_aws_secret_key': '',
            'sns_aws_profile_name': '',
            'sns_aws_region': 'us-east-1',
            'sns_aws_topic_arn': '',
            'sqs_aws_access_key': '',
            'sqs_aws_secret_key': '',
            'sqs_aws_profile_name': '',
            'sqs_aws_region': 'us-east-1',
            'sqs_aws_queue': '',
            'sqs_aws_queue_owner_acct_id': '',
            'sqs_bulk_lines': False,
            'kinesis_aws_access_key': '', 
            'kinesis_aws_secret_key': '', 
            'kinesis_aws_region': 'us-east-1', 
            'kinesis_aws_stream': '', 
            'kinesis_aws_batch_size_max': '512000',
            'tcp_host': '127.0.0.1',
            'tcp_port': '9999',
            'tcp_ssl_enabled': '0',
            'tcp_ssl_verify': '0',
            'tcp_ssl_cacert': '',
            'tcp_ssl_cert': '',
            'tcp_ssl_key':'',
            'udp_host': os.environ.get('UDP_HOST', '127.0.0.1'),
            'udp_port': os.environ.get('UDP_PORT', '9999'),
            'zeromq_address': os.environ.get('ZEROMQ_ADDRESS', 'tcp://localhost:2120'),
            'zeromq_pattern': 'push',
            'zeromq_hwm': os.environ.get('ZEROMQ_HWM', ''),
            'stomp_host' : 'localhost',
            'stomp_port' : '61613',
            'stomp_user' : 'user',
            'stomp_password' : None,
            'stomp_queue' : 'queue/logstash',

            # exponential backoff
            'respawn_delay': '3',
            'max_failure': '7',

            # consumer processes
            'number_of_consumer_processes': '1',

            # interprocess queue max size before puts block
            'max_queue_size': '100',

            # time in seconds before updating the file mapping
            'update_file_mapping_time': '',  # deprecated
            'discover_interval': '15',

            # time in seconds from last command sent before a queue kills itself
            'queue_timeout': '60',

            # kill and respawn worker process after given number of seconds
            'refresh_worker_process': '',

            # time in seconds to wait on queue.get() block before raising Queue.Empty exception
            'wait_timeout': '5',

            # path to sincedb sqlite db
            'sincedb_path': '',

            # 0 for logstash version < 1.2, 1 for logstash >= 1.2
            'logstash_version': '',

            # ssh tunnel support
            'ssh_key_file': '',
            'ssh_tunnel': '',
            'ssh_tunnel_port': '',
            'ssh_remote_host': '',
            'ssh_remote_port': '',
            'ssh_options': '',
            'subprocess_poll_sleep': '1',

            # the following can be passed via argparse
            'zeromq_bind': os.environ.get('BEAVER_MODE', 'bind' if os.environ.get('BIND', False) else 'connect'),
            'files': os.environ.get('BEAVER_FILES', ''),
            'format': os.environ.get('BEAVER_FORMAT', 'json'),
            'fqdn': '0',
            'hostname': '',
            'output': '',
            'path': os.environ.get('BEAVER_PATH', '/var/log'),
            'transport': os.environ.get('BEAVER_TRANSPORT', 'stdout'),  # this needs to be passed to the import class somehow

            # Path to individual file configs. These override any sections in the main beaver.ini file
            'confd_path': '/etc/beaver/conf.d',

            # the following are parsed before the config file is parsed
            # but may be useful at runtime
            'config': '/dev/null',
            'debug': '0',
            'daemonize': '0',
            'pid': '',

            # Ignore files older then n days, use 0 to disable
            'ignore_old_files': 0
        }

        self._configfile = args.config
        self._config_parser = GlobSafeConfigParser
        self._globbed = []
        self._parse(args)
        for key in self._beaver_config:
            self._logger.debug('[CONFIG] "{0}" => "{1}"'.format(key, self._beaver_config.get(key)))

        self._update_files()
        self._check_for_deprecated_usage()
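
logger or logging.getLogger(__name__) is the injectable-logger idiom used in this constructor: callers (and tests) can pass their own logger, otherwise a module-level default is created. A sketch with a hypothetical Worker class:

import logging

class Worker:
    def __init__(self, logger=None):
        # injected logger for tests, module-level default otherwise
        self._logger = logger or logging.getLogger(__name__)

    def run(self):
        self._logger.debug('processing...')

logging.basicConfig(level=logging.DEBUG)
Worker().run()                             # default: getLogger(__name__)
Worker(logging.getLogger('custom')).run()  # injected logger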

Example 12

Project: allianceauth
Source File: authenticator.py
View license
def do_main_program():
    #
    # --- Authenticator implementation
    #    All of this has to go in here so we can correctly daemonize the tool
    #    without losing the file descriptors opened by the Ice module
    slicedir = Ice.getSliceDir()
    if not slicedir:
        slicedir = ["-I/usr/share/Ice/slice", "-I/usr/share/slice"]
    else:
        slicedir = ['-I' + slicedir]
    Ice.loadSlice('', slicedir + [cfg.ice.slice])
    import Murmur

    class allianceauthauthenticatorApp(Ice.Application):
        def run(self, args):
            self.shutdownOnInterrupt()

            if not self.initializeIceConnection():
                return 1

            if cfg.ice.watchdog > 0:
                self.failedWatch = True
                self.checkConnection()

            # Serve till we are stopped
            self.communicator().waitForShutdown()
            self.watchdog.cancel()

            if self.interrupted():
                warning('Caught interrupt, shutting down')

            threadDB.disconnect()
            return 0

        def initializeIceConnection(self):
            """
            Establishes the two-way Ice connection and adds the authenticator to the
            configured servers
            """
            ice = self.communicator()

            if cfg.ice.secret:
                debug('Using shared ice secret')
                ice.getImplicitContext().put("secret", cfg.ice.secret)
            elif not cfg.glacier.enabled:
                warning('Consider using an ice secret to improve security')

            if cfg.glacier.enabled:
                # info('Connecting to Glacier2 server (%s:%d)', glacier_host, glacier_port)
                error('Glacier support not implemented yet')
                # TODO: Implement this

            info('Connecting to Ice server (%s:%d)', cfg.ice.host, cfg.ice.port)
            base = ice.stringToProxy('Meta:tcp -h %s -p %d' % (cfg.ice.host, cfg.ice.port))
            self.meta = Murmur.MetaPrx.uncheckedCast(base)

            adapter = ice.createObjectAdapterWithEndpoints('Callback.Client', 'tcp -h %s' % cfg.ice.host)
            adapter.activate()

            metacbprx = adapter.addWithUUID(metaCallback(self))
            self.metacb = Murmur.MetaCallbackPrx.uncheckedCast(metacbprx)

            authprx = adapter.addWithUUID(allianceauthauthenticator())
            self.auth = Murmur.ServerUpdatingAuthenticatorPrx.uncheckedCast(authprx)

            return self.attachCallbacks()

        def attachCallbacks(self, quiet=False):
            """
            Attaches all callbacks for meta and authenticators
            """

            # Ice.ConnectionRefusedException
            # debug('Attaching callbacks')
            try:
                if not quiet: info('Attaching meta callback')

                self.meta.addCallback(self.metacb)

                for server in self.meta.getBootedServers():
                    if not cfg.murmur.servers or server.id() in cfg.murmur.servers:
                        if not quiet: info('Setting authenticator for virtual server %d', server.id())
                        server.setAuthenticator(self.auth)

            except (Murmur.InvalidSecretException, Ice.UnknownUserException, Ice.ConnectionRefusedException) as e:
                if isinstance(e, Ice.ConnectionRefusedException):
                    error('Server refused connection')
                elif isinstance(e, Murmur.InvalidSecretException) or \
                                isinstance(e, Ice.UnknownUserException) and (
                                    e.unknown == 'Murmur::InvalidSecretException'):
                    error('Invalid ice secret')
                else:
                    # We do not actually want to handle this one, re-raise it
                    raise e

                self.connected = False
                return False

            self.connected = True
            return True

        def checkConnection(self):
            """
            Tries to reapply all callbacks so the authenticator
            survives server restarts and disconnects.
            """
            # debug('Watchdog run')

            try:
                if not self.attachCallbacks(quiet=not self.failedWatch):
                    self.failedWatch = True
                else:
                    self.failedWatch = False
            except Ice.Exception as e:
                error('Failed connection check, will retry in next watchdog run (%ds)', cfg.ice.watchdog)
                debug(str(e))
                self.failedWatch = True

            # Renew the timer
            self.watchdog = Timer(cfg.ice.watchdog, self.checkConnection)
            self.watchdog.start()

    def checkSecret(func):
        """
        Decorator that checks whether the server transmitted the right secret
        if a secret is supposed to be used.
        """
        if not cfg.ice.secret:
            return func

        def newfunc(*args, **kws):
            if 'current' in kws:
                current = kws["current"]
            else:
                current = args[-1]

            if not current or 'secret' not in current.ctx or current.ctx['secret'] != cfg.ice.secret:
                error('Server transmitted invalid secret. Possible injection attempt.')
                raise Murmur.InvalidSecretException()

            return func(*args, **kws)

        return newfunc

    def fortifyIceFu(retval=None, exceptions=(Ice.Exception,)):
        """
        Decorator that catches exceptions, logs them and returns a safe retval
        value. This helps prevent the authenticator from getting stuck in
        critical code paths. Only exceptions that are instances of classes
        given in the exceptions list are not caught.
        
        The default is to catch all non-Ice exceptions.
        """

        def newdec(func):
            def newfunc(*args, **kws):
                try:
                    return func(*args, **kws)
                except Exception as e:
                    catch = True
                    for ex in exceptions:
                        if isinstance(e, ex):
                            catch = False
                            break

                    if catch:
                        critical('Unexpected exception caught')
                        exception(e)
                        return retval
                    raise

            return newfunc

        return newdec

    class metaCallback(Murmur.MetaCallback):
        def __init__(self, app):
            Murmur.MetaCallback.__init__(self)
            self.app = app

        @fortifyIceFu()
        @checkSecret
        def started(self, server, current=None):
            """
            This function is called when a virtual server is started
            and makes sure an authenticator gets attached if needed.
            """
            if not cfg.murmur.servers or server.id() in cfg.murmur.servers:
                info('Setting authenticator for virtual server %d', server.id())
                try:
                    server.setAuthenticator(app.auth)
                # Apparently this server was restarted without us noticing
                except (Murmur.InvalidSecretException, Ice.UnknownUserException) as e:
                    if hasattr(e, "unknown") and e.unknown != "Murmur::InvalidSecretException":
                        # Special handling for Murmur 1.2.2 servers with invalid slice files
                        raise e

                    error('Invalid ice secret')
                    return
            else:
                debug('Virtual server %d got started', server.id())

        @fortifyIceFu()
        @checkSecret
        def stopped(self, server, current=None):
            """
            This function is called when a virtual server is stopped
            """
            if self.app.connected:
                # Only try to output the server id if we think we are still connected to prevent
                # flooding of our thread pool
                try:
                    if not cfg.murmur.servers or server.id() in cfg.murmur.servers:
                        info('Authenticated virtual server %d got stopped', server.id())
                    else:
                        debug('Virtual server %d got stopped', server.id())
                    return
                except Ice.ConnectionRefusedException:
                    self.app.connected = False

            debug('Server shutdown stopped a virtual server')

    if cfg.user.reject_on_error:  # Python 2.4 compat
        authenticateFortifyResult = (-1, None, None)
    else:
        authenticateFortifyResult = (-2, None, None)

    class allianceauthauthenticator(Murmur.ServerUpdatingAuthenticator):
        texture_cache = {}

        def __init__(self):
            Murmur.ServerUpdatingAuthenticator.__init__(self)

        @fortifyIceFu(authenticateFortifyResult)
        @checkSecret
        def authenticate(self, name, pw, certlist, certhash, strong, current=None):
            """
            This function is called to authenticate a user
            """

            # Search for the user in the database
            FALL_THROUGH = -2
            AUTH_REFUSED = -1

            if name == 'SuperUser':
                debug('Forced fall through for SuperUser')
                return (FALL_THROUGH, None, None)

            try:
                sql = 'SELECT id, pwhash, groups FROM %sservices_mumbleuser WHERE username = %%s' % cfg.database.prefix
                cur = threadDB.execute(sql, [name])
            except threadDbException:
                return (FALL_THROUGH, None, None)

            res = cur.fetchone()
            cur.close()
            if not res:
                info('Fall through for unknown user "%s"', name)
                return (FALL_THROUGH, None, None)

            uid, upwhash, ugroups = res

            if ugroups:
                groups = ugroups.split(',')
            else:
                groups = []

            if allianceauth_check_hash(pw, upwhash):
                info('User authenticated: "%s" (%d)', name, uid + cfg.user.id_offset)
                debug('Group memberships: %s', str(groups))
                return (uid + cfg.user.id_offset, entity_decode(name), groups)

            info('Failed authentication attempt for user: "%s" (%d)', name, uid + cfg.user.id_offset)
            return (AUTH_REFUSED, None, None)

        @fortifyIceFu((False, None))
        @checkSecret
        def getInfo(self, id, current=None):
            """
            Gets called to fetch user specific information
            """

            # We do not expose any additional information so always fall through
            debug('getInfo for %d -> denied', id)
            return (False, None)

        @fortifyIceFu(-2)
        @checkSecret
        def nameToId(self, name, current=None):
            """
            Gets called to get the id for a given username
            """

            FALL_THROUGH = -2
            if name == 'SuperUser':
                debug('nameToId SuperUser -> forced fall through')
                return FALL_THROUGH

            try:
                sql = 'SELECT id FROM %sservices_mumbleuser WHERE username = %%s' % cfg.database.prefix
                cur = threadDB.execute(sql, [name])
            except threadDbException:
                return FALL_THROUGH

            res = cur.fetchone()
            cur.close()
            if not res:
                debug('nameToId %s -> ?', name)
                return FALL_THROUGH

            debug('nameToId %s -> %d', name, (res[0] + cfg.user.id_offset))
            return res[0] + cfg.user.id_offset

        @fortifyIceFu("")
        @checkSecret
        def idToName(self, id, current=None):
            """
            Gets called to get the username for a given id
            """

            FALL_THROUGH = ""
            # Make sure the ID is in our range and transform it to the actual smf user id
            if id < cfg.user.id_offset:
                return FALL_THROUGH
            bbid = id - cfg.user.id_offset

            # Fetch the user from the database
            try:
                sql = 'SELECT username FROM %sservices_mumbleuser WHERE id = %%s' % cfg.database.prefix
                cur = threadDB.execute(sql, [bbid])
            except threadDbException:
                return FALL_THROUGH

            res = cur.fetchone()
            cur.close()
            if res:
                if res[0] == 'SuperUser':
                    debug('idToName %d -> "SuperUser" catched')
                    return FALL_THROUGH

                debug('idToName %d -> "%s"', id, res[0])
                return res[0]

            debug('idToName %d -> ?', id)
            return FALL_THROUGH

        @fortifyIceFu("")
        @checkSecret
        def idToTexture(self, id, current=None):
            """
            Gets called to get the corresponding texture for a user
            """

            FALL_THROUGH = ""

            debug('idToTexture "%s" -> fall through', id)
            return FALL_THROUGH

        @fortifyIceFu(-2)
        @checkSecret
        def registerUser(self, name, current=None):
            """
            Gets called when the server is asked to register a user.
            """

            FALL_THROUGH = -2
            debug('registerUser "%s" -> fall through', name)
            return FALL_THROUGH

        @fortifyIceFu(-1)
        @checkSecret
        def unregisterUser(self, id, current=None):
            """
            Gets called when the server is asked to unregister a user.
            """

            FALL_THROUGH = -1
            # Return -1 to fall through to the internal server database; we will not
            # modify the smf database, but we can make murmur delete all additional
            # information it stored this way.
            debug('unregisterUser %d -> fall through', id)
            return FALL_THROUGH

        @fortifyIceFu({})
        @checkSecret
        def getRegisteredUsers(self, filter, current=None):
            """
            Returns a dict mapping user id to username for users in the
            AllianceAuth database whose username contains filter as a substring.
            """

            if not filter:
                filter = '%'

            try:
                sql = 'SELECT id, username FROM %sservices_mumbleuser WHERE username LIKE %%s' % cfg.database.prefix
                cur = threadDB.execute(sql, [filter])
            except threadDbException:
                return {}

            res = cur.fetchall()
            cur.close()
            if not res:
                debug('getRegisteredUsers -> empty list for filter "%s"', filter)
                return {}
            debug('getRegisteredUsers -> %d results for filter "%s"', len(res), filter)
            return dict([(a + cfg.user.id_offset, b) for a, b in res])

        @fortifyIceFu(-1)
        @checkSecret
        def setInfo(self, id, info, current=None):
            """
            Gets called when the server is supposed to save additional information
            about a user to its database
            """

            FALL_THROUGH = -1
            # Return -1 to fall through to the internal server handler. We must not modify
            # the smf database so the additional information is stored in murmurs database
            debug('setInfo %d -> fall through', id)
            return FALL_THROUGH

        @fortifyIceFu(-1)
        @checkSecret
        def setTexture(self, id, texture, current=None):
            """
            Gets called when the server is asked to update the user texture of a user
            """

            FALL_THROUGH = -1

            debug('setTexture %d -> fall through', id)
            return FALL_THROUGH

    class CustomLogger(Ice.Logger):
        """
        Logger implementation to pipe Ice log messages into
        our own log
        """

        def __init__(self):
            Ice.Logger.__init__(self)
            self._log = getLogger('Ice')

        def _print(self, message):
            self._log.info(message)

        def trace(self, category, message):
            self._log.debug('Trace %s: %s', category, message)

        def warning(self, message):
            self._log.warning(message)

        def error(self, message):
            self._log.error(message)

    #
    # --- Start of authenticator
    #
    info('Starting AllianceAuth mumble authenticator')
    initdata = Ice.InitializationData()
    initdata.properties = Ice.createProperties([], initdata.properties)
    for prop, val in cfg.iceraw:
        initdata.properties.setProperty(prop, val)

    initdata.properties.setProperty('Ice.ImplicitContext', 'Shared')
    initdata.properties.setProperty('Ice.Default.EncodingVersion', '1.0')
    initdata.logger = CustomLogger()

    app = allianceauthauthenticatorApp()
    state = app.main(sys.argv[:1], initData=initdata)
    info('Shutdown complete')
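
The CustomLogger class above adapts Ice's logging interface onto logging.getLogger('Ice'), so the library's output flows through the application's own handlers and levels. A minimal sketch of the same adapter idea against a hypothetical callback interface (on_info/on_warning/on_error are placeholders for whatever the library actually defines):

import logging

class LibraryLogAdapter(object):
    """Forward a third-party library's log callbacks into a stdlib logger.

    The callback names (on_info/on_warning/on_error) are hypothetical;
    substitute the interface the library defines.
    """

    def __init__(self, name='thirdparty'):
        self._log = logging.getLogger(name)

    def on_info(self, message):
        self._log.info(message)

    def on_warning(self, message):
        self._log.warning(message)

    def on_error(self, message):
        self._log.error(message)

logging.basicConfig(level=logging.INFO)
LibraryLogAdapter('Ice').on_warning('handshake took longer than expected')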

Example 14

Project: mpdlcd
Source File: display_pattern.py
View license
    def parse_line(self, line):
        """Parse a line of text.

        A format contains fields, within curly brackets, and free text.
        At most one 'variable width' field is allowed per line.
        You cannot use the '{' or '}' characters in the various text/... fields.

        Format:
            '''{<field_kind>[ <field_option>,<field_option>]} text {...}'''

        Example:
            '''{song text="%(artist)s",speed=4} {elapsed}'''
            '''{song text="%(title)s",speed=2} {mode}'''

        Args:
            line (str): the text to parse

        Returns:
            PatternLine: the parsed line pattern
        """
        logger.debug(u'Parsing line %s', line)

        OUT_FIELD = 0
        IN_FIELD_KIND = 1
        IN_FIELD_OPTION_NAME = 2
        IN_FIELD_OPTION_VALUE = 3

        class ParserState(object):
            """Holds the current state of the parser.

            Attributes:
                quote (str): the current quote character, or None
                escaping (bool): whether the next character should be escaped
                block (char list): the content of the current 'block'
                kind (str): the kind of the current field, or ''
                option_name (str): the name of the current option, or ''
                options (dict(str => str)): maps option name to option value for
                    the current field
                state (int): state of the parser, one of OUT_FIELD/IN_FIELD_*
                fields ((str, dict(str => str)) list): list of fields, as
                    (kind, options) tuples.
            """

            def __init__(self, logger=None):
                self.quote = None
                self.escaping = False
                self.block = []
                self.kind = ''
                self.option_name = ''
                self.options = {}
                self.state = OUT_FIELD
                self.fields = []
                if not logger:
                    logger = logging.getLogger('%s.parser' % __name__)
                self.logger = logger

            def _reset(self):
                """Reset buffered state (quote/escape/block)."""
                self.quote = None
                self.escaping = False
                self.block = []

            def _register_field(self, kind, options):
                """Register a completed field."""
                self.fields.append((kind, dict(options)))

            def debug(self, msg, *args, **kwargs):
                """Print a debug message."""
                self.logger.debug(msg, *args, **kwargs)

            def save_fixed_text(self):
                """Register a completed, fixed text, field."""
                assert self.state == OUT_FIELD
                self._register_field(FIXED_TEXT_FIELD,
                    {'text': u''.join(self.block)})

            def enter_field(self):
                """Enter a new field."""
                self.debug(u'Entering new field')
                self.state = IN_FIELD_KIND
                self.kind = ''
                self.options = {}
                self.option_name = ''
                self._reset()

            def leave_kind(self):
                """Leave the field kind."""
                self.state = IN_FIELD_OPTION_NAME
                self.kind = u''.join(self.block)
                self.debug(u"Got widget kind '%s'", self.kind)
                self._reset()

            def leave_option_name(self):
                """Leave an option name."""
                self.state = IN_FIELD_OPTION_VALUE
                self.option_name = u''.join(self.block)
                self.debug(u"Got option name '%s' for '%s'",
                    self.option_name, self.kind)
                self._reset()

            def leave_option_value(self):
                """Leave an option value."""
                self.state = IN_FIELD_OPTION_NAME
                option_value = u''.join(self.block)
                self.options[self.option_name] = option_value
                self.debug(u"Got option '%s=%s' for '%s'",
                    self.option_name, option_value, self.kind)
                self._reset()

            def leave_field(self):
                """Leave a field definition."""
                self.state = OUT_FIELD
                self._register_field(self.kind, self.options)
                self.debug(u"Got widget '%s(%s)'", self.kind,
                    u', '.join(u'%s=%r' % it for it in self.options.iteritems()))
                self._reset()

        st = ParserState()

        for pos, char in enumerate(line):

            # Escaping
            if st.escaping:
                st.escaping = False
                st.block.append(char)

            elif char == '\\':
                st.escaping = True

            # Quoting
            elif char in ['"', "'"]:
                if st.state == IN_FIELD_OPTION_VALUE:
                    if st.quote:  # Already in a quoted block
                        if char == st.quote:
                            st.leave_option_value()
                        else:
                            st.block.append(char)

                    elif not st.block:  # First char of the block
                        st.quote = char
                        continue

                    else:
                        raise FormatError("Unexpected '%s' at %d in %s" %
                            (char, pos, line))

                elif st.state == OUT_FIELD:
                    st.block.append(char)

                else:
                    raise FormatError("Unexpected '%s' at %d in %s" %
                        (char, pos, line))

            # Entering a field
            elif char == '{':
                if st.state == OUT_FIELD:
                    if st.block:
                        st.save_fixed_text()
                    st.enter_field()

                elif st.state == IN_FIELD_OPTION_VALUE and st.quote:
                    st.block.append(char)

                else:
                    raise FormatError("Unexpected '{' at %d in %s" % (pos, line))

            # Leaving a field
            elif char == '}':
                if st.state == IN_FIELD_KIND:
                    st.leave_kind()
                    st.leave_field()

                elif st.state == IN_FIELD_OPTION_NAME:
                    raise FormatError("Missing option value for %s at %d in %s"
                        % (''.join(st.block), pos, line))

                elif st.state == IN_FIELD_OPTION_VALUE:
                    if st.quote:
                        st.block.append(char)
                    else:
                        st.leave_option_value()
                        st.leave_field()

                elif st.state == OUT_FIELD:
                    raise FormatError("Unexpected '}' at %d in %s" % (pos, line))

            # Between kind and option name
            elif char == ' ':
                if st.state == IN_FIELD_KIND:
                    if not st.block:
                        raise FormatError("Missing field kind at %s in %s"
                            % (pos, line))

                    st.leave_kind()

                elif st.state == IN_FIELD_OPTION_VALUE and st.quote:
                    st.block.append(char)

                elif st.state == OUT_FIELD:
                    st.block.append(char)

                else:
                    raise FormatError("Unexpected ' ' at %d in %s" % (pos, line))

            # Between options
            elif char == ',':
                if st.state == IN_FIELD_OPTION_NAME:
                    if st.block:
                        raise FormatError("Missing option value for %s at %d in %s"
                            % (''.join(st.block), pos, line))
                    else:
                        # At the beginning of a new option
                        continue

                elif st.state == IN_FIELD_KIND:
                    raise FormatError(
                        "Unexpected ',' in field definition %s at %d in %s" %
                        (st.kind, pos, line))

                elif st.state == IN_FIELD_OPTION_VALUE:
                    if st.quote:
                        st.block.append(char)
                    elif st.block:
                        st.leave_option_value()
                    else:
                        raise FormatError(
                            "Missing option value for %s at %d in %s"
                            % (st.option_name, pos, line))

                else:  # OUT_FIELD
                    st.block.append(char)

            # Between option name and option value
            elif char == '=':
                if st.state == IN_FIELD_OPTION_NAME:
                    if st.block:
                        st.leave_option_name()

                    else:
                        raise FormatError(
                            "Missing option name at %d in %s" % (pos, line))

                elif st.state == OUT_FIELD:
                    st.block.append(char)

                elif st.state == IN_FIELD_OPTION_VALUE:
                    if st.quote:
                        st.block.append(char)

                    elif not st.block:
                        # At the beginning of an option
                        continue

                    else:
                        raise FormatError(
                            "Unexpected '=' in option value for %s at %d in %s"
                            % (st.option_name, pos, line))

                else:
                    raise FormatError("Unexpected '=' at %d in %s" % (pos, line))

            # Everything else
            else:
                st.block.append(char)

        # All input parsed
        if st.state != OUT_FIELD:
            raise FormatError("Unclosed field at %d in '%s'; block: '%s'"
                % (pos, line, ''.join(st.block)))

        if st.block:
            st.save_fixed_text()

        return st.fields
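
ParserState defaults its logger to logging.getLogger('%s.parser' % __name__), i.e. a child of the module's own logger. Because logger names form a dot-separated hierarchy, the child's records propagate to the parent's handlers while its level can be tuned independently. A short sketch of that behaviour (the dotted names are illustrative):

import logging

logging.basicConfig(level=logging.DEBUG)

module_log = logging.getLogger('mpdlcd.display_pattern')         # illustrative
parser_log = logging.getLogger('mpdlcd.display_pattern.parser')  # child logger

parser_log.setLevel(logging.WARNING)     # silence parser chatter only
module_log.debug('visible: module-level message')          # emitted
parser_log.debug('hidden: below the child logger level')   # suppressed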

Example 15

Project: rdfextras
Source File: apigen.py
View license
    def generate_api_doc(self, uri):
        '''Make autodoc documentation template string for a module

        Parameters
        ----------
        uri : string
            python location of module - e.g. 'sphinx.builder'

        Returns
        -------
        S : string
            Contents of API doc
        '''
        logger = logging.getLogger("mmf.sphinx.ext.apigen")

        # get the names of all classes and functions
        (modules, functions, classes, interfaces,
         others) = self._parse_module(uri)
        if not (len(modules) + len(functions) + len(classes) +
                len(interfaces)):
            logger.warning("Empty api - '%s'" % uri)

        # Make a shorter version of the uri that omits the package name for
        # titles 
        uri_short = re.sub(r'^%s\.' % self.package_name,'',uri)
        
        ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'

        if False:
            # Old code
            chap_title = uri_short
            ad += (chap_title+'\n'+ self.rst_section_levels[1] * len(chap_title)
                   + '\n\n')

            # Set the chapter title to read 'module' for all modules
            # except for the main packages
            if '.' in uri:
                title = 'Module: :mod:`' + uri + '`'
            else:
                title = ':mod:`' + uri + '`'
            ad += title + '\n' + self.rst_section_levels[2] * len(title)
        else:
            title = ':mod:`' + uri + '`'
            ad += title + '\n' + self.rst_section_levels[1] * len(title)
        
        ad += '\n\n.. currentmodule:: ' + uri + '\n\n'

        if self.autosummary and len(modules + functions 
                                    + classes.keys() + interfaces.keys()):
            ad += "\n.. autosummary::\n"
            ad += "\n"
            ad += "\n".join(
                ["\n".join("   " + _f for _f in modules)] +
                ["\n".join("   " + _f for _f in interfaces)] +
                ["\n".join("   " + _f for _f in classes)] +
                ["\n".join("   " + _f for _f in functions)])
            ad += "\n"

        has_inheritance = False
        for _c in classes.values() + interfaces.values():
            for _b in _c.__bases__:
                if _b is object:
                    continue
                else:
                    has_inheritance = True
                    break

        if has_inheritance:
            ad += '\nInheritance diagram for ``%s``:\n\n' % uri
            ad += '.. inheritance-diagram:: %s \n' % uri
            ad += '   :parts: 3\n'

        ad += '\n.. automodule:: ' + uri + '\n'

        if self.labels and modules:
            ad += '\n' + 'Modules' + '\n' + \
                  self.rst_section_levels[2] * 7 + '\n'

        for m in modules:
            if self.labels:
                m = "~" + ".".join([uri, m])
            else:
                m = ".%s<%s>" % (m, ".".join([uri, m]))
            if self.headings:
                ad += '\n:mod:`' + m + '`\n' \
                    + self.rst_section_levels[self.labels + 2] * \
                    (len(m) + 7) + '\n\n'

        if self.labels and interfaces:
            ad += '\n' + 'Interfaces' + '\n' + \
                  self.rst_section_levels[2] * 10 + '\n'

        for i in interfaces:
            if self.headings:
                ad += '\n:class:`' + i + '`\n' \
                    + self.rst_section_levels[self.labels + 2] * \
                    (len(i) + 9) + '\n\n'
            ad += '\n.. autointerface:: ' + i + '\n'
            # must NOT exclude from index to keep cross-refs working
            ad += '  :members:\n' \
                  '  :undoc-members:\n' \
                  '  :show-inheritance:\n'

            if self.inherited_members:
                ad += '  :inherited-members:\n'

        if self.labels and classes:
            ad += '\n' + 'Classes' + '\n' + \
                  self.rst_section_levels[2] * 7 + '\n'

        for c in classes:
            if self.headings:
                ad += '\n:class:`' + c + '`\n' \
                    + self.rst_section_levels[self.labels + 2] * \
                    (len(c) + 9) + '\n\n'
            ad += '\n.. autoclass:: ' + c + '\n'
            # must NOT exclude from index to keep cross-refs working
            ad += '  :members:\n' \
                  '  :undoc-members:\n' \
                  '  :show-inheritance:\n'
            if self.inherited_members:
                ad += '  :inherited-members:\n'

            if isinstance(classes[c], type):
                # New style class
                ad += '\n' \
                      '  .. automethod:: __init__\n'

        if self.labels and functions:
            ad += '\n' + 'Functions' + '\n' + \
                  self.rst_section_levels[2] * 9 + '\n\n'

        for f in functions:
            if self.headings:
                ad += '\n:func:`' + f + '`\n' \
                    + self.rst_section_levels[self.labels + 2] * \
                    (len(f) + 8) + '\n\n'
            # must NOT exclude from index to keep cross-refs working
            ad += '\n.. autofunction:: ' + uri + '.' + f + '\n\n'
        return ad
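
generate_api_doc obtains a dotted-name logger ('mmf.sphinx.ext.apigen') and warns when a module exposes nothing documentable; the rest of the work is string-building of reST autodoc stanzas. A heavily reduced sketch of the same flow, with introspection simplified to dir() and a hypothetical logger name:

import logging

def tiny_api_doc(module):
    """Return a minimal autodoc stub for *module*, warning when it is empty."""
    logger = logging.getLogger('docs.apigen')       # hypothetical dotted name
    public = [name for name in dir(module) if not name.startswith('_')]
    if not public:
        logger.warning("Empty api - '%s'", module.__name__)
    return '\n'.join(['.. automodule:: %s' % module.__name__,
                      '   :members:',
                      '   :undoc-members:',
                      ''])

if __name__ == '__main__':
    import json
    logging.basicConfig()
    print(tiny_api_doc(json))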

Example 16

Project: bleachbit
Source File: Cleaner.py
View license
    def get_commands(self, option_id):
        # This variable will collect fully expanded file names; at the
        # end of this function, they are checked for existence and
        # processed through Command.Delete().
        files = []

        # cache
        if 'posix' == os.name and 'cache' == option_id:
            dirname = os.path.expanduser("~/.cache/")
            for filename in children_in_directory(dirname, True):
                if self.whitelisted(filename):
                    continue
                files += [filename]

        # custom
        if 'custom' == option_id:
            for (c_type, c_path) in options.get_custom_paths():
                if 'file' == c_type:
                    files += [c_path]
                elif 'folder' == c_type:
                    files += [c_path]
                    for path in children_in_directory(c_path, True):
                        files += [path]
                else:
                    raise RuntimeError(
                        'custom folder has invalid type %s' % c_type)

        # menu
        menu_dirs = ['~/.local/share/applications',
                     '~/.config/autostart',
                     '~/.gnome/apps/',
                     '~/.gnome2/panel2.d/default/launchers',
                     '~/.gnome2/vfolders/applications/',
                     '~/.kde/share/apps/RecentDocuments/',
                     '~/.kde/share/mimelnk',
                     '~/.kde/share/mimelnk/application/ram.desktop',
                     '~/.kde2/share/mimelnk/application/',
                     '~/.kde2/share/applnk']

        if 'posix' == os.name and 'desktop_entry' == option_id:
            for dirname in menu_dirs:
                for filename in [fn for fn in children_in_directory(dirname, False)
                                 if fn.endswith('.desktop')]:
                    if Unix.is_broken_xdg_desktop(filename):
                        yield Command.Delete(filename)

        # unwanted locales
        if 'posix' == os.name and 'localizations' == option_id:
            for path in Unix.locales.localization_paths(locales_to_keep=options.get_languages()):
                if os.path.isdir(path):
                    for f in FileUtilities.children_in_directory(path, True):
                        yield Command.Delete(f)
                yield Command.Delete(path)

        # Windows logs
        if 'nt' == os.name and 'logs' == option_id:
            paths = (
                '$ALLUSERSPROFILE\\Application Data\\Microsoft\\Dr Watson\\*.log',
                '$ALLUSERSPROFILE\\Application Data\\Microsoft\\Dr Watson\\user.dmp',
                '$LocalAppData\\Microsoft\\Windows\\WER\\ReportArchive\\*\\*',
                '$LocalAppData\\Microsoft\\Windows\\WER\\ReportQueue\\*\\*',
                '$programdata\\Microsoft\\Windows\\WER\\ReportArchive\\*\\*',
                '$programdata\\Microsoft\\Windows\\WER\\ReportQueue\\*\\*',
                '$localappdata\\Microsoft\\Internet Explorer\\brndlog.bak',
                '$localappdata\\Microsoft\\Internet Explorer\\brndlog.txt',
                '$windir\\*.log',
                '$windir\\imsins.BAK',
                '$windir\\OEWABLog.txt',
                '$windir\\SchedLgU.txt',
                '$windir\\ntbtlog.txt',
                '$windir\\setuplog.txt',
                '$windir\\REGLOCS.OLD',
                '$windir\\Debug\\*.log',
                '$windir\\Debug\\Setup\\UpdSh.log',
                '$windir\\Debug\\UserMode\\*.log',
                '$windir\\Debug\\UserMode\\ChkAcc.bak',
                '$windir\\Debug\\UserMode\\userenv.bak',
                '$windir\\Microsoft.NET\\Framework\\*\\*.log',
                '$windir\\pchealth\\helpctr\\Logs\\hcupdate.log',
                '$windir\\security\\logs\\*.log',
                '$windir\\security\\logs\\*.old',
                '$windir\\SoftwareDistribution\\*.log',
                '$windir\\SoftwareDistribution\\DataStore\\Logs\\*',
                '$windir\\system32\\TZLog.log',
                '$windir\\system32\\config\\systemprofile\\Application Data\\Microsoft\\Internet Explorer\\brndlog.bak',
                '$windir\\system32\\config\\systemprofile\\Application Data\\Microsoft\\Internet Explorer\\brndlog.txt',
                '$windir\\system32\\LogFiles\\AIT\\AitEventLog.etl.???',
                '$windir\\system32\\LogFiles\\Firewall\\pfirewall.log*',
                '$windir\\system32\\LogFiles\\Scm\\SCM.EVM*',
                '$windir\\system32\\LogFiles\\WMI\\Terminal*.etl',
                '$windir\\system32\\LogFiles\\WMI\\RTBackup\\EtwRT.*etl',
                '$windir\\system32\\wbem\\Logs\\*.lo_',
                '$windir\\system32\\wbem\\Logs\\*.log', )

            for path in paths:
                expanded = expandvars(path)
                for globbed in glob.iglob(expanded):
                    files += [globbed]

        # memory
        if sys.platform.startswith('linux') and 'memory' == option_id:
            yield Command.Function(None, Memory.wipe_memory, _('Memory'))

        # memory dump
        # how to manually create this file
        # http://www.pctools.com/guides/registry/detail/856/
        if 'nt' == os.name and 'memory_dump' == option_id:
            fname = expandvars('$windir\\memory.dmp')
            if os.path.exists(fname):
                files += [fname]
            for fname in glob.iglob(expandvars('$windir\\Minidump\\*.dmp')):
                files += [fname]

        # most recently used documents list
        if 'posix' == os.name and 'recent_documents' == option_id:
            files += [os.path.expanduser("~/.recently-used")]
            # GNOME 2.26 (as seen on Ubuntu 9.04) will retain the list
            # in memory if it is simply deleted, so it must be shredded
            # (or at least truncated).
            #
            # GNOME 2.28.1 (Ubuntu 9.10) and 2.30 (10.04) do not re-read
            # the file after truncation, but do re-read it after
            # shredding.
            #
            # https://bugzilla.gnome.org/show_bug.cgi?id=591404
            for pathname in ["~/.recently-used.xbel", "~/.local/share/recently-used.xbel"]:
                pathname = os.path.expanduser(pathname)
                if os.path.lexists(pathname):
                    yield Command.Shred(pathname)
                    if HAVE_GTK:
                        gtk.RecentManager().purge_items()

        if 'posix' == os.name and 'rotated_logs' == option_id:
            for path in Unix.rotated_logs():
                yield Command.Delete(path)

        # temporary files
        if 'posix' == os.name and 'tmp' == option_id:
            dirnames = ['/tmp', '/var/tmp']
            for dirname in dirnames:
                for path in children_in_directory(dirname, True):
                    is_open = FileUtilities.openfiles.is_open(path)
                    ok = not is_open and os.path.isfile(path) and \
                        not os.path.islink(path) and \
                        FileUtilities.ego_owner(path) and \
                        not self.whitelisted(path)
                    if ok:
                        yield Command.Delete(path)

        # temporary files
        if 'nt' == os.name and 'tmp' == option_id:
            dirname = expandvars(
                "$USERPROFILE\\Local Settings\\Temp\\")
            # whitelist the folder %TEMP%\Low but not its contents
            # https://bugs.launchpad.net/bleachbit/+bug/1421726
            low = os.path.join(dirname, 'low').lower()
            for filename in children_in_directory(dirname, True):
                if not low == filename.lower():
                    yield Command.Delete(filename)
            dirname = expandvars("$windir\\temp\\")
            for filename in children_in_directory(dirname, True):
                yield Command.Delete(filename)

        # trash
        if 'posix' == os.name and 'trash' == option_id:
            dirname = os.path.expanduser("~/.Trash")
            for filename in children_in_directory(dirname, False):
                yield Command.Delete(filename)
            # fixme http://www.ramendik.ru/docs/trashspec.html
            # http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html
            # ~/.local/share/Trash
            # * GNOME 2.22, Fedora 9
            # * KDE 4.1.3, Ubuntu 8.10
            dirname = os.path.expanduser("~/.local/share/Trash/files")
            for filename in children_in_directory(dirname, True):
                yield Command.Delete(filename)
            dirname = os.path.expanduser("~/.local/share/Trash/info")
            for filename in children_in_directory(dirname, True):
                yield Command.Delete(filename)
            dirname = os.path.expanduser("~/.local/share/Trash/expunged")
            # A GNOME developer reports that the trash backend puts files
            # in here temporarily, but in some situations the files get
            # stuck.
            for filename in children_in_directory(dirname, True):
                yield Command.Delete(filename)

        # clipboard
        if HAVE_GTK and 'clipboard' == option_id:
            def clear_clipboard():
                gtk.gdk.threads_enter()
                clipboard = gtk.clipboard_get()
                clipboard.set_text("")
                gtk.gdk.threads_leave()
                return 0
            yield Command.Function(None, clear_clipboard, _('Clipboard'))

        # overwrite free space
        shred_drives = options.get_list('shred_drives')
        if 'free_disk_space' == option_id and shred_drives:
            for pathname in shred_drives:
                # TRANSLATORS: 'Free' means 'unallocated.'
                # %s expands to a path such as C:\ or /tmp/
                display = _("Overwrite free disk space %s") % pathname

                def wipe_path_func():
                    for ret in FileUtilities.wipe_path(pathname, idle=True):
                        # Yield control to GTK idle because this process
                        # is very slow.  Also display progress.
                        yield ret
                    yield 0
                yield Command.Function(None, wipe_path_func, display)

        # MUICache
        if 'nt' == os.name and 'muicache' == option_id:
            keys = (
                'HKCU\\Software\\Microsoft\\Windows\\ShellNoRoam\\MUICache',
                'HKCU\\Software\\Classes\\Local Settings\\Software\\Microsoft\\Windows\\Shell\\MuiCache')
            for key in keys:
                yield Command.Winreg(key, None)

        # prefetch
        if 'nt' == os.name and 'prefetch' == option_id:
            for path in glob.iglob(expandvars('$windir\\Prefetch\\*.pf')):
                yield Command.Delete(path)

        # recycle bin
        if 'nt' == os.name and 'recycle_bin' == option_id:
            # This method allows shredding
            for path in Windows.get_recycle_bin():
                yield Command.Delete(path)
            # If there were any files deleted, Windows XP will show the
            # wrong icon for the recycle bin indicating it is not empty.
            # The icon will be incorrect until logging in to Windows again
            # or until it is emptied using the Windows API call for emptying
            # the recycle bin.

            # Windows 10 refreshes the recycle bin icon when the user
            # opens the recycle bin folder.

            # This is a hack to refresh the icon.
            import tempfile
            tmpdir = tempfile.mkdtemp()
            Windows.move_to_recycle_bin(tmpdir)
            try:
                Windows.empty_recycle_bin(None, True)
            except:
                logger = logging.getLogger(__name__)
                logger.info('error in empty_recycle_bin()', exc_info=True)

        # Windows Updates
        if 'nt' == os.name and 'updates' == option_id:
            for wu in Windows.delete_updates():
                yield wu

        # return queued files
        for filename in files:
            if os.path.lexists(filename):
                yield Command.Delete(filename)
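
The recycle-bin branch above logs a swallowed exception with exc_info=True, which attaches the full traceback to an INFO record instead of letting the error propagate. A compact sketch of that technique (best_effort is a hypothetical helper):

import logging

logger = logging.getLogger(__name__)

def best_effort(action):
    """Run *action*; on failure, record the traceback but keep going."""
    try:
        action()
    except Exception:
        # exc_info=True attaches the current traceback to the log record
        logger.info('error in %s()', getattr(action, '__name__', '?'),
                    exc_info=True)

logging.basicConfig(level=logging.INFO)
best_effort(lambda: 1 / 0)    # logs the ZeroDivisionError, does not raise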

Example 17

Project: bleachbit
Source File: FileUtilities.py
View license
def wipe_path(pathname, idle=False):
    """Wipe the free space in the path
    This function uses an iterator to update the GUI."""

    logger = logging.getLogger(__name__)

    def temporaryfile():
        # reference
        # http://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits
        maxlen = 245
        f = None
        while True:
            try:
                kwargs = {
                    'dir': pathname, 'suffix': __random_string(maxlen)}
                if sys.hexversion >= 0x02060000:
                    kwargs['delete'] = False
                f = tempfile.NamedTemporaryFile(**kwargs)
                # In case the application closes prematurely, make sure this
                # file is deleted
                atexit.register(
                    delete, f.name, allow_shred=False, ignore_missing=True)
                break
            except OSError, e:
                if e.errno in (errno.ENAMETOOLONG, errno.ENOSPC, errno.ENOENT):
                    # ext3 on Linux 3.5 returns ENOSPC if the full path is greater than 264.
                    # Shrinking the size helps.

                    # Microsoft Windows returns ENOENT "No such file or directory"
                    # when the path is too long such as %TEMP% but not in C:\
                    if maxlen > 5:
                        maxlen -= 5
                        continue
                raise
        return f

    def estimate_completion():
        """Return (percent, seconds) to complete"""
        remaining_bytes = free_space(pathname)
        done_bytes = start_free_bytes - remaining_bytes
        if done_bytes < 0:
            # maybe user deleted large file after starting wipe
            done_bytes = 0
        if 0 == start_free_bytes:
            done_percent = 0
        else:
            done_percent = 1.0 * done_bytes / (start_free_bytes + 1)
        done_time = time.time() - start_time
        rate = done_bytes / (done_time + 0.0001)  # bytes per second
        remaining_seconds = int(remaining_bytes / (rate + 0.0001))
        return (1, done_percent, remaining_seconds)

    logger.debug("wipe_path('%s')", pathname)
    files = []
    total_bytes = 0
    start_free_bytes = free_space(pathname)
    start_time = time.time()
    # Because FAT32 has a maximum file size of 4,294,967,295 bytes,
    # this loop is sometimes necessary to create multiple files.
    while True:
        try:
            logger.debug('creating new, temporary file to wipe path')
            f = temporaryfile()
        except OSError, e:
            # Linux gives errno 24
            # Windows gives errno 28 No space left on device
            if e.errno in (errno.EMFILE, errno.ENOSPC):
                break
            else:
                raise
        last_idle = time.time()
        # Write large blocks to quickly fill the disk.
        blanks = chr(0) * 65535
        while True:
            try:
                f.write(blanks)
            except IOError, e:
                if e.errno == errno.ENOSPC:
                    if len(blanks) > 1:
                        # Try writing smaller blocks
                        blanks = blanks[0: (len(blanks) / 2)]
                    else:
                        break
                elif e.errno != errno.EFBIG:
                    raise
            if idle and (time.time() - last_idle) > 2:
                # Keep the GUI responding, and allow the user to abort.
                # Also display the ETA.
                yield estimate_completion()
                last_idle = time.time()
        # Write to OS buffer
        try:
            f.flush()
        except:
            # IOError: [Errno 28] No space left on device
            # seen on Microsoft Windows XP SP3 with ~30GB free space but
            # not on another XP SP3 with 64MB free space
            logger.info("info: exception on f.flush()")
        os.fsync(f.fileno())  # write to disk
        # Remember to delete
        files.append(f)
        # For statistics
        total_bytes += f.tell()
        # If no bytes were written, then quit
        if f.tell() < 1:
            break
    # sync to disk
    sync()
    # statistics
    elapsed_sec = time.time() - start_time
    rate_mbs = (total_bytes / (1000 * 1000)) / elapsed_sec
    logger.info('wrote %d files and %d bytes in %d seconds at %.2f MB/s',
                len(files), total_bytes, elapsed_sec, rate_mbs)
    # how much free space is left (should be near zero)
    if 'posix' == os.name:
        stats = os.statvfs(pathname)
        logger.info('%d bytes and %d inodes available to non-super-user',
                    stats.f_bsize * stats.f_bavail, stats.f_favail)
        logger.info('%d bytes and %d inodes available to super-user',
                    stats.f_bsize * stats.f_bfree, stats.f_ffree)
    # truncate and close files
    for f in files:
        f.truncate(0)

        while True:
            try:
                # Nikita: I noticed a bug that prevented file handles from
                # being closed on FAT32. It sometimes takes two .close() calls
                # to actually close (and therefore delete) a temporary file.
                f.close()
                break
            except IOError, e:
                if e.errno == 0:
                    logger.debug('handled unknown error 0')
                    time.sleep(0.1)
        # explicitly delete
        # Python 2.5.4 always deletes NamedTemporaryFile, so
        # ignore missing in older Python here.
        delete(f.name, ignore_missing=sys.hexversion < 0x02060000)
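
wipe_path pairs a module-level logging.getLogger(__name__) with a generator that periodically yields progress so a GUI can stay responsive, then logs throughput at the end. A stripped-down sketch of that shape, with time.sleep() standing in for the real work:

import logging
import time

logger = logging.getLogger(__name__)

def long_task(chunks=10):
    """Yield (fraction_done, seconds_remaining) after each unit of work."""
    start = time.time()
    for i in range(chunks):
        time.sleep(0.01)                           # stand-in for real work
        done = float(i + 1) / chunks
        rate = done / (time.time() - start + 1e-4) # fraction per second
        yield done, int((1.0 - done) / rate)
    logger.info('finished in %.2f seconds', time.time() - start)

logging.basicConfig(level=logging.INFO)
for fraction, eta in long_task():
    pass    # a GUI would update its progress bar and ETA display here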

Example 18

Project: pyx12
Source File: node_iterator.py
View license
def x12n_iterator(param, src_file, map_path=None):
    logger = logging.getLogger('pyx12')
    errh = pyx12.error_handler.errh_null()

    # Get X12 DATA file
    try:
        src = pyx12.x12file.X12Reader(src_file)
    except pyx12.errors.X12Error:
        logger.error('"%s" does not look like an X12 data file' % (src_file))
        return False

    #Get Map of Control Segments
    map_file = 'x12.control.00501.xml' if src.icvn == '00501' else 'x12.control.00401.xml'
    logger.debug('X12 control file: %s' % (map_file))
    control_map = pyx12.map_if.load_map_file(map_file, param, map_path)
    map_index_if = pyx12.map_index.map_index(map_path)
    node = control_map.getnodebypath('/ISA_LOOP/ISA')
    walker = walk_tree()
    icvn = fic = vriic = tspc = None
    cur_map = None  # we do not initially know the X12 transaction type

    res = {}
    res_ordinal = 0
    last_x12_segment_path = None
    for seg in src:
        #find node
        orig_node = node

        if False:
            print('--------------------------------------------')
            print(seg)
            print('--------------------------------------------')
            # reset to control map for ISA and GS loops
            print('------- counters before --------')
            print(walker.counter._dict)
        if seg.get_seg_id() == 'ISA':
            node = control_map.getnodebypath('/ISA_LOOP/ISA')
            walker.forceWalkCounterToLoopStart('/ISA_LOOP', '/ISA_LOOP/ISA')
        elif seg.get_seg_id() == 'GS':
            node = control_map.getnodebypath('/ISA_LOOP/GS_LOOP/GS')
            walker.forceWalkCounterToLoopStart('/ISA_LOOP/GS_LOOP', '/ISA_LOOP/GS_LOOP/GS')
        else:
            # from the current node, find the map node matching the segment
            # keep track of the loops traversed
            try:
                (node, pop_loops, push_loops) = walker.walk(node, seg, errh, src.get_seg_count(), src.get_cur_line(), src.get_ls_id())
            except pyx12.errors.EngineError:
                logger.error('Source file line %i' % (src.get_cur_line()))
                raise

        if False:
            print('------- counters after --------')
            print(walker.counter._dict)
        if node is None:
            node = orig_node
        else:
            if seg.get_seg_id() == 'ISA':
                icvn = seg.get_value('ISA12')
            elif seg.get_seg_id() == 'IEA':
                pass
            elif seg.get_seg_id() == 'GS':
                fic = seg.get_value('GS01')
                vriic = seg.get_value('GS08')
                map_file_new = map_index_if.get_filename(icvn, vriic, fic)
                if map_file != map_file_new:
                    map_file = map_file_new
                    if map_file is None:
                        err_str = "Map not found.  icvn={}, fic={}, vriic={}".format(icvn, fic, vriic)
                        raise pyx12.errors.EngineError(err_str)
                    cur_map = pyx12.map_if.load_map_file(map_file, param, map_path)
                    src.check_837_lx = True if cur_map.id == '837' else False
                    logger.debug('Map file: %s' % (map_file))
                node = cur_map.getnodebypath('/ISA_LOOP/GS_LOOP/GS')
            elif seg.get_seg_id() == 'BHT':
                # special case for 4010 837P
                if vriic in ('004010X094', '004010X094A1'):
                    tspc = seg.get_value('BHT02')
                    logger.debug('icvn=%s, fic=%s, vriic=%s, tspc=%s' %
                                 (icvn, fic, vriic, tspc))
                    map_file_new = map_index_if.get_filename(icvn, vriic, fic, tspc)
                    logger.debug('New map file: %s' % (map_file_new))
                    if map_file != map_file_new:
                        map_file = map_file_new
                        if map_file is None:
                            err_str = "Map not found.  icvn={}, fic={}, vriic={}, tspc={}".format(
                                        icvn, fic, vriic, tspc)
                            raise pyx12.errors.EngineError(err_str)
                        cur_map = pyx12.map_if.load_map_file(map_file, param, map_path)
                        src.check_837_lx = True if cur_map.id == '837' else False
                        logger.debug('Map file: %s' % (map_file))
                        #apply_loop_count(node, cur_map)
                        node = cur_map.getnodebypath('/ISA_LOOP/GS_LOOP/ST_LOOP/HEADER/BHT')
            #elif seg.get_seg_id() == 'GE':
            #    pass
            #elif seg.get_seg_id() == 'ST':
            #    pass
            #elif seg.get_seg_id() == 'SE':
            #    pass
            else:
                pass

        x12path = node.get_path()
        #parent
        if x12path in res:
            res[x12path]['Count'] += 1
            if last_x12_segment_path not in res[x12path]['prefix_nodes']:
                res[x12path]['prefix_nodes'].append(last_x12_segment_path)
        else:
            res[x12path] = {
                'Ordinal': res_ordinal,
                'Count': 1,
                'NodeType': node.base_name,
                'Id': node.id,
                'Name': node.name,
                'FormattedName': clean_name(node.name),
                'ParentName': clean_name(node.parent.name),
                'LoopMaxUse': node.max_use,
                'ParentPath': node.parent.get_path(),
                'prefix_nodes': [last_x12_segment_path]
            }
            res_ordinal += 1
            
        for (refdes, ele_ord, comp_ord, val) in seg.values_iterator():
            elepath = node.parent.get_path() + '/' + refdes
            if elepath in res:
                res[elepath]['Count'] += 1
            else:
                ele_node = node.getnodebypath2(refdes)
                #node.get_child_node_by_ordinal(
                res[elepath] = {
                    'Ordinal': res_ordinal,
                    'Count': 1,
                    'NodeType': ele_node.base_name,
                    'Id': ele_node.id,
                    'Name': ele_node.name,
                    'FormattedName': clean_name(ele_node.name),
                    'ParentName': clean_name(ele_node.parent.name),
                    #'max_use': ele_node.max_use,
                    'ParentPath': ele_node.parent.get_path(),
                    'Usage': ele_node.usage,
                    'DataType': ele_node.data_type,
                    'MinLength': ele_node.min_len,
                    'MaxLength': ele_node.max_len,
                }
                res_ordinal += 1

            #print (refdes, val)
        last_x12_segment_path = x12path

    del node
    del src
    del control_map
    try:
        del cur_map
    except UnboundLocalError:
        pass
    return res
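
Both pyx12 examples use the same idiom: a shared library-wide logger named 'pyx12', and, on an engine error, logging the source line for context before re-raising so the caller still sees the original exception. A minimal sketch (walk_segment and _DemoWalker are hypothetical stand-ins for pyx12's walker API):

import logging

logger = logging.getLogger('pyx12')     # one shared logger name for the library

def walk_segment(walker, seg, line_no):
    """Advance *walker*; add source-line context to errors without hiding them."""
    try:
        return walker.walk(seg)
    except Exception:
        logger.error('Source file line %i', line_no)
        raise                           # re-raise with the original traceback

class _DemoWalker(object):              # hypothetical stand-in for the walker
    def walk(self, seg):
        raise ValueError('bad segment: %r' % (seg,))

logging.basicConfig()
try:
    walk_segment(_DemoWalker(), 'ISA*00', 42)
except ValueError:
    pass                                # the caller still sees the original error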

Example 19

Project: pyx12
Source File: x12metadata.py
View license
def get_x12file_metadata(param, src_file, map_path=None):
    logger = logging.getLogger('pyx12')
    errh = pyx12.error_handler.errh_null()

    # Get X12 DATA file
    try:
        src = pyx12.x12file.X12Reader(src_file)
    except pyx12.errors.X12Error:
        logger.error('"%s" does not look like an X12 data file' % (src_file))
        return (False, None, None)

    #Get Map of Control Segments
    map_file = 'x12.control.00501.xml' if src.icvn == '00501' else 'x12.control.00401.xml'
    logger.debug('X12 control file: %s' % (map_file))
    control_map = pyx12.map_if.load_map_file(map_file, param, map_path)
    map_index_if = pyx12.map_index.map_index(map_path)
    node = control_map.getnodebypath('/ISA_LOOP/ISA')
    walker = walk_tree()
    icvn = fic = vriic = tspc = None
    cur_map = None  # we do not initially know the X12 transaction type

    isa_data = {}
    node_summary = {}
    node_ordinal = 0
    last_x12_segment_path = None
    for seg in src:
        orig_node = node
        if seg.get_seg_id() == 'ISA':
            node = control_map.getnodebypath('/ISA_LOOP/ISA')
            walker.forceWalkCounterToLoopStart('/ISA_LOOP', '/ISA_LOOP/ISA')
        elif seg.get_seg_id() == 'GS':
            node = control_map.getnodebypath('/ISA_LOOP/GS_LOOP/GS')
            walker.forceWalkCounterToLoopStart('/ISA_LOOP/GS_LOOP', '/ISA_LOOP/GS_LOOP/GS')
        else:
            # from the current node, find the map node matching the segment
            # keep track of the loops traversed
            try:
                (node, pop_loops, push_loops) = walker.walk(node, seg, errh, src.get_seg_count(), src.get_cur_line(), src.get_ls_id())
            except pyx12.errors.EngineError:
                logger.error('Source file line %i' % (src.get_cur_line()))
                raise
        if node is None:
            raise pyx12.errors.EngineError("Node not found")
        
        if seg.get_seg_id() == 'ISA':
            icvn = seg.get_value('ISA12')
        elif seg.get_seg_id() == 'IEA':
            pass
        elif seg.get_seg_id() == 'GS':
            fic = seg.get_value('GS01')
            vriic = seg.get_value('GS08')
            map_file_new = map_index_if.get_filename(icvn, vriic, fic)
            if map_file != map_file_new:
                map_file = map_file_new
                if map_file is None:
                    err_str = "Map not found.  icvn={}, fic={}, vriic={}".format(icvn, fic, vriic)
                    raise pyx12.errors.EngineError(err_str)
                cur_map = pyx12.map_if.load_map_file(map_file, param, map_path)
                src.check_837_lx = True if cur_map.id == '837' else False
                logger.debug('Map file: %s' % (map_file))
            node = cur_map.getnodebypath('/ISA_LOOP/GS_LOOP/GS')
        elif seg.get_seg_id() == 'BHT':
            # special case for 4010 837P
            if vriic in ('004010X094', '004010X094A1'):
                tspc = seg.get_value('BHT02')
                logger.debug('icvn=%s, fic=%s, vriic=%s, tspc=%s' %
                                (icvn, fic, vriic, tspc))
                map_file_new = map_index_if.get_filename(icvn, vriic, fic, tspc)
                logger.debug('New map file: %s' % (map_file_new))
                if map_file != map_file_new:
                    map_file = map_file_new
                    if map_file is None:
                        err_str = "Map not found.  icvn={}, fic={}, vriic={}, tspc={}".format(
                                    icvn, fic, vriic, tspc)
                        raise pyx12.errors.EngineError(err_str)
                    cur_map = pyx12.map_if.load_map_file(map_file, param, map_path)
                    src.check_837_lx = True if cur_map.id == '837' else False
                    logger.debug('Map file: %s' % (map_file))
                    node = cur_map.getnodebypath('/ISA_LOOP/GS_LOOP/ST_LOOP/HEADER/BHT')

        if seg.get_seg_id() == 'ISA':
            isa_data = {
                'InterchangeSenderIDQualifier': seg.get_value('ISA05'),
                'InterchangeSenderID': seg.get_value('ISA06'),
                'InterchangeReceiverIDQualifier': seg.get_value('ISA07'),
                'InterchangeReceiverID': seg.get_value('ISA08'),
                'InterchangeDate': seg.get_value('ISA09'),
                'InterchangeTime': seg.get_value('ISA10'),
                'InterchangeControlStandardsIdentifier': seg.get_value('ISA11'),
                'InterchangeControlVersionNumber': seg.get_value('ISA12'),
                'InterchangeControlNumber': seg.get_value('ISA13'),
                'AcknowledgmentRequested': seg.get_value('ISA14'),
                'UsageIndicator': seg.get_value('ISA15'),
                'GSLoops': []
                }
            icvn = isa_data['InterchangeControlVersionNumber']
        elif seg.get_seg_id() == 'IEA':
            isa_data['NumberofIncludedFunctionalGroups'] = seg.get_value('IEA01')
        elif seg.get_seg_id() == 'GS':
            gs_data = {
                'FunctionalGroupHeader': seg.get_value('GS01'),
                'ApplicationSendersCode': seg.get_value('GS02'),
                'ApplicationReceiversCode': seg.get_value('GS03'),
                'FunctionalGroupDate': seg.get_value('GS04'),
                'FunctionalGroupTime': seg.get_value('GS05'),
                'GroupControlNumber': seg.get_value('GS06'),
                'ResponsibleAgencyCode': seg.get_value('GS07'),
                'VersionReleaseIndustryIdentifierCode': seg.get_value('GS08'),
                'STLoops': []
                }
        elif seg.get_seg_id() == 'GE':
            gs_data['NumberofTransactionSetsIncluded'] = seg.get_value('GE01')
            isa_data['GSLoops'].append(gs_data)
        elif seg.get_seg_id() == 'ST':
            st_data = {
                'TransactionSetIdentifierCode': seg.get_value('ST01'),
                'TransactionSetControlNumber': seg.get_value('ST02'),
                'ImplementationConventionReference': seg.get_value('ST03'),
                }
        elif seg.get_seg_id() == 'SE':
            st_data['TransactionSegmentCount'] = seg.get_value('SE01')
            gs_data['STLoops'].append(st_data)
        elif seg.get_seg_id() == 'BHT':
            st_data['HierarchicalStructureCode'] = seg.get_value('BHT01')
            st_data['TransactionSetPurposeCode'] = seg.get_value('BHT02')
            st_data['OriginatorApplicationTransactionIdentifier'] = seg.get_value('BHT03')
            st_data['TransactionSetCreationDate'] = seg.get_value('BHT04')
            st_data['TransactionSetCreationTime'] = seg.get_value('BHT05')
            st_data['ClaimorEncounterIdentifier'] = seg.get_value('BHT06')

        x12path = node.get_path()
        #parent
        if x12path in node_summary:
            node_summary[x12path]['Count'] += 1
            if last_x12_segment_path not in node_summary[x12path]['prefix_nodes']:
                node_summary[x12path]['prefix_nodes'].append(last_x12_segment_path)
        else:
            node_summary[x12path] = {
                'Ordinal': node_ordinal,
                'Count': 1,
                'NodeType': node.base_name,
                'Id': node.id,
                'Name': node.name,
                'ParentName': node.parent.name,
                'LoopMaxUse': node.max_use,
                'ParentPath': node.parent.get_path(),
                'prefix_nodes': [last_x12_segment_path]
            }
            node_ordinal += 1
            
        for (refdes, ele_ord, comp_ord, val) in seg.values_iterator():
            ele_node = node.getnodebypath2(refdes)
            if ele_node.is_composite():
                ele_node = ele_node.get_child_node_by_ordinal(1)
            elepath = ele_node.get_path()

            if elepath in node_summary:
                node_summary[elepath]['Count'] += 1
            else:
                node_summary[elepath] = {
                    'Ordinal': node_ordinal,
                    'Count': 1,
                    'NodeType': ele_node.base_name,
                    'Id': ele_node.id,
                    'Name': ele_node.name,
                    'ParentName': ele_node.parent.name,
                    'ParentPath': ele_node.parent.get_path(),
                    'Usage': ele_node.usage,
                    'DataType': ele_node.data_type,
                    'MinLength': ele_node.min_len,
                    'MaxLength': ele_node.max_len,
                }
                node_ordinal += 1
        last_x12_segment_path = x12path
    return (True, isa_data, node_summary)
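
get_x12file_metadata() only fetches the named 'pyx12' logger and never attaches handlers, so output appears only if the calling application configures that logger. A minimal caller-side setup might look like this (a sketch, not part of pyx12):

import logging

# Attach a root handler via basicConfig; records from the 'pyx12'
# logger propagate up to it.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s %(name)s %(levelname)s %(message)s')
logging.getLogger('pyx12').setLevel(logging.DEBUG)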

Example 20

Project: pyx12
Source File: x12n_document.py
View license
def x12n_document(param, src_file, fd_997, fd_html,
                  fd_xmldoc=None, xslt_files=None, map_path=None):
    """
    Primary X12 validation function
    @param param: pyx12.param instance
    @param src_file: Source document
    @type src_file: string
    @param fd_997: 997/999 output document
    @type fd_997: file descriptor
    @param fd_html: HTML output document
    @type fd_html: file descriptor
    @param fd_xmldoc: XML output document
    @type fd_xmldoc: file descriptor
    @rtype: boolean
    """
    logger = logging.getLogger('pyx12')
    errh = pyx12.error_handler.err_handler()

    # Get X12 DATA file
    try:
        src = pyx12.x12file.X12Reader(src_file)
    except pyx12.errors.X12Error:
        logger.error('"%s" does not look like an X12 data file' % (src_file))
        return False

    #Get Map of Control Segments
    map_file = 'x12.control.00501.xml' if src.icvn == '00501' else 'x12.control.00401.xml'
    logger.debug('X12 control file: %s' % (map_file))
    control_map = pyx12.map_if.load_map_file(map_file, param, map_path)
    map_index_if = pyx12.map_index.map_index(map_path)
    node = control_map.getnodebypath('/ISA_LOOP/ISA')
    walker = walk_tree()
    icvn = fic = vriic = tspc = None
    cur_map = None  # we do not initially know the X12 transaction type
    #XXX Generate TA1 if needed.

    if fd_html:
        html = pyx12.error_html.error_html(errh, fd_html, src.get_term())
        html.header()
        err_iter = pyx12.error_handler.err_iter(errh)
    if fd_xmldoc:
        xmldoc = pyx12.x12xml_simple.x12xml_simple(fd_xmldoc, param.get('simple_dtd'))

    #basedir = os.path.dirname(src_file)
    #erx = errh_xml.err_handler(basedir=basedir)

    valid = True
    for seg in src:
        #find node
        orig_node = node

        if False:
            print('--------------------------------------------')
            print(seg)
            print('--------------------------------------------')
            # reset to control map for ISA and GS loops
            print('------- counters before --------')
            print(walker.counter._dict)
        if seg.get_seg_id() == 'ISA':
            node = control_map.getnodebypath('/ISA_LOOP/ISA')
            walker.forceWalkCounterToLoopStart('/ISA_LOOP', '/ISA_LOOP/ISA')
        elif seg.get_seg_id() == 'GS':
            node = control_map.getnodebypath('/ISA_LOOP/GS_LOOP/GS')
            walker.forceWalkCounterToLoopStart('/ISA_LOOP/GS_LOOP', '/ISA_LOOP/GS_LOOP/GS')
        else:
            # from the current node, find the map node matching the segment
            # keep track of the loops traversed
            try:
                (node, pop_loops, push_loops) = walker.walk(node, seg, errh,
                    src.get_seg_count(), src.get_cur_line(), src.get_ls_id())
            except pyx12.errors.EngineError:
                logger.error('Source file line %i' % (src.get_cur_line()))
                raise

        if False:
            print('------- counters after --------')
            print(walker.counter._dict)
        if node is None:
            node = orig_node
        else:
            if seg.get_seg_id() == 'ISA':
                errh.add_isa_loop(seg, src)
                icvn = seg.get_value('ISA12')
                errh.handle_errors(src.pop_errors())
            elif seg.get_seg_id() == 'IEA':
                errh.handle_errors(src.pop_errors())
                errh.close_isa_loop(node, seg, src)
                # Generate 997
                #XXX Generate TA1 if needed.
            elif seg.get_seg_id() == 'GS':
                fic = seg.get_value('GS01')
                vriic = seg.get_value('GS08')
                map_file_new = map_index_if.get_filename(icvn, vriic, fic)
                if map_file != map_file_new:
                    map_file = map_file_new
                    if map_file is None:
                        err_str = "Map not found.  icvn={}, fic={}, vriic={}".format(icvn, fic, vriic)
                        raise pyx12.errors.EngineError(err_str)
                    cur_map = pyx12.map_if.load_map_file(map_file, param, map_path)
                    src.check_837_lx = True if cur_map.id == '837' else False
                    logger.debug('Map file: %s' % (map_file))
                    #apply_loop_count(orig_node, cur_map)
                    #reset_isa_counts(cur_map)
                    #_reset_counter_to_isa_counts(walker)  # new counter
                #reset_gs_counts(cur_map)
                #_reset_counter_to_gs_counts(walker)  # new counter
                node = cur_map.getnodebypath('/ISA_LOOP/GS_LOOP/GS')
                errh.add_gs_loop(seg, src)
                errh.handle_errors(src.pop_errors())
            elif seg.get_seg_id() == 'BHT':
                # special case for 4010 837P
                if vriic in ('004010X094', '004010X094A1'):
                    tspc = seg.get_value('BHT02')
                    logger.debug('icvn=%s, fic=%s, vriic=%s, tspc=%s' %
                                 (icvn, fic, vriic, tspc))
                    map_file_new = map_index_if.get_filename(icvn, vriic, fic, tspc)
                    logger.debug('New map file: %s' % (map_file_new))
                    if map_file != map_file_new:
                        map_file = map_file_new
                        if map_file is None:
                            err_str = "Map not found.  icvn={}, fic={}, vriic={}, tspc={}".format(
                                        icvn, fic, vriic, tspc)
                            raise pyx12.errors.EngineError(err_str)
                        cur_map = pyx12.map_if.load_map_file(map_file, param, map_path)
                        src.check_837_lx = True if cur_map.id == '837' else False
                        logger.debug('Map file: %s' % (map_file))
                        #apply_loop_count(node, cur_map)
                        node = cur_map.getnodebypath('/ISA_LOOP/GS_LOOP/ST_LOOP/HEADER/BHT')
                errh.add_seg(node, seg, src.get_seg_count(), src.get_cur_line(), src.get_ls_id())
                errh.handle_errors(src.pop_errors())
            elif seg.get_seg_id() == 'GE':
                errh.handle_errors(src.pop_errors())
                errh.close_gs_loop(node, seg, src)
            elif seg.get_seg_id() == 'ST':
                errh.add_st_loop(seg, src)
                errh.handle_errors(src.pop_errors())
            elif seg.get_seg_id() == 'SE':
                errh.handle_errors(src.pop_errors())
                errh.close_st_loop(node, seg, src)
            else:
                errh.add_seg(node, seg, src.get_seg_count(), src.get_cur_line(), src.get_ls_id())
                errh.handle_errors(src.pop_errors())

            #errh.set_cur_line(src.get_cur_line())
            valid &= node.is_valid(seg, errh)
            #erx.handleErrors(src.pop_errors())
            #erx.handleErrors(errh.get_errors())
            #errh.reset()

        if fd_html:
            if node is not None and node.is_first_seg_in_loop():
                html.loop(node.get_parent())
            err_node_list = []
            while True:
                try:
                    next(err_iter)
                    err_node = err_iter.get_cur_node()
                    err_node_list.append(err_node)
                except pyx12.errors.IterOutOfBounds:
                    break
            html.gen_seg(seg, src, err_node_list)

        if fd_xmldoc:
            xmldoc.seg(node, seg)

        if False:
            print('\n\n')
        #erx.Write(src.cur_line)

    #erx.handleErrors(src.pop_errors())
    src.cleanup()  # Catch any skipped loop trailers
    errh.handle_errors(src.pop_errors())
    #erx.handleErrors(src.pop_errors())
    #erx.handleErrors(errh.get_errors())

    if fd_html:
        html.footer()
        del html

    if fd_xmldoc:
        del xmldoc

    #visit_debug = pyx12.error_debug.error_debug_visitor(sys.stdout)
    #errh.accept(visit_debug)

    #If this transaction is not a 997/999, generate one.
    if fd_997 and fic != 'FA':
        if vriic and vriic[:6] == '004010':
            try:
                visit_997 = pyx12.error_997.error_997_visitor(fd_997, src.get_term())
                errh.accept(visit_997)
                del visit_997
            except Exception:
                logger.exception('Failed to create 997 response')
        if vriic and vriic[:6] == '005010':
            try:
                visit_999 = pyx12.error_999.error_999_visitor(fd_997, src.get_term())
                errh.accept(visit_999)
                del visit_999
            except Exception:
                logger.exception('Failed to create 999 response')
    del node
    del src
    del control_map
    try:
        del cur_map
    except UnboundLocalError:
        pass
    try:
        if not valid or errh.get_error_count() > 0:
            return False
        else:
            return True
    except Exception:
        print(errh)
        return False
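
The 997/999 blocks above call logger.exception(), which logs at ERROR level and automatically appends the active traceback. The same pattern in isolation (helper name hypothetical):

import logging

logger = logging.getLogger('pyx12')

def accept_visitor(errh, visitor):
    # logger.exception() must be called from inside an except block;
    # it records the message plus the current traceback.
    try:
        errh.accept(visitor)
    except Exception:
        logger.exception('Failed to create acknowledgment response')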

Example 21

Project: peasauce
Source File: test_testlib.py
View license
def generate_unit_tests():
    arch = archm68k.ArchM68k()
    arch.set_operand_type_table(archm68k.operand_type_table)
    table_instructions = util.process_instruction_list(arch, archm68k.instruction_table)
    for i, t in enumerate(table_instructions):
        instruction_syntax, operand_syntaxes = arch.parse_instruction_syntax(t[util.II_NAME])
        instruction_flags = t[util.II_FLAGS]
        instruction_spec = util._make_specification(instruction_syntax)

        operand_specs = []
        for syntax in operand_syntaxes:
            operand_spec = copy.deepcopy(util._make_specification(syntax))
            operand_idx = arch.dict_operand_label_to_index.get(operand_spec.key, None)
            if operand_idx is not None:
                v = arch.table_operand_types[operand_idx][util.EAMI_FORMAT]
                if v is not None:
                    # Need to resolve the operand string template.
                    if operand_spec.key == "Imm":
                        if "xxx" not in operand_spec.mask_char_vars:
                            z_value = operand_spec.mask_char_vars["z"]
                            operand_spec.mask_char_vars["xxx"] = z_value
                            del operand_spec.mask_char_vars["z"]
                        xxx_value = operand_spec.mask_char_vars["xxx"]
                        idx = xxx_value.find(".")
                        xxx_size_value = xxx_value[idx:]
                        operand_spec.mask_char_vars["xxx"] = arch.get_bounds(xxx_size_value)
                    elif operand_spec.key == "AR":
                        if "Rn" in operand_spec.mask_char_vars:
                            operand_spec.mask_char_vars["xxx"] = arch.get_bounds(v)
                        else:
                            raise RuntimeError("fixme")
                    else:
                        raise RuntimeError("unhandled operand spec=", operand_spec.key, v, "full=", syntax, "vars=", operand_spec.mask_char_vars)
                operand_specs.append((operand_spec, v))
            else:
                raise RuntimeError("cccc")

        def integrate_possible_values(possible_values, old_combinations, new_combinations):
            for combination in old_combinations:
                for value in possible_values:
                    combination_copy = combination[:]
                    combination_copy.append(value)
                    new_combinations.append(combination_copy)

        # Operand N will have N variations that need to be tested.
        # If there are M operands, then the total number of variations will be N[0]*N[1]*...*N[M-1]
        # So we start with the first operand's variations, then we extend with the seconds, and so on..
        operand_idx = 0
        combinations = [ [] ]
        while operand_idx < len(operand_specs):
            operand_spec, operand_format = operand_specs[operand_idx]
            combinations_temp = []

            if len(operand_spec.mask_char_vars):
                format_variations = [ operand_format ]
                for k, v in operand_spec.mask_char_vars.iteritems():
                    format_variations_temp = []
                    for format in format_variations:
                        if k in format:
                            if v is None:
                                raise RuntimeError("bad instruction, no bounds", operand_format, instruction_syntax, operand_syntaxes)
                            for bounding_value in v[1]:
                                format_variations_temp.append(format.replace(k, str(bounding_value)))
                        else:
                            raise RuntimeError("Key", k, "not in format", format)
                    format_variations = format_variations_temp
                # Each of the existing combinations will be combined with each of the values.
                integrate_possible_values(format_variations, combinations, combinations_temp)
            else:
                integrate_possible_values([ operand_spec.key ], combinations, combinations_temp)

            combinations = combinations_temp
            operand_idx += 1


        # Determine which CPU to tell the assembler to target.
        cpu_match_data = [
            (archm68k.IF_060, testlib.constants.CPU_MC60060),
            (archm68k.IF_040, testlib.constants.CPU_MC60040),
            (archm68k.IF_030, testlib.constants.CPU_MC60030),
            (archm68k.IF_020, testlib.constants.CPU_MC60020),
            (archm68k.IF_010, testlib.constants.CPU_MC60010),
            (archm68k.IF_000, testlib.constants.CPU_MC60000),
        ]
        cpu_id = None
        for mask, value in cpu_match_data:
            if instruction_flags & mask:
                cpu_id = value
                break
        else:
            raise RuntimeError("Failed to identify CPU from instruction flags")

        # Assemble each identified instruction variation and obtain the machine code for disassembler testing.
        asm = testlib.tool_assembler_vasm.Assembler()
        for combination in combinations:
            text = instruction_syntax +" "+ ",".join(combination)
            result = asm.compile_text("a: "+ text, cpu_id, testlib.constants.ASM_SYNTAX_MOTOROLA)
            if result is None:
                print "X", text
            else:
                print " ", text, [ hex(ord(c)) for c in result ]
        if i == 15:
            break

    # NOTE: This works.
    if False:
        asm = testlib.tool_assembler_vasm.Assembler()
        result = asm.compile_text("a: movem.w d0-d6/a0-a6,-(sp)", testlib.constants.CPU_MC60000, testlib.constants.ASM_SYNTAX_MOTOROLA)
        print [ hex(ord(c)) for c in result ]

        result = asm.compile_text("a: moveq #0,d0", testlib.constants.CPU_MC60000, testlib.constants.ASM_SYNTAX_MOTOROLA)
        print [ hex(ord(c)) for c in result ]


if __name__ == "__main__":
    DISPLAY_LOGGING = True

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if DISPLAY_LOGGING:
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
    else:
        ch = logging.NullHandler()
    logger.addHandler(ch)

    generate_unit_tests()

    if False:
        unittest.main()
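
The __main__ block above attaches either a StreamHandler or a NullHandler to the root logger, depending on whether console output is wanted. The same toggle as a reusable sketch:

import logging

def configure_root_logging(display=True, level=logging.INFO):
    # NullHandler swallows records, so logging calls can stay in
    # place without producing any output.
    root = logging.getLogger()
    root.setLevel(level)
    handler = logging.StreamHandler() if display else logging.NullHandler()
    handler.setLevel(level)
    root.addHandler(handler)
    return root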

Example 22

Project: yum
Source File: yummain.py
View license
def main(args):
    """Run the yum program from a command line interface."""

    yum.misc.setup_locale(override_time=True)

    def exUserCancel():
        logger.critical(_('\n\nExiting on user cancel'))
        if unlock(): return 200
        return 1

    def exIOError(e):
        if e.errno == 32:
            logger.critical(_('\n\nExiting on Broken Pipe'))
        else:
            logger.critical(_('\n\n%s') % exception2msg(e))
        if unlock(): return 200
        return 1

    def exPluginExit(e):
        '''Called when a plugin raises PluginYumExit.

        Log the plugin's exit message if one was supplied.
        ''' # ' xemacs hack
        exitmsg = exception2msg(e)
        if exitmsg:
            logger.warn('\n\n%s', exitmsg)
        if unlock(): return 200
        return 1

    def exFatal(e):
        logger.critical('\n\n%s', exception2msg(e.value))
        if unlock(): return 200
        return 1

    def exRepoError(e):
        # For RepoErrors ... help out by forcing new repodata next time.
        # XXX: clean only the repo that has failed?
        base.cleanExpireCache()

        msg = _("""\
 One of the configured repositories failed (%(repo)s),
 and yum doesn't have enough cached data to continue. At this point the only
 safe thing yum can do is fail. There are a few ways to work around or "fix" this:

     1. Contact the upstream for the repository and get them to fix the problem.

     2. Reconfigure the baseurl/etc. for the repository, to point to a working
        upstream. This is most often useful if you are using a newer
        distribution release than is supported by the repository (and the
        packages for the previous distribution release still work).

     3. Run the command with the repository temporarily disabled
            yum --disablerepo=%(repoid)s ...

     4. Disable the repository permanently, so yum won't use it by default. Yum
        will then just ignore the repository until you permanently enable it
        again or use --enablerepo for temporary usage:

            yum-config-manager --disable %(repoid)s
        or
            subscription-manager repos --disable=%(repoid)s

     5. Configure the failing repository to be skipped, if it is unavailable.
        Note that yum will try to contact the repo when it runs most commands,
        so it will have to try and fail each time (and thus yum will be much
        slower). If it is a very temporary problem though, this is often a nice
        compromise:

            yum-config-manager --save --setopt=%(repoid)s.skip_if_unavailable=true
""")

        repoui = _('Unknown')
        repoid = _('<repoid>')
        try:
            repoid = e.repo.id
            repoui = e.repo.name
        except AttributeError:
            pass

        msg = msg % {'repoid' : repoid, 'repo' : repoui}

        logger.critical('\n\n%s\n%s', msg, exception2msg(e))

        if unlock(): return 200
        return 1

    def unlock():
        try:
            base.closeRpmDB()
            base.doUnlock()
        except Errors.LockError, e:
            return 200
        return 0

    def rpmdb_warn_checks():
        try:
            probs = base._rpmdb_warn_checks(out=verbose_logger.info, warn=False)
        except Errors.YumBaseError, e:
            # This is mainly for PackageSackError from rpmdb.
            verbose_logger.info(_(" Yum checks failed: %s"), exception2msg(e))
            probs = []
        if not probs:
            verbose_logger.info(_(" You could try running: rpm -Va --nofiles --nodigest"))

    logger = logging.getLogger("yum.main")
    verbose_logger = logging.getLogger("yum.verbose.main")

    # Try to open the current directory to see if we have 
    # read and execute access. If not, chdir to /
    try:
        f = open(".")
    except IOError, e:
        if e.errno == errno.EACCES:
            logger.critical(_('No read/execute access in current directory, moving to /'))
            os.chdir("/")
    else:
        f.close()
    try:
        os.getcwd()
    except OSError, e:
        if e.errno == errno.ENOENT:
            logger.critical(_('No getcwd() access in current directory, moving to /'))
            os.chdir("/")

    # our core object for the cli
    base = cli.YumBaseCli()

    # do our cli parsing and config file setup
    # also sanity check the things being passed on the cli
    try:
        base.getOptionsConfig(args)
    except plugins.PluginYumExit, e:
        return exPluginExit(e)
    except Errors.YumBaseError, e:
        return exFatal(e)
    except (OSError, IOError), e:
        return exIOError(e)

    try:
        base.waitForLock()
    except Errors.YumBaseError, e:
        return exFatal(e)

    try:
        result, resultmsgs = base.doCommands()
    except plugins.PluginYumExit, e:
        return exPluginExit(e)
    except Errors.RepoError, e:
        return exRepoError(e)
    except Errors.YumBaseError, e:
        result = 1
        resultmsgs = [exception2msg(e)]
    except KeyboardInterrupt:
        return exUserCancel()
    except IOError, e:
        return exIOError(e)

    # Act on the command/shell result
    if result == 0:
        # Normal exit 
        for msg in resultmsgs:
            verbose_logger.log(logginglevels.INFO_2, '%s', msg)
        if unlock(): return 200
        return base.exit_code
    elif result == 1:
        # Fatal error
        for msg in resultmsgs:
            logger.critical(_('Error: %s'), msg)
        if unlock(): return 200
        return 1
    elif result == 2:
        # Continue on
        pass
    elif result == 100:
        if unlock(): return 200
        return 100
    else:
        logger.critical(_('Unknown Error(s): Exit Code: %d:'), result)
        for msg in resultmsgs:
            logger.critical(msg)
        if unlock(): return 200
        return 3

    # Mainly for ostree, but might be useful for others.
    if base.conf.usr_w_check:
        usrinstpath = base.conf.installroot + "/usr"
        usrinstpath = usrinstpath.replace('//', '/')
        if (os.path.exists(usrinstpath) and
            not os.access(usrinstpath, os.W_OK)):
            logger.critical(_('No write access to %s directory') % usrinstpath)
            logger.critical(_('  Maybe this is an ostree image?'))
            logger.critical(_('  To disable you can use --setopt=usr_w_check=false'))
            if unlock(): return 200
            return 1
            
    # Depsolve stage
    verbose_logger.log(logginglevels.INFO_2, _('Resolving Dependencies'))

    try:
        (result, resultmsgs) = base.buildTransaction() 
    except plugins.PluginYumExit, e:
        return exPluginExit(e)
    except Errors.RepoError, e:
        return exRepoError(e)
    except Errors.YumBaseError, e:
        result = 1
        resultmsgs = [exception2msg(e)]
    except KeyboardInterrupt:
        return exUserCancel()
    except IOError, e:
        return exIOError(e)
   
    # Act on the depsolve result
    if result == 0:
        # Normal exit
        if unlock(): return 200
        return base.exit_code
    elif result == 1:
        # Fatal error
        for prefix, msg in base.pretty_output_restring(resultmsgs):
            logger.critical(prefix, msg)
        if base._depsolving_failed:
            if not base.conf.skip_broken:
                verbose_logger.info(_(" You could try using --skip-broken to work around the problem"))
            rpmdb_warn_checks()
        if unlock(): return 200
        return 1
    elif result == 2:
        # Continue on
        pass
    else:
        logger.critical(_('Unknown Error(s): Exit Code: %d:'), result)
        for msg in resultmsgs:
            logger.critical(msg)
        if unlock(): return 200
        return 3

    verbose_logger.log(logginglevels.INFO_2, _('\nDependencies Resolved'))

    # Run the transaction
    try:
        inhibit = {'what' : 'shutdown:idle',
                   'who'  : 'yum cli',
                   'why'  : 'Running transaction', # i18n?
                   'mode' : 'block'}
        return_code = base.doTransaction(inhibit=inhibit)
    except plugins.PluginYumExit, e:
        return exPluginExit(e)
    except Errors.RepoError, e:
        return exRepoError(e)
    except Errors.YumBaseError, e:
        return exFatal(e)
    except KeyboardInterrupt:
        return exUserCancel()
    except IOError, e:
        return exIOError(e)

    # rpm ts.check() failed.
    if isinstance(return_code, tuple) and len(return_code) == 2:
        (result, resultmsgs) = return_code
        for msg in resultmsgs:
            logger.critical("%s", msg)
        rpmdb_warn_checks()
        return_code = result
        if base._ts_save_file:
            verbose_logger.info(_("Your transaction was saved, rerun it with:\n yum load-transaction %s") % base._ts_save_file)
    elif return_code < 0:
        return_code = 1 # Means the pre-transaction checks failed...
        #  This includes:
        # . No packages.
        # . Hitting N at the prompt.
        # . GPG check failures.
        if base._ts_save_file:
            verbose_logger.info(_("Your transaction was saved, rerun it with:\n yum load-transaction %s") % base._ts_save_file)
    else:
        verbose_logger.log(logginglevels.INFO_2, _('Complete!'))

    if unlock(): return 200
    return return_code or base.exit_code
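
main() relies on dotted logger names: 'yum.main' and 'yum.verbose.main' both live under the 'yum' hierarchy, so a handler attached to 'yum' (or to the root logger) receives records from both. A minimal sketch of that propagation, with a handler that yum itself would normally configure elsewhere:

import logging

yum_logger = logging.getLogger('yum')
yum_logger.addHandler(logging.StreamHandler())
yum_logger.setLevel(logging.INFO)

# Both child loggers propagate up to 'yum' and share its handler.
logging.getLogger('yum.main').critical('Error: %s', 'demo message')
logging.getLogger('yum.verbose.main').info('verbose detail')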

Example 23

Project: fb2mobi
Source File: fb2mobi.py
View license
def process(args):
    infile = args.infile
    outfile = args.outfile
    config_file_name = "%s.config" % get_executable_name()
    application_path = get_executable_path()

    if os.path.exists(os.path.join(application_path, config_file_name)):
        config_file = os.path.join(application_path, config_file_name)
    else:
        if sys.platform == 'win32':
            config_file = os.path.join(os.path.expanduser('~'), 'fb2mobi', config_file_name)
        else:
            config_file = os.path.join(os.path.expanduser('~'), '.fb2mobi', config_file_name)

    config = ConverterConfig(config_file)

    if args.profilelist:

        print('Profile list in {0}:'.format(config.config_file))
        for p in config.profiles:
            print('\t{0}: {1}'.format(p, config.profiles[p]['description']))
        sys.exit(0)

    # If parameters were given on the command line, override the defaults (part 1)
    if args:
        if args.debug:
            config.debug = args.debug
        if args.log:
            config.log_file = args.log
        if args.loglevel:
            config.log_level = args.loglevel
        if args.consolelevel:
            config.console_level = args.consolelevel
        if args.recursive:
            config.recursive = True
        if args.nc:
            config.mhl = True

    log = logging.getLogger('fb2mobi')
    log.setLevel("DEBUG")

    log_stream_handler = logging.StreamHandler()
    log_stream_handler.setLevel(get_log_level(config.console_level))
    log_stream_handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
    log.addHandler(log_stream_handler)

    if config.log_file:
        log_file_handler = logging.FileHandler(filename=config.log_file, mode='a', encoding='utf-8')
        log_file_handler.setLevel(get_log_level(config.log_level))
        log_file_handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s'))
        log.addHandler(log_file_handler)

    config.log = log

    if args.profile:
        config.setCurrentProfile(args.profile)
    else:
        config.setCurrentProfile(config.default_profile)

    # If parameters were given on the command line, override the defaults (part 2)
    if args:
        if args.outputformat:
            config.output_format = args.outputformat
        if args.hyphenate is not None:
            config.current_profile['hyphens'] = args.hyphenate
        if args.transliterate is not None:
            config.transliterate = args.transliterate
        if args.screen_width is not None:
            config.screen_width = args.screen_width
        if args.screen_height is not None:
            config.screen_height = args.screen_height
        if args.kindlecompressionlevel:
            config.kindle_compression_level = args.kindlecompressionlevel
        if args.css:
            config.current_profile['css'] = args.css
        if args.xslt:
            config.current_profile['xslt'] = args.xslt
        if args.dropcaps is not None:
            config.current_profile['dropcaps'] = args.dropcaps
        if args.tocmaxlevel:
            config.current_profile['tocMaxLevel'] = args.tocmaxlevel
        if args.tocbeforebody is not None:
            config.current_profile['tocBeforeBody'] = args.tocbeforebody
        if args.notesmode:
            config.current_profile['notesMode'] = args.notesmode
        if args.notesbodies:
            config.current_profile['notesBodies'] = args.notesbodies
        if args.annotationtitle:
            config.current_profile['annotationTitle'] = args.annotationtitle
        if args.toctitle:
            config.current_profile['tocTitle'] = args.toctitle
        if args.chapteronnewpage is not None:
            config.current_profile['chapterOnNewPage'] = args.chapteronnewpage
        if args.removepngtransparency is not None:
            config.current_profile['removePngTransparency'] = args.removepngtransparency
        if args.noMOBIoptimization:
            config.noMOBIoptimization = args.noMOBIoptimization
        if args.sendtokindle is not None:
            config.send_to_kindle['send'] = args.sendtokindle

        if args.inputdir:
            config.input_dir = args.inputdir
        if args.outputdir:
            config.output_dir = args.outputdir
        if args.deletesourcefile:
            config.delete_source_file = args.deletesourcefile
        if args.savestructure:
            config.save_structure = args.savestructure

        if args.transliterateauthorandtitle is not None:
            config.transliterate_author_and_title = args.transliterateauthorandtitle

    if args.inputdir:
        process_folder(config, args.inputdir, args.outputdir)
        if args.deleteinputdir:
            try:
                rm_tmp_files(args.inputdir, False)
            except:
                log.error('Unable to remove directory "{0}"'.format(args.inputdir))

    elif infile:
        process_file(config, infile, outfile)
        if args.deletesourcefile:
            try:
                os.remove(infile)
            except:
                log.error('Unable to remove file "{0}"'.format(infile))
    else:
        print(argparser.description)
        argparser.print_usage()
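
In process() the logger level acts as a floor and each handler filters further: the console handler applies the configured console level while the file handler applies the file log level. The same split in isolation (logger name and filename hypothetical):

import logging

log = logging.getLogger('fb2mobi-demo')
log.setLevel(logging.DEBUG)            # logger floor: pass everything on

console = logging.StreamHandler()
console.setLevel(logging.WARNING)      # console shows warnings and up
log.addHandler(console)

logfile = logging.FileHandler('demo.log', mode='a', encoding='utf-8')
logfile.setLevel(logging.DEBUG)        # file records everything
logfile.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s'))
log.addHandler(logfile)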

Example 24

Project: Django--an-app-at-a-time
Source File: base.py
View license
    def get_response(self, request):
        "Returns an HttpResponse object for the given HttpRequest"

        # Setup default url resolver for this thread, this code is outside
        # the try/except so we don't get a spurious "unbound local
        # variable" exception in the event an exception is raised before
        # resolver is set
        urlconf = settings.ROOT_URLCONF
        urlresolvers.set_urlconf(urlconf)
        resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
        try:
            response = None
            # Apply request middleware
            for middleware_method in self._request_middleware:
                response = middleware_method(request)
                if response:
                    break

            if response is None:
                if hasattr(request, 'urlconf'):
                    # Reset url resolver with a custom urlconf.
                    urlconf = request.urlconf
                    urlresolvers.set_urlconf(urlconf)
                    resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)

                resolver_match = resolver.resolve(request.path_info)
                callback, callback_args, callback_kwargs = resolver_match
                request.resolver_match = resolver_match

                # Apply view middleware
                for middleware_method in self._view_middleware:
                    response = middleware_method(request, callback, callback_args, callback_kwargs)
                    if response:
                        break

            if response is None:
                wrapped_callback = self.make_view_atomic(callback)
                try:
                    response = wrapped_callback(request, *callback_args, **callback_kwargs)
                except Exception as e:
                    # If the view raised an exception, run it through exception
                    # middleware, and if the exception middleware returns a
                    # response, use that. Otherwise, reraise the exception.
                    for middleware_method in self._exception_middleware:
                        response = middleware_method(request, e)
                        if response:
                            break
                    if response is None:
                        raise

            # Complain if the view returned None (a common error).
            if response is None:
                if isinstance(callback, types.FunctionType):    # FBV
                    view_name = callback.__name__
                else:                                           # CBV
                    view_name = callback.__class__.__name__ + '.__call__'
                raise ValueError("The view %s.%s didn't return an HttpResponse object. It returned None instead."
                                 % (callback.__module__, view_name))

            # If the response supports deferred rendering, apply template
            # response middleware and then render the response
            if hasattr(response, 'render') and callable(response.render):
                for middleware_method in self._template_response_middleware:
                    response = middleware_method(request, response)
                    # Complain if the template response middleware returned None (a common error).
                    if response is None:
                        raise ValueError(
                            "%s.process_template_response didn't return an "
                            "HttpResponse object. It returned None instead."
                            % (middleware_method.__self__.__class__.__name__))
                response = response.render()

        except http.Http404 as e:
            logger.warning('Not Found: %s', request.path,
                        extra={
                            'status_code': 404,
                            'request': request
                        })
            if settings.DEBUG:
                response = debug.technical_404_response(request, e)
            else:
                response = self.get_exception_response(request, resolver, 404)

        except PermissionDenied:
            logger.warning(
                'Forbidden (Permission denied): %s', request.path,
                extra={
                    'status_code': 403,
                    'request': request
                })
            response = self.get_exception_response(request, resolver, 403)

        except MultiPartParserError:
            logger.warning(
                'Bad request (Unable to parse request body): %s', request.path,
                extra={
                    'status_code': 400,
                    'request': request
                })
            response = self.get_exception_response(request, resolver, 400)

        except SuspiciousOperation as e:
            # The request logger receives events for any problematic request
            # The security logger receives events for all SuspiciousOperations
            security_logger = logging.getLogger('django.security.%s' %
                            e.__class__.__name__)
            security_logger.error(
                force_text(e),
                extra={
                    'status_code': 400,
                    'request': request
                })
            if settings.DEBUG:
                return debug.technical_500_response(request, *sys.exc_info(), status_code=400)

            response = self.get_exception_response(request, resolver, 400)

        except SystemExit:
            # Allow sys.exit() to actually exit. See tickets #1023 and #4701
            raise

        except:  # Handle everything else.
            # Get the exception info now, in case another exception is thrown later.
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())

        try:
            # Apply response middleware, regardless of the response
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
                # Complain if the response middleware returned None (a common error).
                if response is None:
                    raise ValueError(
                        "%s.process_response didn't return an "
                        "HttpResponse object. It returned None instead."
                        % (middleware_method.__self__.__class__.__name__))
            response = self.apply_response_fixes(request, response)
        except:  # Any exception should be gathered and handled
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())

        response._closable_objects.append(request)

        return response
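
The SuspiciousOperation branch above derives the logger name from the exception class, yielding one security logger per category, e.g. 'django.security.DisallowedHost'. A minimal sketch of that naming scheme (exception classes simplified, not Django's own):

import logging

logging.basicConfig(level=logging.ERROR)

class SuspiciousOperation(Exception):
    pass

class DisallowedHost(SuspiciousOperation):
    pass

def log_suspicious(e):
    # One logger per SuspiciousOperation subclass lets projects raise
    # or silence specific categories in their logging configuration.
    logging.getLogger('django.security.%s' % e.__class__.__name__).error(str(e))

log_suspicious(DisallowedHost('bad Host header'))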

Example 25

Project: quicktill
Source File: till.py
View license
def main():
    """Usual main entry point for the till software, unless you are doing
    something strange.  Reads the location of its global configuration,
    command line options, and the global configuration, and then starts
    the program.

    """
    
    try:
        with open(configurlfile) as f:
            configurl = f.readline()
    except FileNotFoundError:
        configurl = None
    parser=argparse.ArgumentParser(
        description="Figure out where all the money and stock went")
    parser.add_argument("--version", action="version", version=version)
    parser.add_argument("-u", "--config-url", action="store",
                        dest="configurl", default=configurl,
                        help="URL of global till configuration file; overrides "
                        "contents of %s"%configurlfile)
    parser.add_argument("-c", "--config-name", action="store",
                        dest="configname", default="default",
                        help="Till type to use from configuration file")
    parser.add_argument("-d", "--database", action="store",
                        dest="database",
                        help="Database connection string; overrides "
                        "database specified in configuration file")
    parser.add_argument("-f", "--user", action="store",
                        dest="user",type=int,default=None,
                        help="User ID to use when no other user information "
                        "is available (use 'listusers' command to check IDs)")
    loggroup=parser.add_mutually_exclusive_group()
    loggroup.add_argument("-y", "--log-config", help="Logging configuration file "
                          "in YAML", type=argparse.FileType('r'),
                          dest="logconfig")
    loggroup.add_argument("-l", "--logfile", type=argparse.FileType('a'),
                          dest="logfile", help="Simple logging output file")
    parser.add_argument("--debug", action="store_true", dest="debug",
                        help="Include debug output in log")
    parser.add_argument("--log-sql", action="store_true", dest="logsql",
                        help="Include SQL queries in logfile")
    parser.add_argument("--disable-printer", action="store_true",
                        dest="disable_printer",help="Use the null printer "
                        "instead of the configured printer")
    subparsers=parser.add_subparsers(title="commands")
    for c in cmdline.command._commands:
        c.add_arguments(subparsers)
    parser.set_defaults(configurl=configurl,configname="default",
                        database=None,logfile=None,debug=False,
                        interactive=False,disable_printer=False)
    args=parser.parse_args()

    if not hasattr(args, 'command'):
        parser.error("No command supplied")
    if not args.configurl:
        parser.error("No configuration URL provided in "
                     "%s or on command line"%configurlfile)
    tillconfig.configversion=args.configurl
    f=urllib.request.urlopen(args.configurl)
    globalconfig=f.read()
    f.close()

    # Logging configuration.  If we have a log configuration file,
    # read it and apply it.  This is done before the main
    # configuration file is imported so that log output from the
    # import can be directed appropriately.
    rootlog = logging.getLogger()
    if args.logconfig:
        logconfig = yaml.load(args.logconfig)
        args.logconfig.close()
        logging.config.dictConfig(logconfig)
    else:
        formatter = logging.Formatter(
            '%(asctime)s %(levelname)s %(name)s\n  %(message)s')
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        handler.setLevel(logging.ERROR)
        rootlog.addHandler(handler)
    if args.logfile:
        loglevel = logging.DEBUG if args.debug else logging.INFO
        loghandler = logging.StreamHandler(args.logfile)
        loghandler.setFormatter(formatter)
        loghandler.setLevel(logging.DEBUG if args.debug else logging.INFO)
        rootlog.addHandler(loghandler)
        rootlog.setLevel(loglevel)
    if args.debug:
        rootlog.setLevel(logging.DEBUG)
    if args.logsql:
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    # Set up handler to direct warnings to toaster UI
    toasthandler = ToastHandler()
    toastformatter = logging.Formatter('%(levelname)s: %(message)s')
    toasthandler.setFormatter(toastformatter)
    toasthandler.setLevel(logging.WARNING)
    rootlog.addHandler(toasthandler)

    import imp
    g=imp.new_module("globalconfig")
    g.configname=args.configname
    exec(globalconfig,g.__dict__)

    config=g.configurations.get(args.configname)
    if config is None:
        print(("Configuration \"%s\" does not exist.  "
               "Available configurations:"%args.configname))
        for i in list(g.configurations.keys()):
            print("%s: %s"%(i,g.configurations[i]['description']))
        sys.exit(1)

    if args.user:
        tillconfig.default_user=args.user
    if 'printer' in config:
        printer.driver=config['printer']
    else:
        log.info("no printer configured: using nullprinter()")
        printer.driver=pdrivers.nullprinter()
    if args.disable_printer:
        printer.driver=pdrivers.nullprinter(name="disabled-printer")
    if 'labelprinters' in config:
        printer.labelprinters=config['labelprinters']
    tillconfig.database=config.get('database')
    if args.database is not None: tillconfig.database=args.database
    if 'kitchenprinter' in config:
        foodorder.kitchenprinter=config['kitchenprinter']
    foodorder.menuurl=config.get('menuurl')
    tillconfig.pubname=config['pubname']
    tillconfig.pubnumber=config['pubnumber']
    tillconfig.pubaddr=config['pubaddr']
    tillconfig.currency=config['currency']
    tillconfig.all_payment_methods=config['all_payment_methods']
    tillconfig.payment_methods=config['payment_methods']
    if 'kbdriver' in config:
        # Perhaps we should support multiple filters...
        ui.keyboard_filter_stack.insert(0, config['kbdriver'])
    # XXX support kbdiff command temporarily
    if 'kbdriver' in config:
        tillconfig.kbdriver = config['kbdriver']
    # XXX support kbdiff command temporarily
    if 'altkbdriver' in config:
        tillconfig.altkbdriver = config['altkbdriver']
    if 'pricepolicy' in config:
        log.warning("Obsolete 'pricepolicy' key present in configuration")
    if 'format_currency' in config:
        tillconfig.fc=config['format_currency']
    if 'priceguess' in config:
        # Config files should subclass stocktype.PriceGuessHook
        # instead of specifying this
        log.warning("Obsolete 'priceguess' key present in configuration")
    if 'deptkeycheck' in config:
        log.warning("Obsolete 'deptkeycheck' key present in configuration")
    if 'checkdigit_print' in config:
        tillconfig.checkdigit_print=config['checkdigit_print']
    if 'checkdigit_on_usestock' in config:
        tillconfig.checkdigit_on_usestock=config['checkdigit_on_usestock']
    if 'usestock_hook' in config:
        # Config files should subclass usestock.UseStockRegularHook
        # instead of specifying this
        log.warning("Obsolete 'usestock_hook' key present in configuration")
    if 'hotkeys' in config:
        tillconfig.hotkeys=config['hotkeys']
    if 'firstpage' in config:
        tillconfig.firstpage=config['firstpage']
    else:
        tillconfig.firstpage=intropage
    if 'usertoken_handler' in config:
        tillconfig.usertoken_handler=config['usertoken_handler']
    if 'usertoken_listen' in config:
        tillconfig.usertoken_listen=config['usertoken_listen']
    if 'usertoken_listen_v6' in config:
        tillconfig.usertoken_listen_v6=config['usertoken_listen_v6']

    if os.uname()[0]=='Linux':
        if os.getenv('TERM')=='linux':
            tillconfig.unblank_screen=_linux_unblank_screen
        elif os.getenv('TERM')=='xterm':
            os.putenv('TERM','linux')

    if os.getenv('DISPLAY'):
        tillconfig.unblank_screen=_x_unblank_screen

    locale.setlocale(locale.LC_ALL,'')

    sys.exit(args.command(args))
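
When --log-config is given, main() loads the YAML file and hands the resulting dict straight to logging.config.dictConfig. A minimal dict equivalent to such a file, mirroring the fallback formatter above (values illustrative):

import logging.config

logging.config.dictConfig({
    'version': 1,
    'formatters': {
        'default': {'format': '%(asctime)s %(levelname)s %(name)s\n  %(message)s'},
    },
    'handlers': {
        'console': {'class': 'logging.StreamHandler',
                    'formatter': 'default',
                    'level': 'ERROR'},
    },
    'root': {'level': 'WARNING', 'handlers': ['console']},
})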

Example 26

Project: homu
Source File: main.py
View license
def main():
    args = arguments()

    logger = logging.getLogger('homu')
    logger.setLevel(logging.DEBUG if args.verbose else logging.INFO)
    logger.addHandler(logging.StreamHandler())

    try:
        with open('cfg.toml') as fp:
            cfg = toml.loads(fp.read())
    except FileNotFoundError:
        with open('cfg.json') as fp:
            cfg = json.loads(fp.read())

    gh = github3.login(token=cfg['github']['access_token'])
    user = gh.user()
    try: user_email = [x for x in gh.iter_emails() if x['primary']][0]['email']
    except IndexError:
        raise RuntimeError('Primary email not set, or "user" scope not granted')

    states = {}
    repos = {}
    repo_cfgs = {}
    buildbot_slots = ['']
    my_username = user.login
    repo_labels = {}
    mergeable_que = Queue()
    git_cfg = {
        'name': user.name if user.name else user.login,
        'email': user_email,
        'ssh_key': cfg.get('git', {}).get('ssh_key', ''),
        'local_git': cfg.get('git', {}).get('local_git', False),
    }

    db_conn = sqlite3.connect('main.db', check_same_thread=False, isolation_level=None)
    db = db_conn.cursor()

    db_query(db, '''CREATE TABLE IF NOT EXISTS pull (
        repo TEXT NOT NULL,
        num INTEGER NOT NULL,
        status TEXT NOT NULL,
        merge_sha TEXT,
        title TEXT,
        body TEXT,
        head_sha TEXT,
        head_ref TEXT,
        base_ref TEXT,
        assignee TEXT,
        approved_by TEXT,
        priority INTEGER,
        try_ INTEGER,
        rollup INTEGER,
        delegate TEXT,
        UNIQUE (repo, num)
    )''')

    db_query(db, '''CREATE TABLE IF NOT EXISTS build_res (
        repo TEXT NOT NULL,
        num INTEGER NOT NULL,
        builder TEXT NOT NULL,
        res INTEGER,
        url TEXT NOT NULL,
        merge_sha TEXT NOT NULL,
        UNIQUE (repo, num, builder)
    )''')

    db_query(db, '''CREATE TABLE IF NOT EXISTS mergeable (
        repo TEXT NOT NULL,
        num INTEGER NOT NULL,
        mergeable INTEGER NOT NULL,
        UNIQUE (repo, num)
    )''')

    for repo_label, repo_cfg in cfg['repo'].items():
        repo_cfgs[repo_label] = repo_cfg
        repo_labels[repo_cfg['owner'], repo_cfg['name']] = repo_label

        repo_states = {}
        repos[repo_label] = None

        db_query(db, 'SELECT num, head_sha, status, title, body, head_ref, base_ref, assignee, approved_by, priority, try_, rollup, delegate, merge_sha FROM pull WHERE repo = ?', [repo_label])
        for num, head_sha, status, title, body, head_ref, base_ref, assignee, approved_by, priority, try_, rollup, delegate, merge_sha in db.fetchall():
            state = PullReqState(num, head_sha, status, db, repo_label, mergeable_que, gh, repo_cfg['owner'], repo_cfg['name'], repos)
            state.title = title
            state.body = body
            state.head_ref = head_ref
            state.base_ref = base_ref
            state.assignee = assignee

            state.approved_by = approved_by
            state.priority = int(priority)
            state.try_ = bool(try_)
            state.rollup = bool(rollup)
            state.delegate = delegate

            if merge_sha:
                if 'buildbot' in repo_cfg:
                    builders = repo_cfg['buildbot']['builders']
                elif 'travis' in repo_cfg:
                    builders = ['travis']
                elif 'status' in repo_cfg:
                    builders = ['status']
                else:
                    raise RuntimeError('Invalid configuration')

                state.init_build_res(builders, use_db=False)
                state.merge_sha = merge_sha

            elif state.status == 'pending':
                # FIXME: There might be a better solution
                state.status = ''

                state.save()

            repo_states[num] = state

        states[repo_label] = repo_states

    db_query(db, 'SELECT repo, num, builder, res, url, merge_sha FROM build_res')
    for repo_label, num, builder, res, url, merge_sha in db.fetchall():
        try:
            state = states[repo_label][num]
            if builder not in state.build_res: raise KeyError
            if state.merge_sha != merge_sha: raise KeyError
        except KeyError:
            db_query(db, 'DELETE FROM build_res WHERE repo = ? AND num = ? AND builder = ?', [repo_label, num, builder])
            continue

        state.build_res[builder] = {
            'res': bool(res) if res is not None else None,
            'url': url,
        }

    db_query(db, 'SELECT repo, num, mergeable FROM mergeable')
    for repo_label, num, mergeable in db.fetchall():
        try: state = states[repo_label][num]
        except KeyError:
            db_query(db, 'DELETE FROM mergeable WHERE repo = ? AND num = ?', [repo_label, num])
            continue

        state.mergeable = bool(mergeable) if mergeable is not None else None

    db_query(db, 'SELECT repo FROM pull GROUP BY repo')
    for repo_label, in db.fetchall():
        if repo_label not in repos:
            db_query(db, 'DELETE FROM pull WHERE repo = ?', [repo_label])

    queue_handler_lock = Lock()
    def queue_handler():
        with queue_handler_lock:
            return process_queue(states, repos, repo_cfgs, logger, buildbot_slots, db, git_cfg)

    os.environ['GIT_SSH'] = os.path.join(os.path.dirname(__file__), 'git_helper.py')
    os.environ['GIT_EDITOR'] = 'cat'

    from . import server
    Thread(target=server.start, args=[cfg, states, queue_handler, repo_cfgs, repos, logger, buildbot_slots, my_username, db, repo_labels, mergeable_que, gh]).start()

    Thread(target=fetch_mergeability, args=[mergeable_que]).start()
    Thread(target=check_timeout, args=[states, queue_handler]).start()

    queue_handler()
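
Worth noting in the example above: one logger object is shared by the main thread and the worker threads started here. The stdlib logging module guards each handler with an internal lock, so concurrent logging from several threads is safe without extra synchronization. A minimal sketch of that idea (the logger name and worker body are illustrative, not taken from the project):

import logging
import threading

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()  # the root logger, shared by every thread

def worker(n):
    # logging's per-handler lock keeps records from interleaving mid-line
    logger.info('worker %d: processing queue', n)

threads = [threading.Thread(target=worker, args=[i]) for i in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()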

Example 27

Project: matador-deploy
Source File: main.py
View license
def main():
    # setup `logging` module
    logger = logging.getLogger('Rancher Deployment')
    # logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(message)s")  # same as default

    # setup `RainbowLoggingHandler`
    handler = RainbowLoggingHandler(sys.stderr,
        color_funcName=('black', 'yellow', True),
        color_module=('yellow', None, False))

    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # ###################################
    # Importing Args Modules
    # -----------------------------------
    from src import arguments

    # #####################################################
    # 1. Confirming Command Config and Required Arguments
    # -----------------------------------------------------
    # Check to see if arguments have been passed at all
    if arguments.noSystemArgsExist(sys.argv):
        arguments.printHelpDocumentationThenExit()

    # Check if we are printing out the version information
    if arguments.isVersionCommandEntered(sys.argv):
        arguments.printVersionInformationThenExit()

    # Check for the existence of flags
    if arguments.doFlagsExist(sys.argv):
        flags = sys.argv[1]
        arguments.checkHelpFlag(flags)
        FORCE_MODE = arguments.setForceFlag(flags)
        VERBOSE_MODE = arguments.setVerboseFlag(flags)
        DEVELOPMENT_MODE = arguments.setDevelopmentFlag(flags)
    else:
        FORCE_MODE = False
        VERBOSE_MODE = False
        DEVELOPMENT_MODE = False

    if VERBOSE_MODE:
        logger.setLevel(logging.DEBUG)

    logger.info("INFO: Flag Configuration Set")
    logger.debug("DEBUG: Force Mode: %s", FORCE_MODE)
    logger.debug("DEBUG: Verbose Mode: %s", VERBOSE_MODE)
    logger.debug("DEBUG: Development Mode: %s\n", DEVELOPMENT_MODE)

    if not DEVELOPMENT_MODE:
        arguments.checkArgumentStructure(sys.argv)
        ENV_ARGUMENT = arguments.setEnvironment(sys.argv)
        RANCHER_URL = arguments.setRancherUrl(sys.argv)
        RANCHER_ACCESS_KEY = arguments.setRancherKey(sys.argv)
        RANCHER_SECRET_KEY = arguments.setRancherSecret(sys.argv)
    else:
        logger.info("INFO: Currently In Development Mode. Setting Default Parameters.")
        ENV_ARGUMENT = "staging"
        RANCHER_URL = 'http://localhost:8080/v1/'
        RANCHER_ACCESS_KEY = '9F68C78100A2CAA209EC'
        RANCHER_SECRET_KEY = 'pEkMsBYjcZNxhY4rzYuEfdLLj7mDBZ8EPYwbtgVZ'

    if not FORCE_MODE:
        print "Rancher Arguments Set"
        print "ENVIRONMENT: %s" % ENV_ARGUMENT
        logger.debug("DEBUG: RANCHER_URL: %s", RANCHER_URL)
        logger.debug("DEBUG: RANCHER_ACCESS_KEY: %s", RANCHER_ACCESS_KEY)
        logger.debug("DEBUG: RANCHER_SECRET_KEY: %s", RANCHER_SECRET_KEY)
        print "Would you like to continue?"
        var = raw_input("Please enter (Y|N): ")
        if var == "y" or var == "Y":
            print "User Confirmation Accepted. Performing Rancher Deployment"
            logger.debug("DEBUG: Please use the [-f] flag to force application execution and skip confirmation")
        elif var == "n" or var == "N":
            logger.error("ERROR: User stopped app execution.")
            logger.debug("DEBUG: Please use the [-f] flag to force application execution and skip confirmation")
            sys.exit(0)
        else:
            logger.error("ERROR: Invalid User Input")
            logger.error("ERROR: Please use the [-f] flag to force application execution and skip confirmation")
            sys.exit(0)
    else:
        logger.info("INFO: Force Mode Enabled. Skipping Flag Confirmation")


    print "Starting Matador Deploy..."
    # ##################################
    # Import Additional Custom Modules
    # ----------------------------------
    # NOTE: This is done here so that the global vars can be used in the inner modules
    from src import yml_reader
    from src import compose_builder
    from src import rancher_compose

    # ##################################
    # 2. Reading YAML Files Into Script
    # ----------------------------------
    rancher_compose_list = yml_reader.readRancherComposeTemplate()
    docker_compose_list = yml_reader.readDockerComposeTemplate()
    config_file = yml_reader.readConfigurationFile()
    global_config = yml_reader.getGlobalConfig()
    env_config = yml_reader.getEnvConfig(ENV_ARGUMENT)
    PROJECT_NAME = config_file['project_name'] + "-" + ENV_ARGUMENT

    # ##################################################
    # 3. Combine config into the rancher compose
    # --------------------------------------------------
    compose_builder.addConfigToDockerCompose(docker_compose_list, global_config)
    compose_builder.addConfigToDockerCompose(docker_compose_list, env_config)

    # ###############################################
    # 4. Set the image for the deployment
    # -----------------------------------------------
    compose_builder.setImageForDockerConfig(docker_compose_list, ENV_ARGUMENT, config_file['image_base'])

    # ###############################################
    # 5. Save new yml out to a temp file
    # -----------------------------------------------
    yml_reader.createBuildDirectory()
    yml_reader.saveRancherComposeFile(rancher_compose_list)
    yml_reader.saveDockerComposeFile(docker_compose_list)

    # ###############################################
    # 6. Start updating this stuff to rancher baby
    # -----------------------------------------------
    cattle_client = cattle.Client(
        url=RANCHER_URL,
        access_key=RANCHER_ACCESS_KEY,
        secret_key=RANCHER_SECRET_KEY
    )
    rancher_compose.setRancherVars(RANCHER_URL, RANCHER_ACCESS_KEY, RANCHER_SECRET_KEY, PROJECT_NAME)
    rancher_compose.checkForExistingEnvironment(cattle_client, PROJECT_NAME)
    rancher_compose.pushToRancher()
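
The logging setup at the top of main() is the classic three-step wiring: get a named logger, attach a single handler with a formatter, and raise the level to DEBUG only when the verbose flag is set. A stdlib-only sketch of the same wiring, with RainbowLoggingHandler swapped for a plain StreamHandler and the flag value hard-coded for illustration:

import logging
import sys

logger = logging.getLogger('Rancher Deployment')
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter("%(message)s"))
logger.addHandler(handler)

VERBOSE_MODE = True  # would normally come from the parsed command-line flags
if VERBOSE_MODE:
    logger.setLevel(logging.DEBUG)

logger.info("INFO: Flag Configuration Set")
logger.debug("DEBUG: only emitted in verbose mode")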

Example 28

Project: sherpa
Source File: __init__.py
View license
def confidence(pars, parmins, parmaxes, parhardmins, parhardmaxes, sigma, eps,
               tol, maxiters, remin, verbose, limit_parnums, stat_cb,
               fit_cb, report_progress, get_par_name, do_parallel, numcores,
               open_interval):

    def get_prefix(index, name, minus_plus):
        '''To print the prefix/indent when verbose is on'''
        prefix = [[], []]
        blank = 3 * index * ' '
        for dir in range(2):
            prefix[dir] = blank + name + ' ' + minus_plus[dir] + ':'
        return prefix

    def get_delta_root(arg, dir, par_at_min):

        my_neg_pos = ConfBracket.neg_pos[dir]

        if is_iterable(arg):
            return arg
            # return map( lambda x: my_neg_pos * abs( x - par_at_min ), arg )
        elif arg is not None:
            arg -= par_at_min
            return my_neg_pos * abs(arg)
        else:
            return arg

    def get_step_size(error_scales, upper_scales, index, par):

        if 0 != error_scales[index]:
            # if covar error is NaN then set it to fraction of the par value.
            ith_covar_err = 0.0625 * abs(par)
        else:
            ith_covar_err = abs(upper_scales[index])
        if 0.0 == ith_covar_err:
            # just in case covar and/or par is 0
            ith_covar_err = 1.0e-6

        return ith_covar_err

    def monitor_func(fcn, history):
        def myfunc(x, *args):
            fval = fcn(x, *args)
            history[0].append(x)
            history[1].append(fval)
            return fval
        return myfunc

    def print_status(myblog, verbose, prefix, answer, lock):

        if lock is not None:
            lock.acquire()

        if 0 == verbose:
            msg = '%s\t' % prefix.lstrip()
        else:
            msg = '%s\t' % prefix

        if is_iterable(answer):
            msg += list_to_open_interval(answer)
        elif answer is None:
            msg += '-----'
        else:
            msg += '%g' % answer
        myblog(msg)

        if lock is not None:
            lock.release()

    #
    # Work in the translated coordinate. Hence the 'errors/confidence'
    # are the zeros/roots in the translated coordinate system.
    #
    def translated_fit_cb(fcn, myargs):
        def translated_fit_cb_wrapper(x, *args):
            hlimit = myargs.hlimit
            slimit = myargs.slimit
            hmin = hlimit[0]
            hmax = hlimit[1]
            xpars = myargs.xpars
            ith_par = myargs.ith_par
            # The parameter must be within the hard limits
            if x < hmin[ith_par] or x > hmax[ith_par]:
                raise OutOfBoundErr
            smin = slimit[0]
            smax = slimit[1]
            orig_ith_xpar = xpars[ith_par]
            xpars[ith_par] = x
            translated_stat = fcn(
                xpars, smin, smax, ith_par) - myargs.target_stat
            xpars[ith_par] = orig_ith_xpar
            return translated_stat
        return translated_fit_cb_wrapper

    def verbose_fitcb(fcn, bloginfo):
        if 0 == bloginfo.verbose:
            return fcn

        def verbose_fcn(x, *args):
            fval = fcn(x, *args)
            msg = '%s f( %e ) =' % (bloginfo.prefix, x)
            if fval is None:
                msg = '%s None' % msg
            else:
                msg = '%s %e' % (msg, fval)
            bloginfo.blogger.info(msg)
            return fval
        return verbose_fcn

    sherpablog = logging.getLogger('sherpa')  # where to print progress report

    # Get minimum fit statistic, and calculate target statistic value
    orig_min_stat = stat_cb(pars)
    delta_stat = sigma * sigma
    target_stat = orig_min_stat + delta_stat

    lower_scales = None
    upper_scales = None
    error_scales = None
    nfits = 0
    results = None

    try:
        (lower_scales, upper_scales, error_scales, nfits,
         results) = covariance(pars, parmins, parmaxes, parhardmins,
                               parhardmaxes, 1.0, eps, tol, maxiters,
                               remin, limit_parnums, stat_cb,
                               fit_cb, report_progress)
    except EstNewMin as e:
        raise e
    except:
        error_scales = numpy.array(len(pars) * [est_hardminmax])

    debug = False                                 # for internal use only

    myargs = ConfArgs(pars, parmins, parmaxes, parhardmins, parhardmaxes,
                      target_stat)

    if 0 != verbose:
        msg = '#\n# f' + numpy.array2string(numpy.asarray(pars), precision=6)
        msg += ' = %e\n' % orig_min_stat
        msg += '# sigma = %e\n' % sigma
        msg += '# target_stat = %e\n' % target_stat
        msg += '# tol = %e\n' % eps
        msg += '%s' % myargs
        sherpablog.info(msg)

    trial_points_by_par = {}

    def func(counter, singleparnum, lock=None):

        # nfev tracks the number of times the fit callback was invoked
        nfev, counter_cb = func_counter(fit_cb)

        #
        # These are the bounds to be returned by this method
        #
        conf_int = [[], []]
        error_flags = []

        #
        # If the user has requested a specific parameter to be
        # calculated then 'ith_par' represents the index of the
        # free parameter to deal with.
        #
        myargs.ith_par = singleparnum

        fitcb = translated_fit_cb(counter_cb, myargs)

        par_name = get_par_name(myargs.ith_par)

        ith_covar_err = get_step_size(error_scales, upper_scales, counter,
                                      pars[myargs.ith_par])

        trial_points = [[], []]
        fitcb = monitor_func(fitcb, trial_points)

        bracket = ConfBracket(myargs, trial_points)

        # the parameter name is set, may as well get the prefix
        prefix = get_prefix(counter, par_name, ['-', '+'])

        myfitcb = [verbose_fitcb(fitcb,
                                 ConfBlog(sherpablog, prefix[0], verbose, lock)),
                   verbose_fitcb(fitcb,
                                 ConfBlog(sherpablog, prefix[1], verbose, lock))]

        for dir in range(2):

            #
            # trial_points stores the history of the points for the
            # parameter which has been evaluated in order to locate
            # the root. Note the first point is 'given' since the info
            # of the minimum is crucial to the search.
            #
            bracket.trial_points[0].append(pars[myargs.ith_par])
            bracket.trial_points[1].append(- delta_stat)

            myblog = ConfBlog(sherpablog, prefix[dir], verbose, lock,
                              debug)

            # have to set the callback func otherwise disaster.
            bracket.fcn = myfitcb[dir]
            root = bracket(dir, iter, ith_covar_err, open_interval, maxiters,
                           eps, myblog)

            myzero = root(eps, myblog)

            delta_zero = get_delta_root(myzero, dir, pars[myargs.ith_par])

            conf_int[dir].append(delta_zero)

            status_prefix = get_prefix(counter, par_name, ['lower bound',
                                                           'upper bound'])
            print_status(myblog.blogger.info, verbose, status_prefix[dir],
                         delta_zero, lock)

        error_flags.append(est_success)

        #
        # include the minimum point to separate the -/+ interval
        #
        trial_points_by_par[par_name] = trial_points

        return (conf_int[0][0], conf_int[1][0], error_flags[0],
                nfev[0], None)

    if len(limit_parnums) < 2 or not _multi or numcores < 2:
        do_parallel = False

    if not do_parallel:
        lower_limits = []
        upper_limits = []
        eflags = []
        nfits = 0
        for i in range(len(limit_parnums)):
            lower_limit, upper_limit, flags, nfit, extra = func(
                i, limit_parnums[i])
            lower_limits.append(lower_limit)
            upper_limits.append(upper_limit)
            eflags.append(flags)
            nfits += nfit
        return (lower_limits, upper_limits, eflags, nfits, None)

    return parallel_est(func, limit_parnums, pars, numcores)
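
Note how the function above obtains its progress logger: logging.getLogger('sherpa') returns the same logger object that the sherpa package itself logs through, since getLogger hands back one shared instance per name. That makes it possible to configure a library's output from the outside. A minimal sketch of the idiom (the handler setup is illustrative, not sherpa's own configuration):

import logging
import sys

# The first call with a name creates the logger; later calls anywhere in
# the process return the same object.
liblog = logging.getLogger('sherpa')
liblog.addHandler(logging.StreamHandler(sys.stdout))
liblog.setLevel(logging.INFO)

# ...and any module that does logging.getLogger('sherpa') reaches it:
logging.getLogger('sherpa').info('# target_stat = %e', 2.706)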

Example 29

Project: smartsheet-python-sdk
Source File: report.py
View license
    def __init__(self, props=None, base_obj=None):
        """Initialize the Report model."""
        super(Report, self).__init__(props, base_obj)
        self._base = None
        if base_obj is not None:
            self._base = base_obj
        self._pre_request_filter = None
        self._log = logging.getLogger(__name__)
        self._log.info('initializing Report (%s)', __name__)

        self.allowed_values = {
            'access_level': [
                'VIEWER',
                'EDITOR',
                'EDITOR_SHARE',
                'ADMIN',
                'OWNER']}

        self._source_sheets = TypedList(Sheet)
        self._from_id = None
        self._modified_at = None
        self._owner_id = None
        self._columns = TypedList(Column)
        self._dependencies_enabled = None
        self._discussions = TypedList(Discussion)
        self._version = None
        self.__id = None
        self._gantt_enabled = None
        self._show_parent_rows_for_filters = None
        self._created_at = None
        self._name = None
        self._attachments = TypedList(Attachment)
        self._total_row_count = None
        self._favorite = None
        self._access_level = None
        self._rows = TypedList(Row)
        self._read_only = None
        self._permalink = None
        self._source = None
        self._effective_attachment_options = TypedList(str)
        self._owner = None
        self._resource_management_enabled = None
        self._user_settings = None

        if props:
            # account for alternate variable names from raw API response
            if 'sourceSheets' in props:
                self.source_sheets = props['sourceSheets']
            if 'source_sheets' in props:
                self.source_sheets = props['source_sheets']
            if 'fromId' in props:
                self.from_id = props['fromId']
            if 'from_id' in props:
                self.from_id = props['from_id']
            if 'modifiedAt' in props:
                self.modified_at = props['modifiedAt']
            if 'modified_at' in props:
                self.modified_at = props['modified_at']
            if 'ownerId' in props:
                self.owner_id = props['ownerId']
            if 'owner_id' in props:
                self.owner_id = props['owner_id']
            if 'columns' in props:
                self.columns = props['columns']
            if 'dependenciesEnabled' in props:
                self.dependencies_enabled = props[
                    'dependenciesEnabled']
            if 'dependencies_enabled' in props:
                self.dependencies_enabled = props[
                    'dependencies_enabled']
            if 'discussions' in props:
                self.discussions = props['discussions']
            if 'version' in props:
                self.version = props['version']
            if 'id' in props:
                self._id = props['id']
            if '_id' in props:
                self._id = props['_id']
            if 'ganttEnabled' in props:
                self.gantt_enabled = props['ganttEnabled']
            if 'gantt_enabled' in props:
                self.gantt_enabled = props['gantt_enabled']
            if 'showParentRowsForFilters' in props:
                self.show_parent_rows_for_filters = props[
                    'showParentRowsForFilters']
            if 'show_parent_rows_for_filters' in props:
                self.show_parent_rows_for_filters = props[
                    'show_parent_rows_for_filters']
            if 'createdAt' in props:
                self.created_at = props['createdAt']
            if 'created_at' in props:
                self.created_at = props['created_at']
            if 'name' in props:
                self.name = props['name']
            if 'attachments' in props:
                self.attachments = props['attachments']
            if 'totalRowCount' in props:
                self.total_row_count = props['totalRowCount']
            if 'total_row_count' in props:
                self.total_row_count = props['total_row_count']
            if 'favorite' in props:
                self.favorite = props['favorite']
            if 'accessLevel' in props:
                self.access_level = props['accessLevel']
            if 'access_level' in props:
                self.access_level = props['access_level']
            if 'rows' in props:
                self.rows = props['rows']
            if 'readOnly' in props:
                self.read_only = props['readOnly']
            if 'read_only' in props:
                self.read_only = props['read_only']
            if 'permalink' in props:
                self.permalink = props['permalink']
            if 'source' in props:
                self.source = props['source']
            if 'effectiveAttachmentOptions' in props:
                self.effective_attachment_options = props[
                    'effectiveAttachmentOptions']
            if 'effective_attachment_options' in props:
                self.effective_attachment_options = props[
                    'effective_attachment_options']
            if 'owner' in props:
                self.owner = props['owner']
            if 'resourceManagementEnabled' in props:
                self.resource_management_enabled = props[
                    'resourceManagementEnabled']
            if 'resource_management_enabled' in props:
                self.resource_management_enabled = props[
                    'resource_management_enabled']
            if 'userSettings' in props:
                self.user_settings = props['userSettings']
            if 'user_settings' in props:
                self.user_settings = props['user_settings']
            if 'source_sheets' not in props and 'sourceSheets' not in props:
                # props is a sheet or a list of sheets
                self.source_sheets = props
        # requests package Response object
        self.request_response = None
        self.__initialized = True
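
The logging.getLogger(__name__) line above names the logger after the module's dotted import path, so every model class in the SDK logs into one hierarchy and a handler attached to the top-level package receives all of it through propagation. A sketch with explicit names standing in for __name__ (the dotted path is illustrative, not necessarily the SDK's real module layout):

import logging
import sys

# Configure a handler once, on the package root...
pkg = logging.getLogger('smartsheet')
pkg.addHandler(logging.StreamHandler(sys.stdout))
pkg.setLevel(logging.INFO)

# ...and records from any child logger propagate up to it.
log = logging.getLogger('smartsheet.models.report')
log.info('initializing Report (%s)', 'smartsheet.models.report')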

Example 30

Project: smartsheet-python-sdk
Source File: sheet.py
View license
    def __init__(self, props=None, base_obj=None):
        """Initialize the Sheet model."""
        self._base = None
        if base_obj is not None:
            self._base = base_obj
        self._pre_request_filter = None
        self._log = logging.getLogger(__name__)
        self._log.info('initializing Sheet (%s)', __name__)

        self.allowed_values = {
            'access_level': [
                'VIEWER',
                'EDITOR',
                'EDITOR_SHARE',
                'ADMIN',
                'OWNER']}

        self._access_level = None
        self._attachments = TypedList(Attachment)
        self._columns = TypedList(Column)
        self._created_at = None
        self._dependencies_enabled = None
        self._discussions = TypedList(Discussion)
        self._effective_attachment_options = TypedList(str)
        self._favorite = None
        self._from_id = None
        self._gantt_enabled = None
        self.__id = None
        self._modified_at = None
        self._name = None
        self._owner = None
        self._owner_id = None
        self._permalink = None
        self._read_only = None
        self._resource_management_enabled = None
        self._rows = TypedList(Row)
        self._show_parent_rows_for_filters = None
        self._source = None
        self._total_row_count = None
        self._user_settings = None
        self._version = None

        if props:
            # account for alternate variable names from raw API response
            if 'accessLevel' in props:
                self.access_level = props['accessLevel']
            if 'access_level' in props:
                self.access_level = props['access_level']
            if 'attachments' in props:
                self.attachments = props['attachments']
            if 'columns' in props:
                self.columns = props['columns']
            if 'createdAt' in props:
                self.created_at = props['createdAt']
            if 'created_at' in props:
                self.created_at = props['created_at']
            if 'dependenciesEnabled' in props:
                self.dependencies_enabled = props[
                    'dependenciesEnabled']
            if 'dependencies_enabled' in props:
                self.dependencies_enabled = props[
                    'dependencies_enabled']
            if 'discussions' in props:
                self.discussions = props['discussions']
            if 'effectiveAttachmentOptions' in props:
                self.effective_attachment_options = props[
                    'effectiveAttachmentOptions']
            if 'effective_attachment_options' in props:
                self.effective_attachment_options = props[
                    'effective_attachment_options']
            if 'favorite' in props:
                self.favorite = props['favorite']
            if 'fromId' in props:
                self.from_id = props['fromId']
            if 'from_id' in props:
                self.from_id = props['from_id']
            if 'ganttEnabled' in props:
                self.gantt_enabled = props['ganttEnabled']
            if 'gantt_enabled' in props:
                self.gantt_enabled = props['gantt_enabled']
            if 'id' in props:
                self._id = props['id']
            if '_id' in props:
                self._id = props['_id']
            if 'modifiedAt' in props:
                self.modified_at = props['modifiedAt']
            if 'modified_at' in props:
                self.modified_at = props['modified_at']
            if 'name' in props:
                self.name = props['name']
            if 'owner' in props:
                self.owner = props['owner']
            if 'ownerId' in props:
                self.owner_id = props['ownerId']
            if 'owner_id' in props:
                self.owner_id = props['owner_id']
            if 'permalink' in props:
                self.permalink = props['permalink']
            if 'readOnly' in props:
                self.read_only = props['readOnly']
            if 'read_only' in props:
                self.read_only = props['read_only']
            if 'resourceManagementEnabled' in props:
                self.resource_management_enabled = props[
                    'resourceManagementEnabled']
            if 'resource_management_enabled' in props:
                self.resource_management_enabled = props[
                    'resource_management_enabled']
            if 'rows' in props:
                self.rows = props['rows']
            if 'showParentRowsForFilters' in props:
                self.show_parent_rows_for_filters = props[
                    'showParentRowsForFilters']
            if 'show_parent_rows_for_filters' in props:
                self.show_parent_rows_for_filters = props[
                    'show_parent_rows_for_filters']
            if 'source' in props:
                self.source = props['source']
            if 'totalRowCount' in props:
                self.total_row_count = props['totalRowCount']
            if 'total_row_count' in props:
                self.total_row_count = props['total_row_count']
            if 'userSettings' in props:
                self.user_settings = props['userSettings']
            if 'user_settings' in props:
                self.user_settings = props['user_settings']
            if 'version' in props:
                self.version = props['version']
        # requests package Response object
        self.request_response = None
        self.__initialized = True

Example 31

Project: quadcopterPi
Source File: mode_UAV.py
View license
def mode_UAV(myQ):

    logger = logging.getLogger('myQ.mode_UAV')

    cycleTime = 0.010  # [s]

    corrR = 0
    corrP = 0
    corrY = 0
    roll_rate_target = 0
    pitch_rate_target = 0

    myQ.rc.throttle = 0

    selectedPath = 0

    try:

        #wait ack from user to start motors
        while myQ.rc.command != 9 and myQ.rc.command != -1 and myQ.rc.cycling:
            pass

        if myQ.rc.command != -1:
            myQ.rc.command = 0

        initTime = time()
        previousTime = initTime
        currentTime = initTime

        #displayCommand()
        while myQ.rc.cycling is True and myQ.rc.command != -1 and myQ.netscan.connectionUp is True:

            #manage cycletime
            while currentTime <= previousTime + cycleTime:
                currentTime = time()
                sleep(0.001)
            stepTime = currentTime - previousTime
            previousTime = currentTime

            # user commands:
            if myQ.rc.command == 0:
                corrR = 0
                corrP = 0
            elif myQ.rc.command == 1:
                #two nested PIDs for each angle:
                #1) get the Wcorr from the roll (angle) PID
                #2) divide it by the cycle time to get a rotation speed (the target roll_rate)
                #3) get the Wcorr from the roll_rate PID

                #ROLL
                roll_rate_target = myQ.pidR.calc(myQ.rc.roll, myQ.sensor.roll, stepTime)
                roll_rate_target = roll_rate_target / stepTime
                #now using r_rate from the gyro: it is a cleaner signal
                corrR = myQ.pidR_rate.calc(roll_rate_target, myQ.sensor.r_rate, stepTime)

                #PITCH
                pitch_rate_target = myQ.pidP.calc(myQ.rc.pitch, myQ.sensor.pitch, stepTime)
                pitch_rate_target = pitch_rate_target / stepTime

                #now using p_rate from the gyro: it is a cleaner signal
                corrP = myQ.pidP_rate.calc(pitch_rate_target, myQ.sensor.p_rate, stepTime)

                #TODO: uncomment the line below to disable the pitch correction
                #corrP = 0

            elif myQ.rc.command > 1:
                selectedPath = myQ.rc.command
                pathTime = time()
                myQ.rc.command = 1

            #TODO add yaw pid control here and throttle pid control

            #The sign used to add the correction depends on the
            # motor position with respect to the IMU orientation
            myQ.motor[0].setW(myQ.rc.throttle + corrR)
            myQ.motor[2].setW(myQ.rc.throttle - corrR)

            #myQ.motor[1].setW(myQ.rc.throttle - corrP)
            #myQ.motor[3].setW(myQ.rc.throttle + corrP)

            if  selectedPath == 2:
                #Test: apply delta roll targets at fixed times

                if currentTime - pathTime > 3 and currentTime - pathTime < 6:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 6 and currentTime - pathTime < 9:
                    myQ.rc.roll = 0
                if currentTime - pathTime > 9 and currentTime - pathTime < 12:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 12 and currentTime - pathTime < 15:
                    myQ.rc.roll = 0
                if currentTime - pathTime > 15 and currentTime - pathTime < 18:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 18:
                    myQ.rc.roll = 0

            if  selectedPath == 3:
                #Test: apply delta roll targets at fixed times

                if currentTime - pathTime > 3 and currentTime - pathTime < 6:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 6 and currentTime - pathTime < 9:
                    myQ.rc.roll = -3
                if currentTime - pathTime > 9 and currentTime - pathTime < 12:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 12 and currentTime - pathTime < 15:
                    myQ.rc.roll = -3
                if currentTime - pathTime > 15 and currentTime - pathTime < 18:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 18:
                    myQ.rc.roll = 0

            if  selectedPath == 4:
                #Test: apply delta roll targets at fixed times

                if currentTime - pathTime > 2 and currentTime - pathTime < 3:
                    myQ.rc.roll = 1
                if currentTime - pathTime > 3 and currentTime - pathTime < 4:
                    myQ.rc.roll = 2
                if currentTime - pathTime > 4 and currentTime - pathTime < 5:
                    myQ.rc.roll = 3
                if currentTime - pathTime > 5 and currentTime - pathTime < 6:
                    myQ.rc.roll = 0
                if currentTime - pathTime > 6 and currentTime - pathTime < 7:
                    myQ.rc.roll = 1
                if currentTime - pathTime > 7 and currentTime - pathTime < 8:
                    myQ.rc.roll = 0
                if currentTime - pathTime > 8 and currentTime - pathTime < 9:
                    myQ.rc.roll = 1
                if currentTime - pathTime > 9 and currentTime - pathTime < 12:
                    myQ.rc.roll = 0
                if currentTime - pathTime > 12 and currentTime - pathTime < 15:
                    myQ.rc.roll = 5
                if currentTime - pathTime > 15 and currentTime - pathTime < 18:
                    myQ.rc.roll = 0
                if currentTime - pathTime > 18 and currentTime - pathTime < 20:
                    myQ.rc.roll = -5
                if currentTime - pathTime > 20:
                    myQ.rc.roll = 0

            myQ.writeLog(currentTime - initTime)

    except:
        logger.critical('Unexpected error: %s', sys.exc_info()[0])
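
The except block above logs only the exception type from sys.exc_info(). A stdlib alternative worth knowing here: logger.exception attaches the full formatted traceback automatically (logger.critical(..., exc_info=True) does the same at CRITICAL severity). A minimal sketch, not from the project:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('myQ.mode_UAV')

try:
    1 / 0
except Exception:
    # logs at ERROR level and appends the traceback of the active exception
    logger.exception('Unexpected error in control loop')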

Example 32

Project: quadcopterPi
Source File: MPU6050.py
View license
    def __init__(self, address=0x68):

        self.logger = logging.getLogger('myQ.MPU6050')

        self.logger.debug('IMU initializing...')
        try:
            self.i2c = I2C(address)
            self.address = address

            self.cal_iteration = 100

            self.roll_a_cal = 0
            self.pitch_a_cal = 0
            self.yaw_a_cal = 0
            self.roll_g_cal = 0
            self.pitch_g_cal = 0
            self.yaw_g_cal = 0
            self.gyro_scale = 0
            self.sensor_data = array('B', [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
            self.result_array = array('h', [0, 0, 0, 0, 0, 0, 0])

            #---------------------------------------------------------------------------
            # Reset all registers
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_PWR_MGMT_1, 0x80)
            time.sleep(5)

            #---------------------------------------------------------------------------
            # Sets clock source to gyro reference w/ PLL
            #---------------------------------------------------------------------------
            #SNT: 0x02 -> 0x03 (PLL with Z-axis gyro reference)
            self.i2c.write8(self.__MPU6050_RA_PWR_MGMT_1, 0x03)
            time.sleep(0.005)

            # Sets sample rate to 1000/1+4 = 200Hz
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_SMPLRT_DIV, 0x04)
            time.sleep(0.005)

            #SoleNeroTech note: moved up this part of the code to solve a bug in MPU6050:
            #CONFIG has to be set just after PWR_MGMT_1
            #---------------------------------------------------------------------------
            # 0x02 => 98Hz  2ms delay
            # 0x03 => 40Hz  4
            # 0x04 => 20Hz  8
            # 0x05 => 10Hz  15
            self.i2c.write8(self.__MPU6050_RA_CONFIG, 0x05)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            #0x00=+/-250 0x08=+/- 500    0x10=+/-1000 0x18=+/-2000
            #SoleNeroTech  modified in 0x00
            self.i2c.write8(self.__MPU6050_RA_GYRO_CONFIG, 0x00)
            self.gyro_scale = 250
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            #0x00=+/-2 0x08=+/- 4    0x10=+/-8 0x18=+/-16
            self.i2c.write8(self.__MPU6050_RA_ACCEL_CONFIG, 0x00)
            time.sleep(0.005)

            #--------------------------------------------------------------------------
            # Disable FIFO and AUX I2C; set the FIFO and I2C reset bits to 0
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_USER_CTRL, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Setup INT pin to latch and AUX I2C pass through
            #---------------------------------------------------------------------------
            ##logger.debug('Enable interrupt')
            #SNT 0x20>0x02
            self.i2c.write8(self.__MPU6050_RA_INT_PIN_CFG, 0x02)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Controls frequency of wakeups in accel low power mode plus the sensor standby modes
            #---------------------------------------------------------------------------
            ##logger.debug('Disable low-power')
            self.i2c.write8(self.__MPU6050_RA_PWR_MGMT_2, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # ********************************: Experimental :**************************
            # Enable data ready interrupt
            #---------------------------------------------------------------------------
            ##logger.debug('Interrupt data ready')
            self.i2c.write8(self.__MPU6050_RA_INT_ENABLE, 0x01)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Freefall threshold of |0mg|
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_FF_THR, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Freefall duration limit of 0
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_FF_DUR, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Motion threshold of 0mg
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_MOT_THR, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Motion duration of 0s
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_MOT_DUR, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Zero motion threshold
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_ZRMOT_THR, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Zero motion duration threshold
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_ZRMOT_DUR, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Disable sensor output to FIFO buffer
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_FIFO_EN, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # AUX I2C setup
            # Sets AUX I2C to single master control, plus other config
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_I2C_MST_CTRL, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Setup AUX I2C slaves
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV0_ADDR, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV0_REG, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV0_CTRL, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV1_ADDR, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV1_REG, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV1_CTRL, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV2_ADDR, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV2_REG, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV2_CTRL, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV3_ADDR, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV3_REG, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV3_CTRL, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV4_ADDR, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV4_REG, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV4_DO, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV4_CTRL, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV4_DI, 0x00)

            #---------------------------------------------------------------------------
            # Slave out, don't care
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV0_DO, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV1_DO, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV2_DO, 0x00)
            self.i2c.write8(self.__MPU6050_RA_I2C_SLV3_DO, 0x00)

            #---------------------------------------------------------------------------
            # More slave config
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_I2C_MST_DELAY_CTRL, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Reset sensor signal paths
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_SIGNAL_PATH_RESET, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Motion detection control
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_MOT_DETECT_CTRL, 0x00)
            time.sleep(0.005)

            #---------------------------------------------------------------------------
            # Data transfer to and from the FIFO buffer
            #---------------------------------------------------------------------------
            self.i2c.write8(self.__MPU6050_RA_FIFO_R_W, 0x00)
            time.sleep(0.005)

            self.CheckSetting()
        except:
            self.logger.critical('Unexpected error: %s', sys.exc_info()[0])

Example 33

Project: splunk-webframework
Source File: __init__.py
View license
    def verify_request(self, uri, http_method=u'GET', body=None,
            headers=None, require_resource_owner=True, require_verifier=False,
            require_realm=False, required_realm=None):
        """Verifies a request ensuring that the following is true:

        Per `section 3.2`_ of the spec.

        - all mandated OAuth parameters are supplied
        - parameters are only supplied in one source which may be the URI
          query, the Authorization header or the body
        - all parameters are checked and validated, see comments and the
          methods and properties of this class for further details.
        - the supplied signature is verified against a recalculated one

        A ValueError will be raised if any parameter is missing,
        supplied twice or invalid. A HTTP 400 Response should be returned
        upon catching an exception.

        A HTTP 401 Response should be returned if verify_request returns False.

        `Timing attacks`_ are prevented through the use of dummy credentials to
        create near constant time verification even if an invalid credential
        is used. Early exit on invalid credentials would enable attackers
        to perform `enumeration attacks`_. Near constant time string comparison
        is used to prevent secret key guessing. Note that timing attacks can
        only be prevented through near constant time execution, not by adding
        a random delay which would only require more samples to be gathered.

        .. _`section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
        .. _`Timing attacks`: http://rdist.root.org/2010/07/19/exploiting-remote-timing-attacks/
        .. _`enumeration attacks`: http://www.sans.edu/research/security-laboratory/article/attacks-browsing
        """
        # Only include body data from x-www-form-urlencoded requests
        headers = headers or {}
        if (u"Content-Type" in headers and
                headers[u"Content-Type"] == CONTENT_TYPE_FORM_URLENCODED):
            request = Request(uri, http_method, body, headers)
        else:
            request = Request(uri, http_method, u'', headers)

        if self.enforce_ssl and not request.uri.lower().startswith("https://"):
            raise ValueError("Insecure transport, only HTTPS is allowed.")

        signature_type, params, oauth_params = self.get_signature_type_and_params(request)

        # The server SHOULD return a 400 (Bad Request) status code when
        # receiving a request with duplicated protocol parameters.
        if len(dict(oauth_params)) != len(oauth_params):
            raise ValueError("Duplicate OAuth entries.")

        oauth_params = dict(oauth_params)
        request_signature = oauth_params.get(u'oauth_signature')
        client_key = oauth_params.get(u'oauth_consumer_key')
        resource_owner_key = oauth_params.get(u'oauth_token')
        nonce = oauth_params.get(u'oauth_nonce')
        timestamp = oauth_params.get(u'oauth_timestamp')
        callback_uri = oauth_params.get(u'oauth_callback')
        verifier = oauth_params.get(u'oauth_verifier')
        signature_method = oauth_params.get(u'oauth_signature_method')
        realm = dict(params).get(u'realm')

        # The server SHOULD return a 400 (Bad Request) status code when
        # receiving a request with missing parameters.
        if not all((request_signature, client_key, nonce,
                    timestamp, signature_method)):
            raise ValueError("Missing OAuth parameters.")

        # OAuth does not mandate a particular signature method, as each
        # implementation can have its own unique requirements.  Servers are
        # free to implement and document their own custom methods.
        # Recommending any particular method is beyond the scope of this
        # specification.  Implementers should review the Security
        # Considerations section (`Section 4`_) before deciding on which
        # method to support.
        # .. _`Section 4`: http://tools.ietf.org/html/rfc5849#section-4
        if signature_method not in self.allowed_signature_methods:
            raise ValueError("Invalid signature method.")

        # Servers receiving an authenticated request MUST validate it by:
        #   If the "oauth_version" parameter is present, ensuring its value is
        #   "1.0".
        if u'oauth_version' in oauth_params and oauth_params[u'oauth_version'] != u'1.0':
            raise ValueError("Invalid OAuth version.")

        # The timestamp value MUST be a positive integer. Unless otherwise
        # specified by the server's documentation, the timestamp is expressed
        # in the number of seconds since January 1, 1970 00:00:00 GMT.
        if len(timestamp) != 10:
            raise ValueError("Invalid timestamp size")
        try:
            ts = int(timestamp)

        except ValueError:
            raise ValueError("Timestamp must be an integer")

        else:
            # To avoid the need to retain an infinite number of nonce values for
            # future checks, servers MAY choose to restrict the time period after
            # which a request with an old timestamp is rejected.
            if time.time() - ts > self.timestamp_lifetime:
                raise ValueError("Request too old, over 10 minutes.")

        # Provider specific validation of parameters, used to enforce
        # restrictions such as character set and length.
        if not self.check_client_key(client_key):
            raise ValueError("Invalid client key.")

        if not resource_owner_key and require_resource_owner:
            raise ValueError("Missing resource owner.")

        if (require_resource_owner and not require_verifier and
            not self.check_access_token(resource_owner_key)):
            raise ValueError("Invalid resource owner key.")

        if (require_resource_owner and require_verifier and
            not self.check_request_token(resource_owner_key)):
            raise ValueError("Invalid resource owner key.")

        if not self.check_nonce(nonce):
            raise ValueError("Invalid nonce.")

        if realm and not self.check_realm(realm):
            raise ValueError("Invalid realm. Allowed are %s" % self.realms)

        if not verifier and require_verifier:
            raise ValueError("Missing verifier.")

        if require_verifier and not self.check_verifier(verifier):
            raise ValueError("Invalid verifier.")

        # Servers receiving an authenticated request MUST validate it by:
        #   If using the "HMAC-SHA1" or "RSA-SHA1" signature methods, ensuring
        #   that the combination of nonce/timestamp/token (if present)
        #   received from the client has not been used before in a previous
        #   request (the server MAY reject requests with stale timestamps as
        #   described in `Section 3.3`_).
        # .._`Section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3
        #
        # We check this before validating client and resource owner for
        # increased security and performance, both gained by doing less work.
        if require_verifier:
            token = {"request_token": resource_owner_key}
        else:
            token = {"access_token": resource_owner_key}
        if not self.validate_timestamp_and_nonce(client_key, timestamp,
                nonce, **token):
            return False

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid client credentials.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy client is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable client enumeration
        valid_client = self.validate_client_key(client_key)
        if not valid_client:
            client_key = self.dummy_client

        # Ensure a valid redirection uri is used
        valid_redirect = self.validate_redirect_uri(client_key, callback_uri)

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid or expired token.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy token is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable resource owner enumeration
        if resource_owner_key:
            if require_verifier:
                valid_resource_owner = self.validate_request_token(
                    client_key, resource_owner_key)
            else:
                valid_resource_owner = self.validate_access_token(
                    client_key, resource_owner_key)
            if not valid_resource_owner:
                resource_owner_key = self.dummy_resource_owner
        else:
            valid_resource_owner = True

        # Note that `realm`_ is only used in authorization headers and how
        # it should be interpreted is not included in the OAuth spec.
        # However they could be seen as a scope or realm to which the
        # client has access and as such every client should be checked
        # to ensure it is authorized access to that scope or realm.
        # .. _`realm`: http://tools.ietf.org/html/rfc2617#section-1.2
        #
        # Note that early exit would enable client realm access enumeration.
        #
        # The require_realm indicates this is the first step in the OAuth
        # workflow where a client requests access to a specific realm.
        #
        # Clients obtaining an access token will not supply a realm and it will
        # not be checked. Instead the previously requested realm should be
        # transferred from the request token to the access token.
        #
        # Access to protected resources will always validate the realm but note
        # that the realm is now tied to the access token and not provided by
        # the client.
        if require_realm and not resource_owner_key:
            valid_realm = self.validate_requested_realm(client_key, realm)
        elif require_verifier:
            valid_realm = True
        else:
            valid_realm = self.validate_realm(client_key, resource_owner_key,
                    uri=request.uri, required_realm=required_realm)

        # The server MUST verify (Section 3.2) the validity of the request,
        # ensure that the resource owner has authorized the provisioning of
        # token credentials to the client, and ensure that the temporary
        # credentials have not expired or been used before.  The server MUST
        # also verify the verification code received from the client.
        # .. _`Section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
        #
        # Note that early exit would enable resource owner authorization
        # verifier enumeration.
        if verifier:
            valid_verifier = self.validate_verifier(client_key,
                resource_owner_key, verifier)
        else:
            valid_verifier = True

        # Parameters to Client depend on signature method which may vary
        # for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters

        request.params = filter(lambda x: x[0] != "oauth_signature", params)
        request.signature = request_signature

        # ---- RSA Signature verification ----
        if signature_method == SIGNATURE_RSA:
            # The server verifies the signature per `[RFC3447] section 8.2.2`_
            # .. _`[RFC3447] section 8.2.2`: http://tools.ietf.org/html/rfc3447#section-8.2.1
            rsa_key = self.get_rsa_key(client_key)
            valid_signature = signature.verify_rsa_sha1(request, rsa_key)

        # ---- HMAC or Plaintext Signature verification ----
        else:
            # Servers receiving an authenticated request MUST validate it by:
            #   Recalculating the request signature independently as described in
            #   `Section 3.4`_ and comparing it to the value received from the
            #   client via the "oauth_signature" parameter.
            # .. _`Section 3.4`: http://tools.ietf.org/html/rfc5849#section-3.4
            client_secret = self.get_client_secret(client_key)
            if require_verifier:
                resource_owner_secret = self.get_request_token_secret(
                    client_key, resource_owner_key)
            else:
                resource_owner_secret = self.get_access_token_secret(
                    client_key, resource_owner_key)

            if signature_method == SIGNATURE_HMAC:
                valid_signature = signature.verify_hmac_sha1(request,
                    client_secret, resource_owner_secret)
            else:
                valid_signature = signature.verify_plaintext(request,
                    client_secret, resource_owner_secret)

        # We delay checking validity until the very end, using dummy values for
        # calculations and fetching secrets/keys to ensure the flow of every
        # request remains almost identical regardless of whether valid values
        # have been supplied. This ensures near constant time execution and
        # prevents malicious users from guessing sensitive information
        v = all((valid_client, valid_resource_owner, valid_realm,
                    valid_redirect, valid_verifier, valid_signature))
        logger = logging.getLogger("oauthlib")
        if not v:
            logger.info("[Failure] OAuthLib request verification failed.")
            logger.info("Valid client:\t%s" % valid_client)
            logger.info("Valid token:\t%s\t(Required: %s" % (valid_resource_owner, require_resource_owner))
            logger.info("Valid realm:\t%s\t(Required: %s)" % (valid_realm, require_realm))
            logger.info("Valid callback:\t%s" % valid_redirect)
            logger.info("Valid verifier:\t%s\t(Required: %s)" % (valid_verifier, require_verifier))
            logger.info("Valid signature:\t%s" % valid_signature)
        return v
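
The tail of this example is a deliberate pattern: every check runs unconditionally and the boolean results are combined only at the end, so invalid requests take roughly as long as valid ones. A minimal sketch of that shape, using hypothetical validate_* stand-ins rather than oauthlib's real checks:

import logging

logger = logging.getLogger("oauthlib")

def validate_client(request):
    # Hypothetical stand-in: always performs the lookup, valid or not.
    return request.get("client_key") == "expected-key"

def validate_signature(request):
    # Hypothetical stand-in for the real HMAC/RSA verification.
    return request.get("oauth_signature") == "expected-signature"

def verify(request):
    # Compute every check, then combine. An early return on the first
    # failure would leak, via timing, which check rejected the request.
    valid_client = validate_client(request)
    valid_signature = validate_signature(request)
    v = all((valid_client, valid_signature))
    if not v:
        logger.info("[Failure] request verification failed.")
        logger.info("Valid client:\t%s", valid_client)
        logger.info("Valid signature:\t%s", valid_signature)
    return v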

Example 34

Project: daemonize
Source File: daemonize.py
View license
    def start(self):
        """
        Start daemonization process.
        """
        # If the pidfile already exists, read the old pid from it first; we
        # may need to restore it if locking fails, because opening the
        # pidfile for writing truncates its contents.
        old_pid = ""  # default, in case no pidfile exists yet
        if os.path.isfile(self.pid):
            with open(self.pid, "r") as old_pidfile:
                old_pid = old_pidfile.read()
        # Create a lockfile so that only one instance of this daemon is running at any time.
        try:
            lockfile = open(self.pid, "w")
        except IOError:
            print("Unable to create the pidfile.")
            sys.exit(1)
        try:
            # Try to get an exclusive lock on the file. This will fail if another process has the file
            # locked.
            fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            print("Unable to lock on the pidfile.")
            # We need to overwrite the pidfile if we got here.
            with open(self.pid, "w") as pidfile:
                pidfile.write(old_pid)
            sys.exit(1)

        # skip fork if foreground is specified
        if not self.foreground:
            # Fork, creating a new process for the child.
            try:
                process_id = os.fork()
            except OSError as e:
                self.logger.error("Unable to fork, errno: {0}".format(e.errno))
                sys.exit(1)
            if process_id != 0:
                # This is the parent process. Exit without cleanup,
                # see https://github.com/thesharp/daemonize/issues/46
                os._exit(0)
            # This is the child process. Continue.

            # Stop listening for signals that the parent process receives.
            # This is done by putting the process in a new session.
            # setpgrp() is an alternative to setsid().
            # setsid puts the process in a new session (and process group) and
            # detaches it from its controlling terminal.
            process_id = os.setsid()
            if process_id == -1:
                # Uh oh, there was a problem.
                sys.exit(1)

            # Add lockfile to self.keep_fds.
            self.keep_fds.append(lockfile.fileno())

            # Close all file descriptors, except the ones mentioned in self.keep_fds.
            devnull = "/dev/null"
            if hasattr(os, "devnull"):
                # Python has set os.devnull on this system; use it, as it
                # might differ from /dev/null.
                devnull = os.devnull

            if self.auto_close_fds:
                for fd in range(3, resource.getrlimit(resource.RLIMIT_NOFILE)[0]):
                    if fd not in self.keep_fds:
                        try:
                            os.close(fd)
                        except OSError:
                            pass

            devnull_fd = os.open(devnull, os.O_RDWR)
            os.dup2(devnull_fd, 0)
            os.dup2(devnull_fd, 1)
            os.dup2(devnull_fd, 2)
            os.close(devnull_fd)

        if self.logger is None:
            # Initialize logging.
            self.logger = logging.getLogger(self.app)
            self.logger.setLevel(logging.DEBUG)
            # Display log messages only on defined handlers.
            self.logger.propagate = False

            # Initialize syslog.
            # It will correctly work on OS X, Linux and FreeBSD.
            if sys.platform == "darwin":
                syslog_address = "/var/run/syslog"
            else:
                syslog_address = "/dev/log"

            # We will continue with syslog initialization only if the machine
            # we are running on actually has syslog capabilities.
            if os.path.exists(syslog_address):
                syslog = handlers.SysLogHandler(syslog_address)
                if self.verbose:
                    syslog.setLevel(logging.DEBUG)
                else:
                    syslog.setLevel(logging.INFO)
                # Try to mimic normal syslog messages.
                formatter = logging.Formatter("%(asctime)s %(name)s: %(message)s",
                                              "%b %e %H:%M:%S")
                syslog.setFormatter(formatter)

                self.logger.addHandler(syslog)

        # Set umask to default to safe file permissions when running as a root daemon. 027 is an
        # octal number which we are typing as 0o27 for Python3 compatibility.
        os.umask(0o27)

        # Change to a known directory. If this isn't done, starting a daemon in a subdirectory that
        # needs to be deleted results in "directory busy" errors.
        os.chdir(self.chdir)

        # Execute privileged action
        privileged_action_result = self.privileged_action()
        if not privileged_action_result:
            privileged_action_result = []

        # Change the owner of the pid file; this is required because the pid file will be removed at exit.
        uid, gid = -1, -1

        if self.group:
            try:
                gid = grp.getgrnam(self.group).gr_gid
            except KeyError:
                self.logger.error("Group {0} not found".format(self.group))
                sys.exit(1)

        if self.user:
            try:
                uid = pwd.getpwnam(self.user).pw_uid
            except KeyError:
                self.logger.error("User {0} not found.".format(self.user))
                sys.exit(1)

        if uid != -1 or gid != -1:
            os.chown(self.pid, uid, gid)

        # Change gid
        if self.group:
            try:
                os.setgid(gid)
            except OSError:
                self.logger.error("Unable to change gid.")
                sys.exit(1)

        # Change uid
        if self.user:
            try:
                uid = pwd.getpwnam(self.user).pw_uid
            except KeyError:
                self.logger.error("User {0} not found.".format(self.user))
                sys.exit(1)
            try:
                os.setuid(uid)
            except OSError:
                self.logger.error("Unable to change uid.")
                sys.exit(1)

        try:
            lockfile.write("%s" % (os.getpid()))
            lockfile.flush()
        except IOError:
            self.logger.error("Unable to write pid to the pidfile.")
            print("Unable to write pid to the pidfile.")
            sys.exit(1)

        # Set custom action on SIGTERM.
        signal.signal(signal.SIGTERM, self.sigterm)
        atexit.register(self.exit)

        self.logger.warning("Starting daemon.")

        try:
            self.action(*privileged_action_result)
        except Exception:
            for line in traceback.format_exc().split("\n"):
                self.logger.error(line)
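
The pidfile handling at the top of start() is worth isolating: an exclusive, non-blocking flock on the pidfile is what guarantees a single running instance. A minimal sketch of just that piece, assuming a POSIX system (fcntl is Unix-only):

import fcntl
import os
import sys

def acquire_pidfile(path):
    # Open (and truncate) the pidfile, then try to take an exclusive,
    # non-blocking lock; if another process holds it, flock raises
    # OSError immediately instead of blocking.
    lockfile = open(path, "w")
    try:
        fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        print("Another instance is already running.")
        sys.exit(1)
    # Record our pid and keep the file object alive: closing it would
    # release the lock.
    lockfile.write(str(os.getpid()))
    lockfile.flush()
    return lockfile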

Example 35

Project: joerd
Source File: download.py
View license
@contextmanager
def get(url, options={}):
    """
    Download a file to a temporary directory, returning it.

    The options provided will control the behaviour of the download algorithm.

      * 'tries' - The maximum number of tries to download the file before
        giving up and raising an exception.
      * 'timeout' - Timeout in seconds before considering the connection to
        have failed.
      * 'verifier' - A function which is called with a filelike object. It
        should return True if the file is okay and appears to be fully
        downloaded.
    """
    logger = logging.getLogger('download')

    with closing(tempfile.NamedTemporaryFile()) as tmp:
        # current file position = number of bytes read
        filepos = 0

        # file size when downloaded, if known
        filesize = None

        # number of attempts so far
        tries = 0

        # last try which resulted in some forward progress (i.e: filepos got
        # bigger)
        last_successful_try = 0

        # maximum number of attempts to make
        max_tries = options.get('tries', 1)

        # timeout for blocking operations (e.g: connect) in seconds
        timeout = options.get('timeout', 60)

        # verifier function
        verifier = options.get('verifier')

        # backoff function - to delay between retries
        backoff = options.get('backoff')

        # whether the server supports Range headers (if it doesn't we'll have
        # to restart from the beginning every time).
        accept_range = False

        # we need to download _something_ if the file position is less than the
        # known size, or the size is unknown.
        while filesize is None or filepos < filesize:
            # explode if we've exceeded the number of allowed attempts
            if tries >= max_tries:
                raise DownloadFailedError("Max tries exceeded (%d) while "
                                          "downloading file %r"
                                          % (max_tries, url))
            else:
                if backoff and tries > last_successful_try:
                    backoff(tries - last_successful_try)
                tries += 1

            req = urllib2.Request(url)

            # if the server supports accept range, and we have a partial
            # download, then attempt to resume it.
            if accept_range and filepos > 0:
                logger.info("Continuing (try %d/%d) at %d bytes: %r"
                            % (tries, max_tries, filepos, url))
                assert filesize is not None
                req.headers['Range'] = 'bytes=%s-%s' % (filepos, filesize - 1)
            else:
                # otherwise, truncate the file in readiness to download from
                # scratch.
                logger.info("Downloading (try %d/%d) %r"
                            % (tries, max_tries, url))
                filepos = 0
                tmp.seek(0, os.SEEK_SET)
                tmp.truncate(0)

            try:
                f = urllib2.urlopen(req, timeout=timeout)

                # try to get the filesize, if the server reports it.
                if filesize is None:
                    content_length = f.info().get('Content-Length')
                    if content_length is not None:
                        try:
                            filesize = int(content_length)
                        except ValueError:
                            pass

                # detect whether the server accepts Range requests.
                accept_range = f.info().get('Accept-Ranges') == 'bytes'

                # copy data from the server
                shutil.copyfileobj(f, tmp)

            except (IOError, httplib.HTTPException) as e:
                logger.debug("Got HTTP error: %s" % str(e))
                continue

            except ftplib.all_errors as e:
                logger.debug("Got FTP error: %s" % str(e))
                continue

            except socket.timeout as e:
                logger.debug("Got socket timeout: %s" % str(e))
                continue

            # update number of bytes read (this would be nicer if copyfileobj
            # returned it).
            old_filepos = filepos
            filepos = tmp.tell()
            if filepos > old_filepos:
                last_successful_try = tries

            # if we don't know how large the file is supposed to be, then
            # verify it every time.
            if filesize is None and verifier is not None:
                # reset tmp file to beginning for verification
                tmp.seek(0, os.SEEK_SET)
                if verifier(tmp):
                    break
                # no need to reset here - since filesize is None, we'll be
                # downloading from scratch, which will truncate the file.

        # verify the file, if it hasn't been verified before
        if filesize is not None and verifier is not None:
            # reset tmp file to beginning for verification
            tmp.seek(0, os.SEEK_SET)
            if not verifier(tmp):
                raise DownloadFailedError("File downloaded from %r failed "
                                          "verification" % url)

        tmp.seek(0, os.SEEK_SET)
        yield tmp
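
Stripped of the HTTP details, the loop above is a bounded-retry scheme around a named module logger. A sketch of the same shape, where fetch is a hypothetical callable that performs one attempt, returns True on completion, and raises IOError on transient failures:

import logging
import time

logger = logging.getLogger('download')

def download_with_retries(fetch, max_tries=3):
    tries = 0
    while tries < max_tries:
        if tries > 0:
            # Simple exponential backoff between attempts; the example
            # above instead delegates this to a pluggable backoff option.
            time.sleep(2 ** tries)
        tries += 1
        logger.info("Downloading (try %d/%d)", tries, max_tries)
        try:
            if fetch():
                return
        except IOError as e:
            logger.debug("Got error: %s", e)
    raise RuntimeError("Max tries exceeded (%d)" % max_tries)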

Example 36

Project: joerd
Source File: normal.py
View license
    def render(self, tmp_dir):
        logger = logging.getLogger('normal')

        bbox = self._mercator_bbox

        mid_dir = os.path.join(tmp_dir, self.output_dir,
                               str(self.z), str(self.x))
        mkdir_p(mid_dir)

        tile = self.tile_name()
        tile_file = os.path.join(tmp_dir, self.output_dir,
                                 tile + ".png")
        logger.debug("Generating tile %r..." % tile)

        filter_size = 10

        outfile = tile_file
        dst_bbox = bbox.bounds
        dst_x_size = 256
        dst_y_size = 256
        dst_x_res = float(dst_bbox[2] - dst_bbox[0]) / dst_x_size
        dst_y_res = float(dst_bbox[3] - dst_bbox[1]) / dst_y_size
        dst_srs = osr.SpatialReference()
        dst_srs.ImportFromEPSG(3857)

        # expand bbox & image to generate "bleed" for image filter
        mid_min_x = dst_bbox[0] - filter_size * dst_x_res
        mid_min_y = dst_bbox[1] - filter_size * dst_y_res
        mid_max_x = dst_bbox[2] + filter_size * dst_x_res
        mid_max_y = dst_bbox[3] + filter_size * dst_y_res
        filter_top_margin = filter_size
        filter_bot_margin = filter_size
        filter_lft_margin = filter_size
        filter_rgt_margin = filter_size

        # clip bounding box back to the edges of the world. GDAL can handle
        # wrapping around the world, but it doesn't give the results that
        # would be expected.
        if mid_min_x < -0.5 * mercator.MERCATOR_WORLD_SIZE:
            filter_lft_margin = 0
            mid_min_x = dst_bbox[0]
        if mid_min_y < -0.5 * mercator.MERCATOR_WORLD_SIZE:
            filter_bot_margin = 0
            mid_min_y = dst_bbox[1]
        if mid_max_x > 0.5 * mercator.MERCATOR_WORLD_SIZE:
            filter_rgt_margin = 0
            mid_max_x = dst_bbox[2]
        if mid_max_y > 0.5 * mercator.MERCATOR_WORLD_SIZE:
            filter_top_margin = 0
            mid_max_y = dst_bbox[3]

        mid_x_size = dst_x_size + filter_lft_margin + filter_rgt_margin
        mid_y_size = dst_y_size + filter_bot_margin + filter_top_margin
        mid_bbox = (mid_min_x, mid_min_y, mid_max_x, mid_max_y)

        mid_drv = gdal.GetDriverByName("MEM")
        mid_ds = mid_drv.Create('', mid_x_size, mid_y_size, 1, gdal.GDT_Float32)

        mid_gt = (mid_bbox[0], dst_x_res, 0,
                  mid_bbox[3], 0, -dst_y_res)
        mid_ds.SetGeoTransform(mid_gt)
        mid_ds.SetProjection(dst_srs.ExportToWkt())
        mid_ds.GetRasterBand(1).SetNoDataValue(mercator.FLT_NODATA)

        # figure out what the approximate scale of the output image is in
        # lat/lon coordinates. this is used to select the appropriate filter.
        ll_bbox = self._latlon_bbox
        ll_x_res = float(ll_bbox.bounds[2] - ll_bbox.bounds[0]) / dst_x_size
        ll_y_res = float(ll_bbox.bounds[3] - ll_bbox.bounds[1]) / dst_y_size

        # calculate the resolution of a pixel in real meters for both x and y.
        # this will be used to scale the gradient so that it's consistent
        # across zoom levels.
        ll_mid_x = 0.5 * (ll_bbox.bounds[2] + ll_bbox.bounds[0])
        ll_spc_x = 0.5 * (ll_bbox.bounds[2] - ll_bbox.bounds[0]) / dst_x_size
        ll_mid_y = 0.5 * (ll_bbox.bounds[3] + ll_bbox.bounds[1])
        ll_spc_y = 0.5 * (ll_bbox.bounds[3] - ll_bbox.bounds[1]) / dst_y_size
        geod = Geodesic.WGS84
        # NOTE: in defiance of predictability and regularity, the geod methods
        # take input as (lat, lon) in that order, rather than (x, y) as would
        # be sensible.
        # NOTE: at low zooms, taking the width across the tile starts to break
        # down, so we take the width across a small portion of the interior of
        # the tile instead.
        geodesic_res_x = -1.0 / \
                         geod.Inverse(ll_mid_y, ll_mid_x - ll_spc_x,
                                      ll_mid_y, ll_mid_x + ll_spc_x)['s12']
        geodesic_res_y = 1.0 / \
                         geod.Inverse(ll_mid_y - ll_spc_y, ll_mid_x,
                                      ll_mid_y + ll_spc_y, ll_mid_x)['s12']

        composite.compose(self, mid_ds, logger, min(ll_x_res, ll_y_res))

        pixels = mid_ds.GetRasterBand(1).ReadAsArray(0, 0, mid_x_size, mid_y_size)
        ygrad, xgrad = numpy.gradient(pixels, 2)
        img = numpy.dstack((geodesic_res_x * xgrad, geodesic_res_y * ygrad,
                            numpy.ones((mid_y_size, mid_x_size))))

        # first, we normalise to unit vectors. this puts each element of img
        # in the range (-1, 1). the "einsum" stuff is serious black magic, but
        # what it (should be) saying is "for each i,j in the rows and columns,
        # the output is the sum over k of img[i,j,k]*img[i,j,k]" - i.e: the
        # squared length of the vector at [i,j], which sqrt turns into a norm.
        norm = numpy.sqrt(numpy.einsum('ijk,ijk->ij', img, img))

        # the norm is now the "wrong shape" according to numpy, so we need to
        # copy the norm value out into RGB components.
        norm_copy = norm[:, :, numpy.newaxis]

        # dividing the img by norm_copy should give us RGB components with
        # values between -1 and 1, but we need values between 0 and 255 for
        # PNG channels. so we move and scale the values to fit in that range.
        scaled = (128.0 * (img / norm_copy + 1.0))

        # and finally clip it to (0, 255) just in case
        img = numpy.clip(scaled, 0.0, 255.0)

        # Create output as a 4-channel RGBA image, each (byte) channel
        # corresponds to x, y, z, h where x, y and z are the respective
        # components of the normal, and h is an index into a hypsometric tint
        # table (see HEIGHT_TABLE).
        dst_ds = mid_drv.Create('', dst_x_size, dst_y_size, 4, gdal.GDT_Byte)

        dst_gt = (dst_bbox[0], dst_x_res, 0,
                  dst_bbox[3], 0, -dst_y_res)
        dst_ds.SetGeoTransform(dst_gt)
        dst_ds.SetProjection(dst_srs.ExportToWkt())

        # apply the height mapping function to get the table index.
        func = numpy.vectorize(_height_mapping_func)
        hyps = func(pixels).astype(numpy.uint8)

        # extract the area without the "bleed" margin.
        ext = img[filter_top_margin:(filter_top_margin+dst_y_size), \
                  filter_lft_margin:(filter_lft_margin+dst_x_size)]
        dst_ds.GetRasterBand(1).WriteArray(ext[...,0].astype(numpy.uint8))
        dst_ds.GetRasterBand(2).WriteArray(ext[...,1].astype(numpy.uint8))
        dst_ds.GetRasterBand(3).WriteArray(ext[...,2].astype(numpy.uint8))

        # add hypsometric tint index as alpha channel
        dst_ds.GetRasterBand(4).WriteArray(
            hyps[filter_top_margin:(filter_top_margin+dst_y_size),
                 filter_lft_margin:(filter_lft_margin+dst_x_size)])

        png_drv = gdal.GetDriverByName("PNG")
        png_ds = png_drv.CreateCopy(tile_file, dst_ds)

        # explicitly delete the datasources. the Python-GDAL docs suggest that
        # this is a good idea not only to dispose of memory buffers but also
        # to ensure that the backing file handles are closed.
        del png_ds
        del dst_ds
        del mid_ds

        assert os.path.isfile(tile_file)

        source_names = [type(s).__name__ for s in self.sources]
        logger.info("Done generating tile %r from %s"
                    % (tile, ", ".join(source_names)))

Example 37

Project: aamporter
Source File: aamporter.py
View license
def main():
    usage = """

%prog [options] path/to/plist [path/to/more/plists..]
%prog --build-product-plist [path/to/CCP/pkg/file.ccp] [--munki-update-for BaseProductPkginfoName]

The first form will check and cache updates for the channels listed in the product plists
given as arguments.

The second form will generate a product plist containing all channel IDs contained in the
installer metadata. Accepts either a path to a .ccp file (from Creative Cloud Packager) or
a mounted ESD volume path for CS6-and-earlier installers.

See %prog --help for more options and the README for more detail."""

    o = optparse.OptionParser(usage=usage)
    o.add_option("-l", "--platform", type='choice', choices=['mac', 'win'], default='mac',
        help="Download Adobe updates for Mac or Windows. Available options are 'mac' or 'win', defaults to 'mac'.")
    o.add_option("-m", "--munkiimport", action="store_true", default=False,
        help="Process downloaded updates with munkiimport using options defined in %s." % os.path.basename(settings_plist))
    o.add_option("-r", "--include-revoked", action="store_true", default=False,
        help="Include updates that have been marked as revoked in Adobe's feed XML.")
    o.add_option("--skip-cc", action="store_true", default=False,
        help=("Skip updates for Creative Cloud updates. Useful for certain updates for "
              "CS-era applications that incorporate CC subscription updates."))
    o.add_option("-f", "--force-import", action="store_true", default=False,
        help="Run munkiimport even if it finds an identical pkginfo and installer_item_hash in the repo.")
    o.add_option("-c", "--make-catalogs", action="store_true", default=False,
        help="Automatically run makecatalogs after importing into Munki.")
    o.add_option("-p", "--product-plist", "--plist", action="append", default=[],
        help="Deprecated option for specifying product plists, kept for compatibility. Instead, pass plist paths \
as arguments.")
    o.add_option("-b", "--build-product-plist", action="store",
        help="Given a path to either a mounted Adobe product ESD installer or a .ccp file from a package built with CCP, \
save a product plist containing every Channel ID found for the product. Plist is saved to the current working directory.")
    o.add_option("-u", "--munki-update-for", action="store",
        help="To be used with the --build-product-plist option, specifies the base Munki product.")
    o.add_option("-v", "--verbose", action="count", default=0,
        help="Output verbosity. Can be specified either '-v' or '-vv'.")
    o.add_option("--no-colors", action="store_true", default=False,
        help="Disable colored ANSI output.")
    o.add_option("--no-progressbar", action="store_true", default=False,
        help="Disable the progress indicator.")

    opts, args = o.parse_args()

    # setup logging
    global L
    L = logging.getLogger('com.github.aamporter')
    log_stdout_handler = logging.StreamHandler(stream=sys.stdout)
    log_stdout_handler.setFormatter(ColorFormatter(
        use_color=not opts.no_colors))
    L.addHandler(log_stdout_handler)
    # INFO is level 30, so each verbose option count lowers level by 10
    L.setLevel(INFO - (10 * opts.verbose))

    # arg/opt processing
    if len(sys.argv) == 1:
        o.print_usage()
        sys.exit(0)

    # any args we just pass through to the "legacy" --product-plist/--plist options
    if args:
        opts.product_plist.extend(args)
    if opts.munki_update_for and not opts.build_product_plist:
        errorExit("--munki-update-for requires the --build-product-plist option!")
    if not opts.build_product_plist and not opts.product_plist:
        errorExit("One of --product-plist or --build-product-plist must be specified!")
    if opts.platform == 'win' and opts.munkiimport:
        errorExit("Cannot use the --munkiimport option with --platform win option!")

    if opts.build_product_plist:
        esd_path = opts.build_product_plist
        if esd_path.endswith('/'):
            esd_path = esd_path[0:-1]
        plist = buildProductPlist(esd_path, opts.munki_update_for)
        if not plist:
            errorExit("Couldn't build payloads from path %s." % esd_path)
        else:
            if opts.munki_update_for:
                output_plist_name = opts.munki_update_for
            else:
                output_plist_name = os.path.basename(esd_path.replace(' ', ''))
            output_plist_name += '.plist'
            output_plist_file = os.path.join(os.getcwd(), output_plist_name)
            if os.path.exists(output_plist_file):
                errorExit("A file already exists at %s, not going to overwrite." %
                    output_plist_file)
            try:
                plistlib.writePlist(plist, output_plist_file)
            except:
                errorExit("Error writing plist to %s" % output_plist_file)
            print "Product plist written to %s" % output_plist_file
            sys.exit(0)

    # munki sanity checks
    if opts.munkiimport:
        if not os.path.exists('/usr/local/munki'):
            errorExit("No Munki installation could be found. Get it at http://code.google.com/p/munki")
        sys.path.insert(0, MUNKI_DIR)
        munkiimport_prefs = os.path.expanduser('~/Library/Preferences/com.googlecode.munki.munkiimport.plist')
        if pref('munki_tool') == 'munkiimport':
            if not os.path.exists(munkiimport_prefs):
                errorExit("Your Munki repo seems to not be configured. Run munkiimport --configure first.")
            try:
                import imp
                # munkiimport doesn't end in .py, so we use imp to make it available to the import system
                imp.load_source('munkiimport', os.path.join(MUNKI_DIR, 'munkiimport'))
                import munkiimport
                munkiimport.REPO_PATH = munkiimport.pref('repo_path')
            except ImportError:
                errorExit("There was an error importing munkilib, which is needed for --munkiimport functionality.")

            # rewrite some of munkiimport's function names since they were changed to
            # snake case around 2.6.1:
            # https://github.com/munki/munki/commit/e3948104e869a6a5eb6b440559f4c57144922e71
            try:
                munkiimport.repoAvailable()
            except AttributeError:
                munkiimport.repoAvailable = munkiimport.repo_available
                munkiimport.makePkgInfo = munkiimport.make_pkginfo
                munkiimport.findMatchingPkginfo = munkiimport.find_matching_pkginfo
                munkiimport.makeCatalogs = munkiimport.make_catalogs
            if not munkiimport.repoAvailable():
                errorExit("The Munki repo cannot be located. This tool is not interactive; first ensure the repo is mounted.")

    # set up the cache path
    local_cache_path = pref('local_cache_path')
    if os.path.exists(local_cache_path) and not os.path.isdir(local_cache_path):
        errorExit("Local cache path %s was specified and exists, but it is not a directory!" %
            local_cache_path)
    elif not os.path.exists(local_cache_path):
        try:
            os.mkdir(local_cache_path)
        except OSError:
            errorExit("Local cache path %s could not be created. Verify permissions." %
                local_cache_path)
        except:
            errorExit("Unknown error creating local cache path %s." % local_cache_path)
    if not os.access(local_cache_path, os.W_OK):
        # os.access returns a boolean rather than raising, so test it directly.
        errorExit("Cannot write to local cache path %s!" % local_cache_path)

    # load our product plists
    product_plists = []
    for plist_path in opts.product_plist:
        try:
            plist = plistlib.readPlist(plist_path)
        except:
            errorExit("Couldn't read plist at %s!" % plist_path)
        if 'channels' not in plist.keys():
            errorExit("Plist at %s is missing a 'channels' array, which is required." % plist_path)
        else:
            product_plists.append(plist)

    # sanity-check the settings plist for unknown keys
    if os.path.exists(settings_plist):
        try:
            app_options = plistlib.readPlist(settings_plist)
        except:
            errorExit("There was an error loading the settings plist at %s" % settings_plist)
        for k in app_options.keys():
            if k not in supported_settings_keys:
                print "Warning: Unknown setting in %s: %s" % (os.path.basename(settings_plist), k)

    L.log(INFO, "Starting aamporter run..")
    if opts.munkiimport:
        L.log(INFO, "Will import into Munki (--munkiimport option given).")

    L.log(DEBUG, "aamporter preferences:")
    for key in supported_settings_keys:
        L.log(DEBUG, " - {0}: {1}".format(key, pref(key)))

    if (sys.version_info.minor, sys.version_info.micro) == (7, 10):
        global NONSSL_ADOBE_URL
        NONSSL_ADOBE_URL = True
        L.log(VERBOSE, ("Python 2.7.10 detected, using HTTP feed URLs to work "
                        "around SSL issues."))

    # pull feed info and populate channels
    L.log(INFO, "Retrieving feed data..")
    feed = getFeedData(opts.platform)
    parsed = parseFeedData(feed)
    channels = getChannelsFromProductPlists(product_plists)
    L.log(INFO, "Processing the following Channel IDs:")
    [ L.log(INFO, "  - %s" % channel) for channel in sorted(channels) ]

    # begin caching run and build updates dictionary with product/version info
    updates = {}
    for channelid in channels.keys():
        L.log(VERBOSE, "Getting updates for Channel ID %s.." % channelid)
        channel_updates = getUpdatesForChannel(channelid, parsed)
        if not channel_updates:
            L.log(DEBUG, "No updates for channel %s" % channelid)
            continue
        channel_updates = addUpdatesXML(channel_updates, opts.platform, skipTargetLicensingCC=opts.skip_cc)

        for update in channel_updates:
            L.log(VERBOSE, "Considering update %s, %s.." % (update.product, update.version))

            if opts.include_revoked is False:
                highest_version = getHighestVersionOfProduct(channel_updates, update.product)
                if update.version != highest_version:
                    L.log(DEBUG, "%s is not the highest version available (%s) for this update. Skipping.." % (
                        update.version, highest_version))
                    continue

                if updateIsRevoked(update.channel, update.product, update.version, parsed):
                    L.log(DEBUG, "Update is revoked. Skipping update.")
                    continue

                file_element = update.xml.find('InstallFiles/File')
                if file_element is None:
                    L.log(DEBUG, "No File XML element found. Skipping update.")
                else:
                    filename = file_element.find('Name').text
                    update_bytes = file_element.find('Size').text
                    description = update.xml.find('Description/en_US').text
                    display_name = update.xml.find('DisplayName/en_US').text

                    if not update.product in updates.keys():
                        updates[update.product] = {}
                    if not update.version in updates[update.product].keys():
                        updates[update.product][update.version] = {}
                        updates[update.product][update.version]['channel_ids'] = []
                        updates[update.product][update.version]['update_for'] = []
                    updates[update.product][update.version]['channel_ids'].append(update.channel)
                    for opt in ['munki_repo_destination_path',
                                'munki_update_for',
                                'makepkginfo_options']:
                        if opt in channels[update.channel].keys():
                            updates[update.product][update.version][opt] = channels[update.channel][opt]
                    updates[update.product][update.version]['description'] = description
                    updates[update.product][update.version]['display_name'] = display_name
                    dmg_url = urljoin(getURL('updates'), UPDATE_PATH_PREFIX + opts.platform) + \
                            '/%s/%s/%s' % (update.product, update.version, filename)
                    output_filename = os.path.join(local_cache_path, "%s-%s.%s" % (
                            update.product, update.version, 'dmg' if opts.platform == 'mac' else 'zip'))
                    updates[update.product][update.version]['local_path'] = output_filename
                    need_to_dl = True
                    if os.path.exists(output_filename):
                        we_have_bytes = os.stat(output_filename).st_size
                        if we_have_bytes == int(update_bytes):
                            L.log(INFO, "Skipping download of %s %s, it is already cached."
                                % (update.product, update.version))
                            need_to_dl = False
                        else:
                            L.log(VERBOSE, "Incomplete download (%s bytes on disk, should be %s), re-starting." % (
                                we_have_bytes, update_bytes))
                    if need_to_dl:
                        L.log(INFO, "Downloading %s %s (%s bytes) to %s" % (update.product, update.version, update_bytes, output_filename))
                        if opts.no_progressbar:
                            urllib.urlretrieve(dmg_url, output_filename)
                        else:
                            urllib.urlretrieve(dmg_url, output_filename, reporthook)

    L.log(INFO, "Done caching updates.")

    # begin munkiimport run
    if opts.munkiimport:
        L.log(INFO, "Beginning Munki imports..")
        for (update_name, update_meta) in updates.items():
            for (version_name, version_meta) in update_meta.items():
                need_to_import = True
                item_name = "%s%s" % (update_name.replace('-', '_'),
                    pref('munki_pkginfo_name_suffix'))
                # Do 'exists in repo' checks if we're not forcing imports
                if opts.force_import is False and pref("munki_tool") == "munkiimport":
                    pkginfo = munkiimport.makePkgInfo(['--name',
                                            item_name,
                                            version_meta['local_path']],
                                            False)
                    # Cribbed from munkiimport
                    L.log(VERBOSE, "Looking for a matching pkginfo for %s %s.." % (
                        item_name, version_name))
                    matchingpkginfo = munkiimport.findMatchingPkginfo(pkginfo)
                    if matchingpkginfo:
                        L.log(VERBOSE, "Got a matching pkginfo.")
                        if ('installer_item_hash' in matchingpkginfo and
                            matchingpkginfo['installer_item_hash'] ==
                            pkginfo.get('installer_item_hash')):
                            need_to_import = False
                            L.log(INFO,
                                ("We have an exact match for %s %s in the repo. Skipping.." % (
                                    item_name, version_name)))
                    else:
                        need_to_import = True

                if need_to_import:
                    munkiimport_opts = pref('munkiimport_options')[:]
                    if pref("munki_tool") == 'munkiimport':
                        if 'munki_repo_destination_path' in version_meta.keys():
                            subdir = version_meta['munki_repo_destination_path']
                        else:
                            subdir = pref('munki_repo_destination_path')
                        munkiimport_opts.append('--subdirectory')
                        munkiimport_opts.append(subdir)
                    if not version_meta['munki_update_for']:
                        L.log(WARNING,
                            "Warning: {0} does not have an 'update_for' key "
                            "specified in the product plist!".format(item_name))
                        update_catalogs = []
                    else:
                        # handle case of munki_update_for being either a list or a string
                        flatten = lambda *n: (e for a in n
                            for e in (flatten(*a) if isinstance(a, (tuple, list)) else (a,)))
                        update_catalogs = list(flatten(version_meta['munki_update_for']))
                        for base_product in update_catalogs:
                            munkiimport_opts.append('--update_for')
                            munkiimport_opts.append(base_product)
                    munkiimport_opts.extend(['--name', item_name,
                                             '--displayname', version_meta['display_name'],
                                             '--description', version_meta['description']])

                    if 'makepkginfo_options' in version_meta:
                        L.log(VERBOSE,
                            "Appending makepkginfo options: %s" %
                            " ".join(version_meta['makepkginfo_options']))
                        munkiimport_opts += version_meta['makepkginfo_options']

                    if pref('munki_tool') == 'munkiimport':
                        import_cmd = ['/usr/local/munki/munkiimport', '--nointeractive']
                    elif pref('munki_tool') == 'makepkginfo':
                        import_cmd = ['/usr/local/munki/makepkginfo']
                    else:
                        # TODO: validate this pref earlier
                        L.log(ERROR, "Not sure what tool you wanted to use; munki_tool should be 'munkiimport' " + \
                        "or 'makepkginfo' but we got '%s'.  Skipping import." % (pref('munki_tool')))
                        break
                    # Load our app munkiimport options overrides last
                    import_cmd += munkiimport_opts
                    import_cmd.append(version_meta['local_path'])

                    L.log(INFO, "Importing {0} {1} into Munki. Update for: {2}".format(
                        item_name, version_name, ', '.join(update_catalogs)))
                    L.log(VERBOSE, "Calling %s on %s version %s, file %s." % (
                        pref('munki_tool'),
                        update_name,
                        version_name,
                        version_meta['local_path']))
                    munkiprocess = subprocess.Popen(import_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    # wait for the process to terminate
                    stdout, stderr = munkiprocess.communicate()
                    import_retcode = munkiprocess.returncode
                    if import_retcode:
                        L.log(ERROR, "munkiimport returned an error. Skipping update..")
                    else:
                        if pref('munki_tool') == 'makepkginfo':
                            plist_path = os.path.splitext(version_meta['local_path'])[0] + ".plist"
                            with open(plist_path, "w") as plist:
                                plist.write(stdout)
                                L.log(INFO, "pkginfo written to %s" % plist_path)


        L.log(INFO, "Done Munki imports.")
        if opts.make_catalogs:
            munkiimport.makeCatalogs()
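
The logging setup near the top of main() encodes a common CLI convention: each -v lowers the logger threshold by one step. The INFO/VERBOSE/DEBUG constants above are aamporter's own; a minimal sketch of the same mapping using only the standard library levels:

import argparse
import logging
import sys

parser = argparse.ArgumentParser()
parser.add_argument("-v", "--verbose", action="count", default=0)
opts = parser.parse_args()

log = logging.getLogger("com.example.cli")  # hypothetical logger name
log.addHandler(logging.StreamHandler(stream=sys.stdout))

# Standard levels: WARNING=30, INFO=20, DEBUG=10. Each -v drops one
# step, floored at DEBUG so extra -v flags don't go below it.
log.setLevel(max(logging.WARNING - 10 * opts.verbose, logging.DEBUG))

log.warning("always shown")
log.info("shown with -v")
log.debug("shown with -vv")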

Example 38

Project: Fiona
Source File: collect.py
View license
@click.command(short_help="Collect a sequence of features.")
@cligj.precision_opt
@cligj.indent_opt
@cligj.compact_opt
@click.option('--record-buffered/--no-record-buffered', default=False,
              help="Economical buffering of writes at record, not collection "
              "(default), level.")
@click.option('--ignore-errors/--no-ignore-errors', default=False,
              help="log errors but do not stop serialization.")
@options.src_crs_opt
@click.option('--with-ld-context/--without-ld-context', default=False,
              help="add a JSON-LD context to JSON output.")
@click.option('--add-ld-context-item', multiple=True,
              help="map a term to a URI and add it to the output's JSON LD "
                   "context.")
@click.option('--parse/--no-parse', default=True,
              help="load and dump the geojson feature (default is True)")
@click.pass_context
def collect(ctx, precision, indent, compact, record_buffered, ignore_errors,
            src_crs, with_ld_context, add_ld_context_item, parse):
    """Make a GeoJSON feature collection from a sequence of GeoJSON
    features and print it."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    if src_crs:
        if not parse:
            raise click.UsageError("Can't specify --src-crs with --no-parse")
        transformer = partial(transform_geom, src_crs, 'EPSG:4326',
                              antimeridian_cutting=True, precision=precision)
    else:
        transformer = lambda x: x

    first_line = next(stdin)

    # If parsing geojson
    if parse:
        # If input is RS-delimited JSON sequence.
        if first_line.startswith(u'\x1e'):
            def feature_text_gen():
                buffer = first_line.strip(u'\x1e')
                for line in stdin:
                    if line.startswith(u'\x1e'):
                        if buffer:
                            feat = json.loads(buffer)
                            feat['geometry'] = transformer(feat['geometry'])
                            yield json.dumps(feat, **dump_kwds)
                        buffer = line.strip(u'\x1e')
                    else:
                        buffer += line
                else:
                    feat = json.loads(buffer)
                    feat['geometry'] = transformer(feat['geometry'])
                    yield json.dumps(feat, **dump_kwds)
        else:
            def feature_text_gen():
                feat = json.loads(first_line)
                feat['geometry'] = transformer(feat['geometry'])
                yield json.dumps(feat, **dump_kwds)

                for line in stdin:
                    feat = json.loads(line)
                    feat['geometry'] = transformer(feat['geometry'])
                    yield json.dumps(feat, **dump_kwds)

    # If *not* parsing geojson
    else:
        # If input is RS-delimited JSON sequence.
        if first_line.startswith(u'\x1e'):
            def feature_text_gen():
                buffer = first_line.strip(u'\x1e')
                for line in stdin:
                    if line.startswith(u'\x1e'):
                        if buffer:
                            yield buffer
                        buffer = line.strip(u'\x1e')
                    else:
                        buffer += line
                else:
                    yield buffer
        else:
            def feature_text_gen():
                yield first_line
                for line in stdin:
                    yield line

    try:
        source = feature_text_gen()

        if record_buffered:
            # Buffer GeoJSON data at the feature level for smaller
            # memory footprint.
            indented = bool(indent)
            rec_indent = "\n" + " " * (2 * (indent or 0))

            collection = {
                'type': 'FeatureCollection',
                'features': []}
            if with_ld_context:
                collection['@context'] = helpers.make_ld_context(
                    add_ld_context_item)

            head, tail = json.dumps(collection, **dump_kwds).split('[]')

            sink.write(head)
            sink.write("[")

            # Try the first record.
            try:
                i, first = 0, next(source)
                if with_ld_context:
                    first = helpers.id_record(first)
                if indented:
                    sink.write(rec_indent)
                sink.write(first.replace("\n", rec_indent))
            except StopIteration:
                pass
            except Exception as exc:
                # Ignoring errors is *not* the default.
                if ignore_errors:
                    logger.error(
                        "failed to serialize file record %d (%s), "
                        "continuing",
                        i, exc)
                else:
                    # Log error and close up the GeoJSON, leaving it
                    # more or less valid no matter what happens above.
                    logger.critical(
                        "failed to serialize file record %d (%s), "
                        "quiting",
                        i, exc)
                    sink.write("]")
                    sink.write(tail)
                    if indented:
                        sink.write("\n")
                    raise

            # Because trailing commas aren't valid in JSON arrays
            # we'll write the item separator before each of the
            # remaining features.
            for i, rec in enumerate(source, 1):
                try:
                    if with_ld_context:
                        rec = helpers.id_record(rec)
                    if indented:
                        sink.write(rec_indent)
                    sink.write(item_sep)
                    sink.write(rec.replace("\n", rec_indent))
                except Exception as exc:
                    if ignore_errors:
                        logger.error(
                            "failed to serialize file record %d (%s), "
                            "continuing",
                            i, exc)
                    else:
                        logger.critical(
                            "failed to serialize file record %d (%s), "
                            "quiting",
                            i, exc)
                        sink.write("]")
                        sink.write(tail)
                        if indented:
                            sink.write("\n")
                        raise

            # Close up the GeoJSON after writing all features.
            sink.write("]")
            sink.write(tail)
            if indented:
                sink.write("\n")

        else:
            # Buffer GeoJSON data at the collection level. The default.
            collection = {
                'type': 'FeatureCollection',
                'features': []}
            if with_ld_context:
                collection['@context'] = helpers.make_ld_context(
                    add_ld_context_item)

            head, tail = json.dumps(collection, **dump_kwds).split('[]')
            sink.write(head)
            sink.write("[")
            sink.write(",".join(source))
            sink.write("]")
            sink.write(tail)
            sink.write("\n")

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
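
The record-buffered branch shows a trick for streaming a syntactically valid JSON array: emit the first element bare and write the separator before each subsequent element, since trailing commas are not allowed in JSON. A stripped-down sketch:

import json
import sys

def write_array(sink, items):
    # Stream items as a JSON array without buffering the whole list.
    sink.write("[")
    first = True
    for item in items:
        if not first:
            # Separator goes *before* every element after the first, so
            # we never leave a dangling comma at the end.
            sink.write(", ")
        sink.write(json.dumps(item))
        first = False
    sink.write("]\n")

write_array(sys.stdout, ({"id": i} for i in range(3)))
# prints: [{"id": 0}, {"id": 1}, {"id": 2}]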

Example 39

Project: Fiona
Source File: dump.py
View license
@click.command(short_help="Dump a dataset to GeoJSON.")
@click.argument('input', type=click.Path(), required=True)
@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
              help="Print information about a specific layer.  The first "
                   "layer is used by default.  Layers use zero-based "
                   "numbering when accessed by index.")
@click.option('--encoding', help="Specify encoding of the input file.")
@cligj.precision_opt
@cligj.indent_opt
@cligj.compact_opt
@click.option('--record-buffered/--no-record-buffered', default=False,
              help="Economical buffering of writes at record, not collection "
                   "(default), level.")
@click.option('--ignore-errors/--no-ignore-errors', default=False,
              help="log errors but do not stop serialization.")
@click.option('--with-ld-context/--without-ld-context', default=False,
              help="add a JSON-LD context to JSON output.")
@click.option('--add-ld-context-item', multiple=True,
              help="map a term to a URI and add it to the output's JSON LD "
                   "context.")
@click.pass_context
def dump(ctx, input, encoding, precision, indent, compact, record_buffered,
         ignore_errors, with_ld_context, add_ld_context_item, layer):

    """Dump a dataset either as a GeoJSON feature collection (the default)
    or a sequence of GeoJSON features."""

    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    open_kwds = {}
    if encoding:
        open_kwds['encoding'] = encoding
    if layer:
        open_kwds['layer'] = layer

    def transformer(crs, feat):
        tg = partial(transform_geom, crs, 'EPSG:4326',
                     antimeridian_cutting=True, precision=precision)
        feat['geometry'] = tg(feat['geometry'])
        return feat

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(input, **open_kwds) as source:
                meta = source.meta
                meta['fields'] = dict(source.schema['properties'].items())

                if record_buffered:
                    # Buffer GeoJSON data at the feature level for smaller
                    # memory footprint.
                    indented = bool(indent)
                    rec_indent = "\n" + " " * (2 * (indent or 0))

                    collection = {
                        'type': 'FeatureCollection',
                        'fiona:schema': meta['schema'],
                        'fiona:crs': meta['crs'],
                        'features': []}
                    if with_ld_context:
                        collection['@context'] = helpers.make_ld_context(
                            add_ld_context_item)

                    head, tail = json.dumps(
                        collection, **dump_kwds).split('[]')

                    sink.write(head)
                    sink.write("[")

                    itr = iter(source)

                    # Try the first record.
                    try:
                        i, first = 0, next(itr)
                        first = transformer(source.crs, first)
                        if with_ld_context:
                            first = helpers.id_record(first)
                        if indented:
                            sink.write(rec_indent)
                        sink.write(json.dumps(
                            first, **dump_kwds).replace("\n", rec_indent))
                    except StopIteration:
                        pass
                    except Exception as exc:
                        # Ignoring errors is *not* the default.
                        if ignore_errors:
                            logger.error(
                                "failed to serialize file record %d (%s), "
                                "continuing",
                                i, exc)
                        else:
                            # Log error and close up the GeoJSON, leaving it
                            # more or less valid no matter what happens above.
                            logger.critical(
                                "failed to serialize file record %d (%s), "
                                "quiting",
                                i, exc)
                            sink.write("]")
                            sink.write(tail)
                            if indented:
                                sink.write("\n")
                            raise

                    # Because trailing commas aren't valid in JSON arrays
                    # we'll write the item separator before each of the
                    # remaining features.
                    for i, rec in enumerate(itr, 1):
                        rec = transformer(source.crs, rec)
                        try:
                            if with_ld_context:
                                rec = helpers.id_record(rec)
                            if indented:
                                sink.write(rec_indent)
                            sink.write(item_sep)
                            sink.write(json.dumps(
                                rec, **dump_kwds).replace("\n", rec_indent))
                        except Exception as exc:
                            if ignore_errors:
                                logger.error(
                                    "failed to serialize file record %d (%s), "
                                    "continuing",
                                    i, exc)
                            else:
                                logger.critical(
                                    "failed to serialize file record %d (%s), "
                                    "quiting",
                                    i, exc)
                                sink.write("]")
                                sink.write(tail)
                                if indented:
                                    sink.write("\n")
                                raise

                    # Close up the GeoJSON after writing all features.
                    sink.write("]")
                    sink.write(tail)
                    if indented:
                        sink.write("\n")

                else:
                    # Buffer GeoJSON data at the collection level. The default.
                    collection = {
                        'type': 'FeatureCollection',
                        'fiona:schema': meta['schema'],
                        'fiona:crs': meta['crs']}
                    if with_ld_context:
                        collection['@context'] = helpers.make_ld_context(
                            add_ld_context_item)
                        collection['features'] = [
                            helpers.id_record(transformer(rec))
                            for rec in source]
                    else:
                        collection['features'] = [
                            transformer(source.crs, rec) for rec in source]
                    json.dump(collection, sink, **dump_kwds)

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
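
Both Fiona commands close with the same guard, which is a useful pattern in its own right: logger.exception logs the message at ERROR level together with the current traceback, and re-raising click.Abort() makes the command exit non-zero without click printing a second traceback. A sketch of that shape, assuming click is installed:

import logging
import click

logger = logging.getLogger('fio')

@click.command()
def cmd():
    try:
        raise ValueError("boom")  # stand-in for the real work
    except Exception:
        # exception() implies ERROR level and appends the traceback of
        # the exception currently being handled.
        logger.exception("Exception caught during processing")
        raise click.Abort()

if __name__ == '__main__':
    cmd()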

Example 40

Project: udpack
Source File: udpack.py
View license
def main_cli():
    import argparse
    import configparser
    import signal, sys
    
    def sigterm_handler(signal, frame):
        sys.exit(0)
    
    # Parse command line arguments
    parser = argparse.ArgumentParser(description = 'UDPack is an obfuscating UDP '
            'proxy / relay, generally used in pairs to first obfuscate and '
            'then unobfuscate UDP traffic.', 
            epilog = 'remote-addr, listen-addr and packer can also be '
            'specified in the configuration file. Command line arguments '
            'take precedence.')
    
    parser.add_argument('configfile', type=argparse.FileType('r'), help=
            'Configuration file. Contains packer options.')
    parser.add_argument('--verbose', '-v', action='count', help='Increase ' 
        'verbosity level for application debug log. Specify once to see '
        'WARNING, twice to see INFO and thrice for DEBUG.')
    parser.add_argument('--access-log', '-a', help='Access log filename. '
        'Information on connecting clients will be written to this file, in '
        'addition to being printed to the console.')
    parser.add_argument('--remote-addr', '-r', help='Remote host,port to '
        'connect to. Separate host and port with a comma.')
    parser.add_argument('--listen-addr', '-l', help='Local host,port to listen '
        'on. Separate host and port with a comma.')
    parser.add_argument('--packer', '-p', help='Packer used for processing data.')
    
    args = parser.parse_args()
    
    # Set logging
    logger = logging.getLogger('udpack')
    logger.setLevel(logging.DEBUG)
    
    logconsole = logging.StreamHandler()
    logconsoleformatter = logging.Formatter('[%(asctime)s] %(name)-6s '
            '%(levelname)-8s %(message)s')
    logconsole.setFormatter(logconsoleformatter)
    if args.verbose is None:
        logconsole.setLevel(logging.ERROR)
    elif args.verbose == 1:
        logconsole.setLevel(logging.WARNING)
    elif args.verbose == 2:
        logconsole.setLevel(logging.INFO)
    else:
        logconsole.setLevel(logging.DEBUG)
    
    logger.addHandler(logconsole)
    
    logger.debug("Verbosity level set")
    logger.debug("Arguments:")
    logger.debug(args)
    
    accesslogger = logging.getLogger('access')
    accesslogger.setLevel(logging.DEBUG)
    
    accesslogconsole = logging.StreamHandler()
    accesslogfileformatter = logging.Formatter('[%(asctime)s]%(message)s')
    accesslogconsole.setFormatter(accesslogfileformatter)
    accesslogconsole.setLevel(logging.INFO)
    
    accesslogger.addHandler(accesslogconsole)
    
    if args.access_log is not None:
        accesslogfile = logging.FileHandler(args.access_log)
        accesslogfile.setFormatter(accesslogfileformatter)
        accesslogfile.setLevel(logging.INFO)
        accesslogger.addHandler(accesslogfile)
    
    # Read config file
    conffile = configparser.ConfigParser(empty_lines_in_values=False)
    conffile.read_file(args.configfile)
    args.configfile.close()
    
    logger.debug('Config file read')
    
    # Collect options for main application
    local_config = {}
    for c in ('remote_addr', 'listen_addr', 'packer'):
        if vars(args)[c] is not None:
            local_config[c] = vars(args)[c]
        elif c in conffile['udpack']:
            local_config[c] = conffile['udpack'][c]
        else:
            raise RuntimeError('Option {} not specified in either command line '
                'options or config file'.format(c))
    
    for c in ('remote_addr', 'listen_addr'):
        local_config[c] = tuple(local_config[c].split(','))
    
    local_config['connect_timeout'] = conffile.getint('udpack', 
            'connect_timeout', fallback=PACKER_DEFAULT_CONNECT_TIMEOUT)
    
    local_config['idle_timeout'] = conffile.getint('udpack',
            'idle_timeout', fallback=PACKER_DEFAULT_IDLE_TIMEOUT)
    
    try:
        local_config['packer'] = {
            'straightthroughpacker': UDPackStraightThroughPacker,
            'straightthroughunpacker': UDPackStraightThroughUnpacker,
            'shufflepacker': UDPackShufflePacker,
            'shuffleunpacker': UDPackShuffleUnpacker,
            'xorpatchpacker': UDPackXORPatchPacker,
            'xorpatchunpacker': UDPackXORPatchUnpacker,
            'toymodelencryptionpacker': UDPackToyModelEncryptionPacker,
            'toymodelencryptionunpacker': UDPackToyModelEncryptionUnpacker,
            'toymodelencryptiondelaypacker': UDPackToyModelEncryptionDelayPacker,
            'toymodelencryptiondelayunpacker': UDPackToyModelEncryptionDelayUnpacker
            }[local_config['packer'].lower()]
    except KeyError:
        raise RuntimeError('Invalid packer "{}"'.format(local_config['packer']))
    
    
    # Create listening connection
    loop = asyncio.get_event_loop()
    receiver = loop.create_datagram_endpoint(
        lambda: UDPackReceiverProtocol(loop, local_config['remote_addr'], 
                local_config['packer'], conffile,
                local_config['connect_timeout'],
                local_config['idle_timeout']),
        local_addr = local_config['listen_addr'])
    transport, protocol = loop.run_until_complete(receiver)
    
    # Run until interrupt
    try:
        signal.signal(signal.SIGTERM, sigterm_handler)
        while True:
            # Workaround for Python Issue 23057 in Windows
            # https://bugs.python.org/issue23057
            loop.run_until_complete(asyncio.sleep(1))
    except (KeyboardInterrupt, SystemExit) as e:
        logger.info("Received {}".format(repr(e)))
    finally:
        logger.info("Terminating")
        transport.abort()
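
Example 40 maps the -v count to handler levels by hand and leaves the logger itself at DEBUG, so the handler alone decides what reaches the console. A condensed sketch of that setup, using a hypothetical make_console_logger helper (not part of udpack):

import logging

def make_console_logger(name, verbosity=0):
    # Map the -v count to a handler level; 3 or more means DEBUG.
    levels = {0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO}
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)  # defer filtering to the handler
    handler = logging.StreamHandler()
    handler.setLevel(levels.get(verbosity, logging.DEBUG))
    handler.setFormatter(logging.Formatter(
        '[%(asctime)s] %(name)-6s %(levelname)-8s %(message)s'))
    logger.addHandler(handler)
    return logger

logger = make_console_logger('udpack', verbosity=2)
logger.info("visible at -vv and above")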

Example 41

Project: INGInious
Source File: database_updater.py
View license
def update_database(database, gridfs, course_factory, user_manager):
    """
    Checks the database version and update the db if necessary
    :param course_factory: the course factory
    """

    logger = logging.getLogger("inginious.db_update")

    db_version = database.db_version.find_one({})
    if db_version is None:
        db_version = 0
    else:
        db_version = db_version['db_version']

    if db_version < 1:
        logger.info("Updating database to db_version 1")
        # Init the database
        database.submissions.ensure_index([("username", pymongo.ASCENDING)])
        database.submissions.ensure_index([("courseid", pymongo.ASCENDING)])
        database.submissions.ensure_index([("courseid", pymongo.ASCENDING), ("taskid", pymongo.ASCENDING)])
        database.submissions.ensure_index([("submitted_on", pymongo.DESCENDING)])  # sort speed

        database.user_tasks.ensure_index([("username", pymongo.ASCENDING), ("courseid", pymongo.ASCENDING), ("taskid", pymongo.ASCENDING)],
                                         unique=True)
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING), ("courseid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("courseid", pymongo.ASCENDING), ("taskid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("courseid", pymongo.ASCENDING)])
        database.user_tasks.ensure_index([("username", pymongo.ASCENDING)])

        db_version = 1

    if db_version < 2:
        logger.info("Updating database to db_version 2")
        # Register users that submitted some tasks to the related courses
        data = database.user_tasks.aggregate([{"$group": {"_id": "$courseid", "usernames": {"$addToSet": "$username"}}}])
        for r in list(data):
            try:
                course = course_factory.get_course(r['_id'])
                for u in r['usernames']:
                    user_manager.course_register_user(course, u, force=True)
            except:
                logger.error("There was an error while updating the database. Some users may have been unregistered from the course {}".format(r['_id']))
        db_version = 2

    if db_version < 3:
        logger.info("Updating database to db_version 3")
        # Add the grade for all the old submissions
        database.submissions.update({}, {"$set": {"grade": 0.0}}, multi=True)
        database.submissions.update({"result": "success"}, {"$set": {"grade": 100.0}}, multi=True)
        database.user_tasks.update({}, {"$set": {"grade": 0.0}}, multi=True)
        database.user_tasks.update({"succeeded": True}, {"$set": {"grade": 100.0}}, multi=True)
        db_version = 3

    if db_version < 4:
        logger.info("Updating database to db_version 4")
        submissions = database.submissions.find({"$where": "!Array.isArray(this.username)"})
        for submission in submissions:
            submission["username"] = [submission["username"]]
            database.submissions.save(submission)
        db_version = 4

    if db_version < 5:
        logger.info("Updating database to db_version 5")
        database.drop_collection("users")
        database.submissions.update_many({}, {"$set": {"response_type": "html"}})
        db_version = 5

    if db_version < 6:
        logger.info("Updating database to db_version 6")
        course_list = list(database.registration.aggregate([
            {"$match": {}},
            {
                "$group": {
                    "_id": "$courseid",
                    "students": {"$addToSet": "$username"}
                }
            }]))

        classrooms = {}
        for course in course_list:
            classrooms[course["_id"]] = {"courseid": course["_id"], "groups": [], "description": "Default classroom", "default": True,
                                         "students": course["students"], "tutors": set([])}

        group_list = list(database.groups.find({}, {'_id': 0}))
        for group in group_list:
            classrooms[group["course_id"]]["groups"].append({"size": group["size"], "students": group["users"]})
            classrooms[group["course_id"]]["tutors"] = classrooms[group["course_id"]]["tutors"].union(group["tutors"])

        for i, classroom in classrooms.items():
            classroom["tutors"] = list(classroom["tutors"])
            database.classrooms.insert(classroom)

        database.classrooms.create_index([("students", pymongo.ASCENDING)])
        database.classrooms.create_index([("groups.students", pymongo.ASCENDING)])

        db_version = 6

    if db_version < 7:
        logger.info("Updating database to db_version 7")
        database.submissions.update_many({}, {"$set": {"custom": {}}})
        db_version = 7

    if db_version < 8:
        logger.info("Updating database to db_version 8")
        database.classrooms.rename("aggregations")
        db_version = 8

    if db_version < 9:
        logger.info("Updating database to db_version 9")
        user_tasks = list(database.user_tasks.find())

        for user_task in user_tasks:
            username = user_task['username']
            taskid = user_task['taskid']
            courseid = user_task['courseid']

            tasks = list(database.submissions.find(
                {"username": username, "courseid": courseid, "taskid": taskid},
                projection=["_id", "status", "result", "grade", "submitted_on"],
                sort=[('submitted_on', pymongo.DESCENDING)]))

            # Before db v9, the default submission for evaluation was the best
            idx_best = -1
            for idx, val in enumerate(tasks):
                if idx_best == -1 or (val["status"] == "done" and tasks[idx_best]["grade"] < val["grade"]):
                    idx_best = idx

            # If a best submission was found, update it; otherwise set submissionid to None
            if idx_best != -1:
                database.user_tasks.update_one({"username": username, "courseid": courseid, "taskid": taskid}, {"$set": {"submissionid": tasks[idx_best]["_id"]}})
            else:
                database.user_tasks.update_one({"username": username, "courseid": courseid, "taskid": taskid},
                                               {"$set": {"submissionid": None}})

        db_version = 9

    # Consistency bug: submissions must have an associated user task
    if db_version < 10:
        logger.info("Updating database to db_version 10")
        triplets = list(database.submissions.aggregate([{"$unwind": "$username"}, {"$group": {"_id": {"username": "$username", "taskid": "$taskid", "courseid": "$courseid"}}}]))
        for triplet in triplets:
            data = triplet['_id']
            user_task = database.user_tasks.find_one(data)
            if not user_task:
                submissions = list(database.submissions.find(data))
                data['tried'] = 0
                data['succeeded'] = False
                data['grade'] = -1
                data['submissionid'] = None
                for submission in submissions:
                    data['tried'] += 1
                    if "result" in submission and submission["result"] == "success":
                        data['succeeded'] = True
                    if "grade" in submission and data['grade'] < submission['grade']:
                        data['grade'] = submission['grade']
                        data['submissionid'] = submission['_id']

                database.user_tasks.insert(data)

        db_version = 10

    # Fix consistency bug in v9 and v10: crashed submissions could also be the set submission
    if db_version < 11:
        logger.info("Updating database to db_version 11")
        user_tasks = list(database.user_tasks.find())

        for user_task in user_tasks:
            username = user_task['username']
            taskid = user_task['taskid']
            courseid = user_task['courseid']

            if user_task["submissionid"] is None:
                tasks = list(database.submissions.find(
                    {"username": username, "courseid": courseid, "taskid": taskid},
                    projection=["_id", "status", "result", "grade", "submitted_on"],
                    sort=[('submitted_on', pymongo.DESCENDING)]))
                if len(tasks) > 0:
                    # No set submission with len(submissions) > 0 should not happen.
                    # Update 9 fixed this for successful submissions, so all of
                    # these crashed; set the most recent one.
                    database.user_tasks.update_one({"username": username, "courseid": courseid, "taskid": taskid},
                                                   {"$set": {"submissionid": tasks[0]["_id"]}})

        db_version = 11

    database.db_version.update({}, {"$set": {"db_version": db_version}}, upsert=True)
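
The name "inginious.db_update" is not arbitrary: logging.getLogger treats dots as a hierarchy, so level and handler configuration applied to an ancestor also governs the updater's records. A small sketch of that behavior (the messages are illustrative):

import logging

# Records propagate up the dotted hierarchy to the root handler
# installed by basicConfig; the nearest ancestor with a level set
# ("inginious") supplies the effective level for the child.
logging.basicConfig(format='%(name)s %(levelname)s %(message)s')
logging.getLogger("inginious").setLevel(logging.INFO)

logger = logging.getLogger("inginious.db_update")
logger.info("Updating database to db_version 1")  # emitted
logger.debug("index details")  # filtered by the parent's INFO level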

Example 42

Project: galah
Source File: consumer.py
View license
def _run():
    logger = logging.getLogger("galah.sheep.%s" % threading.currentThread().name)
    logger.info("Consumer starting.")

    # Initialize the correct consumer suite.
    virtual_suite = get_virtual_suite(config["VIRTUAL_SUITE"])
    consumer = virtual_suite.Consumer(logger)

    # Set up the socket to send/receive messages to/from the shepherd
    shepherd = universal.context.socket(zmq.DEALER)
    shepherd.linger = 0
    shepherd.connect(config["shepherd/SHEEP_SOCKET"])

    # Loop until the program is shutting down
    while not universal.exiting:
        # Prepare a VM and make sure we're completely prepared to handle a test
        # request before asking the shepherd for one.
        logger.info("Waiting for virtual machine to become available...")
        machine_id = consumer.prepare_machine()

        def bleet():
            shepherd.send_json(FlockMessage("bleet", "").to_dict())

            # Figure out when we should send the next bleet
            return (
                datetime.datetime.now() + config["shepherd/BLEET_TIMEOUT"] / 2
            )

        # Send the initial bleet so that the shepherd knows we're available
        logger.info("Ready for test request. Sending initial bleet.")
        next_bleet_time = bleet()

        # Set to True whenever the shepherd bloots. Set to False every time we
        # bleet. If this variable is still False by the time it's time to bleet
        # again we know we've lost the shepherd.
        shepherd_blooted = False

        # Process traffic from shepherd.
        while True:
            try:
                message = exithelpers.recv_json(
                    shepherd,
                    timeout = max(
                        1, # 1 millisecond (0 would imply infinite timeout)
                        (next_bleet_time - datetime.datetime.now()).seconds
                            * 1000
                    )
                )

                message = FlockMessage(message["type"], message["body"])
            except exithelpers.Timeout:
                if not shepherd_blooted:
                    raise universal.ShepherdLost()

                logger.debug("Sending bleet.")
                next_bleet_time = bleet()
                shepherd_blooted = False

                continue

            if message.type == "bloot":
                logger.debug("Got bloot.")
                shepherd_blooted = True

            elif message.type == "identify":
                logger.info(
                    "Received request to identify. Sending environment."
                )

                # identify is a valid response to a bleet.
                shepherd_blooted = True

                identification = FlockMessage(
                    type = "environment",
                    body = universal.environment
                )

                shepherd.send_json(identification.to_dict())

            elif message.type == "request":
                # Received test request from the shepherd
                logger.info("Test request received, running tests.")
                logger.debug("Test request: %s", str(message))
                result = consumer.run_test(machine_id, message.body)

                # Check to see if the test harness crashed/somehow testing was
                # unable to be done.
                if result is None:
                    result = {
                        "failed": True
                    }

                # Add in the submission id to the result that we send back
                result["id"] = str(message.body["submission"]["id"])

                logger.info("Testing completed, sending results to shepherd.")
                logger.debug("Raw test results: %s", str(result))
                shepherd.send_json(FlockMessage("result", result).to_dict())

                # Wait for the shepherd to acknowledge the result. Ignore any
                # messages that we get from the shepherd besides an acknowledge.
                deadline = \
                    datetime.datetime.now() + datetime.timedelta(seconds = 30)
                while True:
                    try:
                        confirmation = exithelpers.recv_json(
                            shepherd,
                            timeout = max(
                                1, # 1 millisecond (0 would imply infinite timeout)
                                (deadline - datetime.datetime.now()).seconds
                                    * 1000
                            )
                        )

                        confirmation = FlockMessage(
                            confirmation["type"], confirmation["body"]
                        )
                    except exithelpers.Timeout:
                        raise universal.ShepherdLost(result = result)

                    logger.debug("Received message: %s", str(confirmation))

                    if confirmation.type == "bloot" and \
                            confirmation.body == result["id"]:
                        shepherd_blooted = True
                        break

                break

Example 43

Project: termite-data-server
Source File: ldap_auth.py
View license
def ldap_auth(server='ldap', port=None,
              base_dn='ou=users,dc=domain,dc=com',
              mode='uid', secure=False, cert_path=None, cert_file=None,
              bind_dn=None, bind_pw=None, filterstr='objectClass=*',
              username_attrib='uid',
              custom_scope='subtree',
              allowed_groups=None,
              manage_user=False,
              user_firstname_attrib='cn:1',
              user_lastname_attrib='cn:2',
              user_mail_attrib='mail',
              manage_groups=False,
              db=None,
              group_dn=None,
              group_name_attrib='cn',
              group_member_attrib='memberUid',
              group_filterstr='objectClass=*',
              logging_level='error'):

    """
    to use ldap login with MS Active Directory:

        from gluon.contrib.login_methods.ldap_auth import ldap_auth
        auth.settings.login_methods.append(ldap_auth(
            mode='ad', server='my.domain.controller',
            base_dn='ou=Users,dc=domain,dc=com'))

    to use ldap login with Notes Domino:

        auth.settings.login_methods.append(ldap_auth(
            mode='domino',server='my.domino.server'))

    to use ldap login with OpenLDAP:

        auth.settings.login_methods.append(ldap_auth(
            server='my.ldap.server', base_dn='ou=Users,dc=domain,dc=com'))

    to use ldap login with OpenLDAP and subtree search and (optionally)
    multiple DNs:

        auth.settings.login_methods.append(ldap_auth(
            mode='uid_r', server='my.ldap.server',
            base_dn=['ou=Users,dc=domain,dc=com','ou=Staff,dc=domain,dc=com']))

    or (if using CN):

        auth.settings.login_methods.append(ldap_auth(
            mode='cn', server='my.ldap.server',
            base_dn='ou=Users,dc=domain,dc=com'))

    or you can fully customize the user search:

        auth.settings.login_methods.append(ldap_auth(
            mode='custom', server='my.ldap.server',
            base_dn='ou=Users,dc=domain,dc=com',
            username_attrib='uid',
            custom_scope='subtree'))

    the custom_scope can be: base, onelevel, subtree.

    If using secure ldaps:// pass secure=True and cert_path="..."
    If ldap is using GnuTLS then you need cert_file="..." instead of cert_path
    because cert_path isn't implemented in GnuTLS :(

    If you need to bind to the directory with an admin account in order to
    search it then specify bind_dn & bind_pw to use for this.
    - currently only implemented for Active Directory

    If you need to restrict the set of allowed users (e.g. to members of a
    department) then specify an rfc4515 search filter string.
    - currently only implemented for mode in ['ad', 'company', 'uid_r']

    You can manage user attributes first name, last name, email from ldap:
        auth.settings.login_methods.append(ldap_auth(...as usual...,
            manage_user=True,
            user_firstname_attrib='cn:1',
            user_lastname_attrib='cn:2',
            user_mail_attrib='mail'
           ))

    Where:
    manage_user - let web2py handle user data from ldap
    user_firstname_attrib - the attribute containing the user's first name
                            optionally you can specify parts.
                            Example: cn: "John Smith" - 'cn:1'='John'
    user_lastname_attrib - the attribute containing the user's last name
                            optionally you can specify parts.
                            Example: cn: "John Smith" - 'cn:2'='Smith'
    user_mail_attrib - the attribute containing the user's email address


    If you need group control from ldap to web2py app's database feel free
    to set:

        auth.settings.login_methods.append(ldap_auth(...as usual...,
            manage_groups=True,
            db=db,
            group_dn='ou=Groups,dc=domain,dc=com',
            group_name_attrib='cn',
            group_member_attrib='memberUid',
            group_filterstr='objectClass=*'
           ))

        Where:
        manage_groups - let web2py handle the groups from ldap
        db - is the database object (need to have auth_user, auth_group,
            auth_membership)
        group_dn - the ldap branch of the groups
        group_name_attrib - the attribute where the group name is stored
        group_member_attrib - the attribute containing the group member names
        group_filterstr - like filterstr, but for selecting groups

    You can restrict login access to specific groups if you specify:

        auth.settings.login_methods.append(ldap_auth(...as usual...,
            allowed_groups=[...],
            group_dn='ou=Groups,dc=domain,dc=com',
            group_name_attrib='cn',
            group_member_attrib='memberUid',#use 'member' for Active Directory
            group_filterstr='objectClass=*'
           ))

        Where:
        allowed_groups - a list with allowed ldap group names
        group_dn - the ldap branch of the groups
        group_name_attrib - the attribute where the group name is stored
        group_member_attrib - the attribute containing the group member names
        group_filterstr - like filterstr, but for selecting groups

    If using Active Directory you must specify bind_dn and bind_pw for
    allowed_groups unless anonymous bind works.

    You can set the logging level with the "logging_level" parameter, default
    is "error" and can be set to error, warning, info, debug.
    """
    logger = logging.getLogger('web2py.auth.ldap_auth')
    if logging_level == 'error':
        logger.setLevel(logging.ERROR)
    elif logging_level == 'warning':
        logger.setLevel(logging.WARNING)
    elif logging_level == 'info':
        logger.setLevel(logging.INFO)
    elif logging_level == 'debug':
        logger.setLevel(logging.DEBUG)

    def ldap_auth_aux(username,
                      password,
                      ldap_server=server,
                      ldap_port=port,
                      ldap_basedn=base_dn,
                      ldap_mode=mode,
                      ldap_binddn=bind_dn,
                      ldap_bindpw=bind_pw,
                      secure=secure,
                      cert_path=cert_path,
                      cert_file=cert_file,
                      filterstr=filterstr,
                      username_attrib=username_attrib,
                      custom_scope=custom_scope,
                      manage_user=manage_user,
                      user_firstname_attrib=user_firstname_attrib,
                      user_lastname_attrib=user_lastname_attrib,
                      user_mail_attrib=user_mail_attrib,
                      manage_groups=manage_groups,
                      allowed_groups=allowed_groups,
                      db=db):
        if password == '':  # http://tools.ietf.org/html/rfc4513#section-5.1.2
            logger.warning('blank password not allowed')
            return False
        logger.debug('mode: [%s] manage_user: [%s] custom_scope: [%s]'
                     ' manage_groups: [%s]' % (str(mode), str(manage_user),
                     str(custom_scope), str(manage_groups)))
        if manage_user:
            if user_firstname_attrib.count(':') > 0:
                (user_firstname_attrib,
                 user_firstname_part) = user_firstname_attrib.split(':', 1)
                user_firstname_part = (int(user_firstname_part) - 1)
            else:
                user_firstname_part = None
            if user_lastname_attrib.count(':') > 0:
                (user_lastname_attrib,
                 user_lastname_part) = user_lastname_attrib.split(':', 1)
                user_lastname_part = (int(user_lastname_part) - 1)
            else:
                user_lastname_part = None
            user_firstname_attrib = ldap.filter.escape_filter_chars(
                user_firstname_attrib)
            user_lastname_attrib = ldap.filter.escape_filter_chars(
                user_lastname_attrib)
            user_mail_attrib = ldap.filter.escape_filter_chars(
                user_mail_attrib)
        try:
            if allowed_groups:
                if not is_user_in_allowed_groups(username, password):
                    return False
            con = init_ldap()
            if ldap_mode == 'ad':
                # Microsoft Active Directory
                if '@' not in username:
                    domain = []
                    for x in ldap_basedn.split(','):
                        if "DC=" in x.upper():
                            domain.append(x.split('=')[-1])
                    username = "%[email protected]%s" % (username, '.'.join(domain))
                username_bare = username.split("@")[0]
                con.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
                # In cases where ForestDnsZones and DomainDnsZones are found,
                # result will look like the following:
                # ['ldap://ForestDnsZones.domain.com/DC=ForestDnsZones,
                #    DC=domain,DC=com']
                if ldap_binddn:
                    # need to search directory with an admin account 1st
                    con.simple_bind_s(ldap_binddn, ldap_bindpw)
                else:
                    # credentials should be in the form of username@domain.com
                    con.simple_bind_s(username, password)
                # this will throw an index error if the account is not found
                # in the ldap_basedn
                requested_attrs = ['sAMAccountName']
                if manage_user:
                    requested_attrs.extend([user_firstname_attrib,
                                           user_lastname_attrib,
                                           user_mail_attrib])
                result = con.search_ext_s(
                    ldap_basedn, ldap.SCOPE_SUBTREE,
                    "(&(sAMAccountName=%s)(%s))" % (
                                ldap.filter.escape_filter_chars(username_bare),
                                filterstr),
                    requested_attrs)[0][1]
                if not isinstance(result, dict):
                    # result should be a dict in the form
                    # {'sAMAccountName': [username_bare]}
                    logger.warning('User [%s] not found!' % username)
                    return False
                if ldap_binddn:
                    # We know the user exists & is in the correct OU
                    # so now we just check the password
                    con.simple_bind_s(username, password)
                username = username_bare

            if ldap_mode == 'domino':
                # Notes Domino
                if "@" in username:
                    username = username.split("@")[0]
                con.simple_bind_s(username, password)
                if manage_user:
                    # TODO: sorry I have no clue how to query attrs in domino
                    result = {user_firstname_attrib: username,
                              user_lastname_attrib: None,
                              user_mail_attrib: None}

            if ldap_mode == 'cn':
                # OpenLDAP (CN)
                if ldap_binddn and ldap_bindpw:
                    con.simple_bind_s(ldap_binddn, ldap_bindpw)
                dn = "cn=" + username + "," + ldap_basedn
                con.simple_bind_s(dn, password)
                if manage_user:
                    result = con.search_s(dn, ldap.SCOPE_BASE,
                                          "(objectClass=*)",
                                          [user_firstname_attrib,
                                          user_lastname_attrib,
                                          user_mail_attrib])[0][1]

            if ldap_mode == 'uid':
                # OpenLDAP (UID)
                if ldap_binddn and ldap_bindpw:
                    con.simple_bind_s(ldap_binddn, ldap_bindpw)
                    dn = "uid=" + username + "," + ldap_basedn
                    dn = con.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, "(uid=%s)"%username, [''])[0][0]
                else:
                    dn = "uid=" + username + "," + ldap_basedn
                con.simple_bind_s(dn, password)
                if manage_user:
                    result = con.search_s(dn, ldap.SCOPE_BASE,
                                          "(objectClass=*)",
                                          [user_firstname_attrib,
                                          user_lastname_attrib,
                                          user_mail_attrib])[0][1]

            if ldap_mode == 'company':
                # no DNs or password needed to search directory
                dn = ""
                pw = ""
                # bind anonymously
                con.simple_bind_s(dn, pw)
                # search by e-mail address
                filter = '(&(mail=%s)(%s))' % (
                                ldap.filter.escape_filter_chars(username),
                                filterstr)
                # find the uid
                attrs = ['uid']
                if manage_user:
                    attrs.extend([user_firstname_attrib,
                                  user_lastname_attrib,
                                  user_mail_attrib])
                # perform the actual search
                company_search_result = con.search_s(ldap_basedn,
                                                     ldap.SCOPE_SUBTREE,
                                                     filter, attrs)
                dn = company_search_result[0][0]
                result = company_search_result[0][1]
                # perform the real authentication test
                con.simple_bind_s(dn, password)

            if ldap_mode == 'uid_r':
                # OpenLDAP (UID) with subtree search and multiple DNs
                if isinstance(ldap_basedn, list):
                    basedns = ldap_basedn
                else:
                    basedns = [ldap_basedn]
                filter = '(&(uid=%s)(%s))' % (
                    ldap.filter.escape_filter_chars(username), filterstr)
                found = False
                for basedn in basedns:
                    try:
                        result = con.search_s(basedn, ldap.SCOPE_SUBTREE,
                                              filter)
                        if result:
                            user_dn = result[0][0]
                            # Check the password
                            con.simple_bind_s(user_dn, password)
                            found = True
                            break
                    except ldap.LDAPError, detail:
                        (exc_type, exc_value) = sys.exc_info()[:2]
                        logger.warning(
                        "ldap_auth: searching %s for %s resulted in %s: %s\n" %
                                       (basedn, filter, exc_type, exc_value)
                                       )
                if not found:
                    logger.warning('User [%s] not found!' % username)
                    return False
                result = result[0][1]
            if ldap_mode == 'custom':
                # OpenLDAP (username_attrs) with subtree search and
                # multiple DNs
                if isinstance(ldap_basedn, list):
                    basedns = ldap_basedn
                else:
                    basedns = [ldap_basedn]
                filter = '(&(%s=%s)(%s))' % (username_attrib,
                                             ldap.filter.escape_filter_chars(
                                                 username),
                                             filterstr)
                if custom_scope == 'subtree':
                    ldap_scope = ldap.SCOPE_SUBTREE
                elif custom_scope == 'base':
                    ldap_scope = ldap.SCOPE_BASE
                elif custom_scope == 'onelevel':
                    ldap_scope = ldap.SCOPE_ONELEVEL
                found = False
                for basedn in basedns:
                    try:
                        result = con.search_s(basedn, ldap_scope, filter)
                        if result:
                            user_dn = result[0][0]
                            # Check the password
                            con.simple_bind_s(user_dn, password)
                            found = True
                            break
                    except ldap.LDAPError, detail:
                        (exc_type, exc_value) = sys.exc_info()[:2]
                        logger.warning(
                        "ldap_auth: searching %s for %s resulted in %s: %s\n" %
                                       (basedn, filter, exc_type, exc_value)
                                       )
                if not found:
                    logger.warning('User [%s] not found!' % username)
                    return False
                result = result[0][1]
            if manage_user:
                logger.info('[%s] Manage user data' % str(username))
                try:
                    if user_firstname_part is not None:
                        store_user_firstname = result[user_firstname_attrib][
                            0].split(' ', 1)[user_firstname_part]
                    else:
                        store_user_firstname = result[user_firstname_attrib][0]
                except KeyError, e:
                    store_user_firstname = None
                try:
                    if user_lastname_part is not None:
                        store_user_lastname = result[user_lastname_attrib][
                            0].split(' ', 1)[user_lastname_part]
                    else:
                        store_user_lastname = result[user_lastname_attrib][0]
                except KeyError, e:
                    store_user_lastname = None
                try:
                    store_user_mail = result[user_mail_attrib][0]
                except KeyError, e:
                    store_user_mail = None
                try:
                    #
                    # user as username
                    # #################
                    user_in_db = db(db.auth_user.username == username)
                    if user_in_db.count() > 0:
                        user_in_db.update(first_name=store_user_firstname,
                                          last_name=store_user_lastname,
                                          email=store_user_mail)
                    else:
                        db.auth_user.insert(first_name=store_user_firstname,
                                            last_name=store_user_lastname,
                                            email=store_user_mail,
                                            username=username)
                except:
                    #
                    # user as email
                    # ##############
                    user_in_db = db(db.auth_user.email == username)
                    if user_in_db.count() > 0:
                        user_in_db.update(first_name=store_user_firstname,
                                          last_name=store_user_lastname)
                    else:
                        db.auth_user.insert(first_name=store_user_firstname,
                                            last_name=store_user_lastname,
                                            email=username)
            con.unbind()

            if manage_groups:
                if not do_manage_groups(username, password):
                    return False
            return True
        except ldap.INVALID_CREDENTIALS, e:
            return False
        except ldap.LDAPError, e:
            import traceback
            logger.warning('[%s] Error in ldap processing' % str(username))
            logger.debug(traceback.format_exc())
            return False
        except IndexError, ex:  # for AD membership test
            import traceback
            logger.warning('[%s] Ldap result indexing error' % str(username))
            logger.debug(traceback.format_exc())
            return False

    def is_user_in_allowed_groups(username,
                                  password=None,
                                  allowed_groups=allowed_groups):
        """
        Figure out if the username is a member of an allowed group
        in ldap or not
        """
        #
        # Get all group names the user actually belongs to in ldap
        # #########################################################
        ldap_groups_of_the_user = get_user_groups_from_ldap(username, password)

        # search for allowed group names
        if type(allowed_groups) != type(list()):
            allowed_groups = [allowed_groups]
        for group in allowed_groups:
            if ldap_groups_of_the_user.count(group) > 0:
                # Match
                return True
        # No match
        return False

    def do_manage_groups(username,
                         password=None,
                         db=db):
        """
        Manage user groups

        Get all of the user's groups from ldap and refresh the ones
        already stored in web2py's application database, creating new
        groups according to ldap as needed.
        """
        logger.info('[%s] Manage user groups' % str(username))
        try:
            #
            # Get all group names the user actually belongs to in ldap
            # #########################################################
            ldap_groups_of_the_user = get_user_groups_from_ldap(
                username, password)

            #
            # Get all group names the user actually belongs to in the local db
            # #############################################################
            try:
                db_user_id = db(db.auth_user.username == username).select(
                    db.auth_user.id).first().id
            except:
                try:
                    db_user_id = db(db.auth_user.email == username).select(
                        db.auth_user.id).first().id
                except AttributeError, e:
                    #
                    # There is no user in local db
                    # We create one
                    # ##############################
                    try:
                        db_user_id = db.auth_user.insert(username=username,
                                                         first_name=username)
                    except AttributeError, e:
                        db_user_id = db.auth_user.insert(email=username,
                                                         first_name=username)
            if not db_user_id:
                logging.error(
                    'There is no username or email for %s!' % username)
                raise
            db_group_search = db((db.auth_membership.user_id == db_user_id) &
                            (db.auth_user.id == db.auth_membership.user_id) &
                            (db.auth_group.id == db.auth_membership.group_id))
            db_groups_of_the_user = list()
            db_group_id = dict()

            if db_group_search.count() > 0:
                for group in db_group_search.select(db.auth_group.id,
                                                    db.auth_group.role,
                                                    distinct=True):
                    db_group_id[group.role] = group.id
                    db_groups_of_the_user.append(group.role)
            logging.debug('db groups of user %s: %s' %
                          (username, str(db_groups_of_the_user)))

            #
            # Delete user membership from groups where user is not anymore
            # #############################################################
            for group_to_del in db_groups_of_the_user:
                if ldap_groups_of_the_user.count(group_to_del) == 0:
                    db((db.auth_membership.user_id == db_user_id) &
                       (db.auth_membership.group_id == \
                         db_group_id[group_to_del])).delete()

            #
            # Create user membership in groups where user is not in already
            # ##############################################################
            for group_to_add in ldap_groups_of_the_user:
                if db_groups_of_the_user.count(group_to_add) == 0:
                    if db(db.auth_group.role == group_to_add).count() == 0:
                        gid = db.auth_group.insert(role=group_to_add,
                                            description='Generated from LDAP')
                    else:
                        gid = db(db.auth_group.role == group_to_add).select(
                            db.auth_group.id).first().id
                    db.auth_membership.insert(user_id=db_user_id,
                                              group_id=gid)
        except:
            logger.warning("[%s] Groups are not managed successfully!" %
                           str(username))
            import traceback
            logger.debug(traceback.format_exc())
            return False
        return True

    def init_ldap(ldap_server=server,
                  ldap_port=port,
                  ldap_basedn=base_dn,
                  ldap_mode=mode,
                  secure=secure,
                  cert_path=cert_path,
                  cert_file=cert_file):
        """
        Initialize ldap connection
        """
        logger.info('[%s] Initialize ldap connection' % str(ldap_server))
        if secure:
            if not ldap_port:
                ldap_port = 636
            con = ldap.initialize(
                "ldaps://" + ldap_server + ":" + str(ldap_port))
            if cert_path:
                con.set_option(ldap.OPT_X_TLS_CACERTDIR, cert_path)
            if cert_file:
                con.set_option(ldap.OPT_X_TLS_CACERTFILE, cert_file)
        else:
            if not ldap_port:
                ldap_port = 389
            con = ldap.initialize(
                "ldap://" + ldap_server + ":" + str(ldap_port))
        return con

    def get_user_groups_from_ldap(username,
                                  password=None,
                                  base_dn=base_dn,
                                  ldap_binddn=bind_dn,
                                  ldap_bindpw=bind_pw,
                                  group_dn=group_dn,
                                  group_name_attrib=group_name_attrib,
                                  group_member_attrib=group_member_attrib,
                                  group_filterstr=group_filterstr,
                                  ldap_mode=mode):
        """
        Get all ldap group names the user belongs to
        """
        logger.info('[%s] Get user groups from ldap' % str(username))
        #
        # Get all group names the user actually belongs to in ldap
        # #########################################################
        # Initialize ldap
        if not group_dn:
            group_dn = base_dn
        con = init_ldap()
        logger.debug('Username init: [%s]' % username)
        if ldap_mode == 'ad':
            #
            # Get the AD username
            # ####################
            if '@' not in username:
                domain = []
                for x in base_dn.split(','):
                    if "DC=" in x.upper():
                        domain.append(x.split('=')[-1])
                username = "%[email protected]%s" % (username, '.'.join(domain))
            username_bare = username.split("@")[0]
            con.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
            # In cases where ForestDnsZones and DomainDnsZones are found,
            # result will look like the following:
            # ['ldap://ForestDnsZones.domain.com/DC=ForestDnsZones,
            #     DC=domain,DC=com']
            if ldap_binddn:
                # need to search directory with an admin account 1st
                con.simple_bind_s(ldap_binddn, ldap_bindpw)
                logger.debug('Ldap bind connect...')
            else:
                # credentials should be in the form of username@domain.com
                con.simple_bind_s(username, password)
                logger.debug('Ldap username connect...')
            # We have to use the full string
            username = con.search_ext_s(base_dn, ldap.SCOPE_SUBTREE,
                                        "(&(sAMAccountName=%s)(%s))" %
                            (ldap.filter.escape_filter_chars(username_bare),
                            filterstr), ["cn"])[0][0]
        else:
            if ldap_binddn:
                # need to search directory with an bind_dn account 1st
                con.simple_bind_s(ldap_binddn, ldap_bindpw)
            else:
                # bind as anonymous
                con.simple_bind_s('', '')
                
        # if username is None, return empty list
        if username is None:
            return list()
        # search for groups where user is in
        filter = '(&(%s=%s)(%s))' % (ldap.filter.escape_filter_chars(
                                                            group_member_attrib
                                                            ),
                                     ldap.filter.escape_filter_chars(username),
                                     group_filterstr)
        group_search_result = con.search_s(group_dn,
                                           ldap.SCOPE_SUBTREE,
                                           filter, [group_name_attrib])
        ldap_groups_of_the_user = list()
        for group_row in group_search_result:
            group = group_row[1]
            if type(group) == dict and group.has_key(group_name_attrib):
                ldap_groups_of_the_user.extend(group[group_name_attrib])

        con.unbind()
        logger.debug('User groups: %s' % ldap_groups_of_the_user)
        return list(ldap_groups_of_the_user)

    if filterstr[0] == '(' and filterstr[-1] == ')':  # rfc4515 syntax
        filterstr = filterstr[1:-1]  # parens added again where used
    return ldap_auth_aux
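
The logging_level branch at the top of ldap_auth maps a lowercase name onto a level constant. Because the standard levels are attributes of the logging module, the chain can be collapsed with getattr; a sketch, with set_level_from_name as a hypothetical helper rather than part of the web2py API:

import logging

def set_level_from_name(logger, logging_level='error'):
    # Equivalent to the if/elif chain above: look the constant up by
    # name, falling back to ERROR for unknown strings.
    logger.setLevel(getattr(logging, logging_level.upper(), logging.ERROR))

logger = logging.getLogger('web2py.auth.ldap_auth')
set_level_from_name(logger, 'debug')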

Example 44

Project: termite-visualizations
Source File: ldap_auth.py
View license
def ldap_auth(server='ldap', port=None,
              base_dn='ou=users,dc=domain,dc=com',
              mode='uid', secure=False, cert_path=None, cert_file=None,
              bind_dn=None, bind_pw=None, filterstr='objectClass=*',
              username_attrib='uid',
              custom_scope='subtree',
              allowed_groups=None,
              manage_user=False,
              user_firstname_attrib='cn:1',
              user_lastname_attrib='cn:2',
              user_mail_attrib='mail',
              manage_groups=False,
              db=None,
              group_dn=None,
              group_name_attrib='cn',
              group_member_attrib='memberUid',
              group_filterstr='objectClass=*',
              logging_level='error'):

    """
    to use ldap login with MS Active Directory:

        from gluon.contrib.login_methods.ldap_auth import ldap_auth
        auth.settings.login_methods.append(ldap_auth(
            mode='ad', server='my.domain.controller',
            base_dn='ou=Users,dc=domain,dc=com'))

    to use ldap login with Notes Domino:

        auth.settings.login_methods.append(ldap_auth(
            mode='domino',server='my.domino.server'))

    to use ldap login with OpenLDAP:

        auth.settings.login_methods.append(ldap_auth(
            server='my.ldap.server', base_dn='ou=Users,dc=domain,dc=com'))

    to use ldap login with OpenLDAP and subtree search and (optionally)
    multiple DNs:

        auth.settings.login_methods.append(ldap_auth(
            mode='uid_r', server='my.ldap.server',
            base_dn=['ou=Users,dc=domain,dc=com','ou=Staff,dc=domain,dc=com']))

    or (if using CN):

        auth.settings.login_methods.append(ldap_auth(
            mode='cn', server='my.ldap.server',
            base_dn='ou=Users,dc=domain,dc=com'))

    or you can fully customize the user search:

        auth.settings.login_methods.append(ldap_auth(
            mode='custom', server='my.ldap.server',
            base_dn='ou=Users,dc=domain,dc=com',
            username_attrib='uid',
            custom_scope='subtree'))

    the custom_scope can be: base, onelevel, subtree.

    If using secure ldaps:// pass secure=True and cert_path="..."
    If ldap is using GnuTLS then you need cert_file="..." instead of cert_path
    because cert_path isn't implemented in GnuTLS :(

    If you need to bind to the directory with an admin account in order to
    search it then specify bind_dn & bind_pw to use for this.
    - currently only implemented for Active Directory

    If you need to restrict the set of allowed users (e.g. to members of a
    department) then specify an rfc4515 search filter string.
    - currently only implemented for mode in ['ad', 'company', 'uid_r']

    You can manage user attributes first name, last name, email from ldap:
        auth.settings.login_methods.append(ldap_auth(...as usual...,
            manage_user=True,
            user_firstname_attrib='cn:1',
            user_lastname_attrib='cn:2',
            user_mail_attrib='mail'
           ))

    Where:
    manage_user - let web2py handle user data from ldap
    user_firstname_attrib - the attribute containing the user's first name
                            optionally you can specify parts.
                            Example: cn: "John Smith" - 'cn:1'='John'
    user_lastname_attrib - the attribute containing the user's last name
                            optionally you can specify parts.
                            Example: cn: "John Smith" - 'cn:2'='Smith'
    user_mail_attrib - the attribute containing the user's email address


    If you need group control from ldap to web2py app's database feel free
    to set:

        auth.settings.login_methods.append(ldap_auth(...as usual...,
            manage_groups=True,
            db=db,
            group_dn='ou=Groups,dc=domain,dc=com',
            group_name_attrib='cn',
            group_member_attrib='memberUid',
            group_filterstr='objectClass=*'
           ))

        Where:
        manage_groups - let web2py handle the groups from ldap
        db - is the database object (need to have auth_user, auth_group,
            auth_membership)
        group_dn - the ldap branch of the groups
        group_name_attrib - the attribute where the group name is stored
        group_member_attrib - the attribute containing the group member names
        group_filterstr - like filterstr, but for selecting groups

    You can restrict login access to specific groups if you specify:

        auth.settings.login_methods.append(ldap_auth(...as usual...,
            allowed_groups=[...],
            group_dn='ou=Groups,dc=domain,dc=com',
            group_name_attrib='cn',
            group_member_attrib='memberUid',#use 'member' for Active Directory
            group_filterstr='objectClass=*'
           ))

        Where:
        allowed_groups - a list with allowed ldap group names
        group_dn - the ldap branch of the groups
        group_name_attrib - the attribute where the group name is stored
        group_member_attrib - the attribute containing the group member names
        group_filterstr - like filterstr, but for selecting groups

    If using Active Directory you must specify bind_dn and bind_pw for
    allowed_groups unless anonymous bind works.

    You can set the logging level with the "logging_level" parameter, default
    is "error" and can be set to error, warning, info, debug.
    """
    logger = logging.getLogger('web2py.auth.ldap_auth')
    if logging_level == 'error':
        logger.setLevel(logging.ERROR)
    elif logging_level == 'warning':
        logger.setLevel(logging.WARNING)
    elif logging_level == 'info':
        logger.setLevel(logging.INFO)
    elif logging_level == 'debug':
        logger.setLevel(logging.DEBUG)

    def ldap_auth_aux(username,
                      password,
                      ldap_server=server,
                      ldap_port=port,
                      ldap_basedn=base_dn,
                      ldap_mode=mode,
                      ldap_binddn=bind_dn,
                      ldap_bindpw=bind_pw,
                      secure=secure,
                      cert_path=cert_path,
                      cert_file=cert_file,
                      filterstr=filterstr,
                      username_attrib=username_attrib,
                      custom_scope=custom_scope,
                      manage_user=manage_user,
                      user_firstname_attrib=user_firstname_attrib,
                      user_lastname_attrib=user_lastname_attrib,
                      user_mail_attrib=user_mail_attrib,
                      manage_groups=manage_groups,
                      allowed_groups=allowed_groups,
                      db=db):
        if password == '':  # http://tools.ietf.org/html/rfc4513#section-5.1.2
            logger.warning('blank password not allowed')
            return False
        logger.debug('mode: [%s] manage_user: [%s] custom_scope: [%s]'
                     ' manage_groups: [%s]' % (str(mode), str(manage_user),
                     str(custom_scope), str(manage_groups)))
        if manage_user:
            if user_firstname_attrib.count(':') > 0:
                (user_firstname_attrib,
                 user_firstname_part) = user_firstname_attrib.split(':', 1)
                user_firstname_part = (int(user_firstname_part) - 1)
            else:
                user_firstname_part = None
            if user_lastname_attrib.count(':') > 0:
                (user_lastname_attrib,
                 user_lastname_part) = user_lastname_attrib.split(':', 1)
                user_lastname_part = (int(user_lastname_part) - 1)
            else:
                user_lastname_part = None
            user_firstname_attrib = ldap.filter.escape_filter_chars(
                user_firstname_attrib)
            user_lastname_attrib = ldap.filter.escape_filter_chars(
                user_lastname_attrib)
            user_mail_attrib = ldap.filter.escape_filter_chars(
                user_mail_attrib)
        try:
            if allowed_groups:
                if not is_user_in_allowed_groups(username, password):
                    return False
            con = init_ldap()
            if ldap_mode == 'ad':
                # Microsoft Active Directory
                if '@' not in username:
                    domain = []
                    for x in ldap_basedn.split(','):
                        if "DC=" in x.upper():
                            domain.append(x.split('=')[-1])
                    username = "%[email protected]%s" % (username, '.'.join(domain))
                username_bare = username.split("@")[0]
                con.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
                # In cases where ForestDnsZones and DomainDnsZones are found,
                # result will look like the following:
                # ['ldap://ForestDnsZones.domain.com/DC=ForestDnsZones,
                #    DC=domain,DC=com']
                if ldap_binddn:
                    # need to search directory with an admin account 1st
                    con.simple_bind_s(ldap_binddn, ldap_bindpw)
                else:
                    # credentials should be in the form of user@domain.com
                    con.simple_bind_s(username, password)
                # this will throw an index error if the account is not found
                # in the ldap_basedn
                requested_attrs = ['sAMAccountName']
                if manage_user:
                    requested_attrs.extend([user_firstname_attrib,
                                           user_lastname_attrib,
                                           user_mail_attrib])
                result = con.search_ext_s(
                    ldap_basedn, ldap.SCOPE_SUBTREE,
                    "(&(sAMAccountName=%s)(%s))" % (
                                ldap.filter.escape_filter_chars(username_bare),
                                filterstr),
                    requested_attrs)[0][1]
                if not isinstance(result, dict):
                    # result should be a dict in the form
                    # {'sAMAccountName': [username_bare]}
                    logger.warning('User [%s] not found!' % username)
                    return False
                if ldap_binddn:
                    # We know the user exists & is in the correct OU
                    # so now we just check the password
                    con.simple_bind_s(username, password)
                username = username_bare

            if ldap_mode == 'domino':
                # Notes Domino
                if "@" in username:
                    username = username.split("@")[0]
                con.simple_bind_s(username, password)
                if manage_user:
                    # TODO: sorry I have no clue how to query attrs in domino
                    result = {user_firstname_attrib: username,
                              user_lastname_attrib: None,
                              user_mail_attrib: None}

            if ldap_mode == 'cn':
                # OpenLDAP (CN)
                if ldap_binddn and ldap_bindpw:
                    con.simple_bind_s(ldap_binddn, ldap_bindpw)
                dn = "cn=" + username + "," + ldap_basedn
                con.simple_bind_s(dn, password)
                if manage_user:
                    result = con.search_s(dn, ldap.SCOPE_BASE,
                                          "(objectClass=*)",
                                          [user_firstname_attrib,
                                          user_lastname_attrib,
                                          user_mail_attrib])[0][1]

            if ldap_mode == 'uid':
                # OpenLDAP (UID)
                if ldap_binddn and ldap_bindpw:
                    con.simple_bind_s(ldap_binddn, ldap_bindpw)
                    dn = "uid=" + username + "," + ldap_basedn
                    dn = con.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, "(uid=%s)"%username, [''])[0][0]
                else:
                    dn = "uid=" + username + "," + ldap_basedn
                con.simple_bind_s(dn, password)
                if manage_user:
                    result = con.search_s(dn, ldap.SCOPE_BASE,
                                          "(objectClass=*)",
                                          [user_firstname_attrib,
                                          user_lastname_attrib,
                                          user_mail_attrib])[0][1]

            if ldap_mode == 'company':
                # no DNs or password needed to search directory
                dn = ""
                pw = ""
                # bind anonymously
                con.simple_bind_s(dn, pw)
                # search by e-mail address
                filter = '(&(mail=%s)(%s))' % (
                                ldap.filter.escape_filter_chars(username),
                                filterstr)
                # find the uid
                attrs = ['uid']
                if manage_user:
                    attrs.extend([user_firstname_attrib,
                                  user_lastname_attrib,
                                  user_mail_attrib])
                # perform the actual search
                company_search_result = con.search_s(ldap_basedn,
                                                     ldap.SCOPE_SUBTREE,
                                                     filter, attrs)
                dn = company_search_result[0][0]
                result = company_search_result[0][1]
                # perform the real authentication test
                con.simple_bind_s(dn, password)

            if ldap_mode == 'uid_r':
                # OpenLDAP (UID) with subtree search and multiple DNs
                if isinstance(ldap_basedn, list):
                    basedns = ldap_basedn
                else:
                    basedns = [ldap_basedn]
                filter = '(&(uid=%s)(%s))' % (
                    ldap.filter.escape_filter_chars(username), filterstr)
                found = False
                for basedn in basedns:
                    try:
                        result = con.search_s(basedn, ldap.SCOPE_SUBTREE,
                                              filter)
                        if result:
                            user_dn = result[0][0]
                            # Check the password
                            con.simple_bind_s(user_dn, password)
                            found = True
                            break
                    except ldap.LDAPError, detail:
                        (exc_type, exc_value) = sys.exc_info()[:2]
                        logger.warning(
                        "ldap_auth: searching %s for %s resulted in %s: %s\n" %
                                       (basedn, filter, exc_type, exc_value)
                                       )
                if not found:
                    logger.warning('User [%s] not found!' % username)
                    return False
                result = result[0][1]
            if ldap_mode == 'custom':
                # OpenLDAP (username_attrs) with subtree search and
                # multiple DNs
                if isinstance(ldap_basedn, list):
                    basedns = ldap_basedn
                else:
                    basedns = [ldap_basedn]
                filter = '(&(%s=%s)(%s))' % (username_attrib,
                                             ldap.filter.escape_filter_chars(
                                                 username),
                                             filterstr)
                if custom_scope == 'subtree':
                    ldap_scope = ldap.SCOPE_SUBTREE
                elif custom_scope == 'base':
                    ldap_scope = ldap.SCOPE_BASE
                elif custom_scope == 'onelevel':
                    ldap_scope = ldap.SCOPE_ONELEVEL
                found = False
                for basedn in basedns:
                    try:
                        result = con.search_s(basedn, ldap_scope, filter)
                        if result:
                            user_dn = result[0][0]
                            # Check the password
                            con.simple_bind_s(user_dn, password)
                            found = True
                            break
                    except ldap.LDAPError, detail:
                        (exc_type, exc_value) = sys.exc_info()[:2]
                        logger.warning(
                        "ldap_auth: searching %s for %s resulted in %s: %s\n" %
                                       (basedn, filter, exc_type, exc_value)
                                       )
                if not found:
                    logger.warning('User [%s] not found!' % username)
                    return False
                result = result[0][1]
            if manage_user:
                logger.info('[%s] Manage user data' % str(username))
                try:
                    if user_firstname_part is not None:
                        store_user_firstname = result[user_firstname_attrib][
                            0].split(' ', 1)[user_firstname_part]
                    else:
                        store_user_firstname = result[user_firstname_attrib][0]
                except KeyError, e:
                    store_user_firstname = None
                try:
                    if user_lastname_part is not None:
                        store_user_lastname = result[user_lastname_attrib][
                            0].split(' ', 1)[user_lastname_part]
                    else:
                        store_user_lastname = result[user_lastname_attrib][0]
                except KeyError, e:
                    store_user_lastname = None
                try:
                    store_user_mail = result[user_mail_attrib][0]
                except KeyError, e:
                    store_user_mail = None
                try:
                    #
                    # user as username
                    # #################
                    user_in_db = db(db.auth_user.username == username)
                    if user_in_db.count() > 0:
                        user_in_db.update(first_name=store_user_firstname,
                                          last_name=store_user_lastname,
                                          email=store_user_mail)
                    else:
                        db.auth_user.insert(first_name=store_user_firstname,
                                            last_name=store_user_lastname,
                                            email=store_user_mail,
                                            username=username)
                except:
                    #
                    # user as email
                    # ##############
                    user_in_db = db(db.auth_user.email == username)
                    if user_in_db.count() > 0:
                        user_in_db.update(first_name=store_user_firstname,
                                          last_name=store_user_lastname)
                    else:
                        db.auth_user.insert(first_name=store_user_firstname,
                                            last_name=store_user_lastname,
                                            email=username)
            con.unbind()

            if manage_groups:
                if not do_manage_groups(username, password):
                    return False
            return True
        except ldap.INVALID_CREDENTIALS, e:
            return False
        except ldap.LDAPError, e:
            import traceback
            logger.warning('[%s] Error in ldap processing' % str(username))
            logger.debug(traceback.format_exc())
            return False
        except IndexError, ex:  # for AD membership test
            import traceback
            logger.warning('[%s] Ldap result indexing error' % str(username))
            logger.debug(traceback.format_exc())
            return False

    def is_user_in_allowed_groups(username,
                                  password=None,
                                  allowed_groups=allowed_groups):
        """
        Determine whether the username is a member of any allowed
        group in ldap.
        """
        #
        # Get all the group names the user currently belongs to in ldap
        # ##############################################################
        ldap_groups_of_the_user = get_user_groups_from_ldap(username, password)

        # search for allowed group names
        if not isinstance(allowed_groups, list):
            allowed_groups = [allowed_groups]
        for group in allowed_groups:
            if ldap_groups_of_the_user.count(group) > 0:
                # Match
                return True
        # No match
        return False

    def do_manage_groups(username,
                         password=None,
                         db=db):
        """
        Manage user groups

        Get all of the user's groups from ldap, refresh the ones already
        stored in web2py's application database, and create new groups
        as needed to match ldap.
        """
        logger.info('[%s] Manage user groups' % str(username))
        try:
            #
            # Get all the group names the user currently belongs to in ldap
            # #########################################################
            ldap_groups_of_the_user = get_user_groups_from_ldap(
                username, password)

            #
            # Get all the group names the user currently belongs to in the local db
            # #############################################################
            try:
                db_user_id = db(db.auth_user.username == username).select(
                    db.auth_user.id).first().id
            except:
                try:
                    db_user_id = db(db.auth_user.email == username).select(
                        db.auth_user.id).first().id
                except AttributeError, e:
                    #
                    # There is no user in local db
                    # We create one
                    # ##############################
                    try:
                        db_user_id = db.auth_user.insert(username=username,
                                                         first_name=username)
                    except AttributeError, e:
                        db_user_id = db.auth_user.insert(email=username,
                                                         first_name=username)
            if not db_user_id:
                logger.error(
                    'There is no username or email for %s!' % username)
                raise
            db_group_search = db((db.auth_membership.user_id == db_user_id) &
                            (db.auth_user.id == db.auth_membership.user_id) &
                            (db.auth_group.id == db.auth_membership.group_id))
            db_groups_of_the_user = list()
            db_group_id = dict()

            if db_group_search.count() > 0:
                for group in db_group_search.select(db.auth_group.id,
                                                    db.auth_group.role,
                                                    distinct=True):
                    db_group_id[group.role] = group.id
                    db_groups_of_the_user.append(group.role)
            logger.debug('db groups of user %s: %s' %
                         (username, str(db_groups_of_the_user)))

            #
            # Delete user membership from groups where user is not anymore
            # #############################################################
            for group_to_del in db_groups_of_the_user:
                if ldap_groups_of_the_user.count(group_to_del) == 0:
                    db((db.auth_membership.user_id == db_user_id) &
                       (db.auth_membership.group_id == \
                         db_group_id[group_to_del])).delete()

            #
            # Create user membership in groups where user is not in already
            # ##############################################################
            for group_to_add in ldap_groups_of_the_user:
                if db_groups_of_the_user.count(group_to_add) == 0:
                    if db(db.auth_group.role == group_to_add).count() == 0:
                        gid = db.auth_group.insert(role=group_to_add,
                                            description='Generated from LDAP')
                    else:
                        gid = db(db.auth_group.role == group_to_add).select(
                            db.auth_group.id).first().id
                    db.auth_membership.insert(user_id=db_user_id,
                                              group_id=gid)
        except:
            logger.warning("[%s] Groups are not managed successfully!" %
                           str(username))
            import traceback
            logger.debug(traceback.format_exc())
            return False
        return True

    def init_ldap(ldap_server=server,
                  ldap_port=port,
                  ldap_basedn=base_dn,
                  ldap_mode=mode,
                  secure=secure,
                  cert_path=cert_path,
                  cert_file=cert_file):
        """
        Initialize ldap connection
        """
        logger.info('[%s] Initialize ldap connection' % str(ldap_server))
        if secure:
            if not ldap_port:
                ldap_port = 636
            con = ldap.initialize(
                "ldaps://" + ldap_server + ":" + str(ldap_port))
            if cert_path:
                con.set_option(ldap.OPT_X_TLS_CACERTDIR, cert_path)
            if cert_file:
                con.set_option(ldap.OPT_X_TLS_CACERTFILE, cert_file)
        else:
            if not ldap_port:
                ldap_port = 389
            con = ldap.initialize(
                "ldap://" + ldap_server + ":" + str(ldap_port))
        return con

    def get_user_groups_from_ldap(username,
                                  password=None,
                                  base_dn=base_dn,
                                  ldap_binddn=bind_dn,
                                  ldap_bindpw=bind_pw,
                                  group_dn=group_dn,
                                  group_name_attrib=group_name_attrib,
                                  group_member_attrib=group_member_attrib,
                                  group_filterstr=group_filterstr,
                                  ldap_mode=mode):
        """
        Get the names of all ldap groups the user belongs to
        """
        logger.info('[%s] Get user groups from ldap' % str(username))
        #
        # Get all the group names the user currently belongs to in ldap
        # #########################################################
        # Initialize ldap
        if not group_dn:
            group_dn = base_dn
        con = init_ldap()
        logger.debug('Username init: [%s]' % username)
        if ldap_mode == 'ad':
            #
            # Get the AD username
            # ####################
            if '@' not in username:
                domain = []
                for x in base_dn.split(','):
                    if "DC=" in x.upper():
                        domain.append(x.split('=')[-1])
                username = "%[email protected]%s" % (username, '.'.join(domain))
            username_bare = username.split("@")[0]
            con.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
            # In cases where ForestDnsZones and DomainDnsZones are found,
            # result will look like the following:
            # ['ldap://ForestDnsZones.domain.com/DC=ForestDnsZones,
            #     DC=domain,DC=com']
            if ldap_binddn:
                # need to search directory with an admin account 1st
                con.simple_bind_s(ldap_binddn, ldap_bindpw)
                logger.debug('Ldap bind connect...')
            else:
                # credentials should be in the form of user@domain.com
                con.simple_bind_s(username, password)
                logger.debug('Ldap username connect...')
            # We have to use the user's full DN for the group search below
            username = con.search_ext_s(base_dn, ldap.SCOPE_SUBTREE,
                                        "(&(sAMAccountName=%s)(%s))" %
                            (ldap.filter.escape_filter_chars(username_bare),
                            filterstr), ["cn"])[0][0]
        else:
            if ldap_binddn:
                # need to search the directory with the bind_dn account first
                con.simple_bind_s(ldap_binddn, ldap_bindpw)
            else:
                # bind as anonymous
                con.simple_bind_s('', '')
                
        # if username is None, return empty list
        if username is None:
            return list()
        # search for the groups the user belongs to
        filter = '(&(%s=%s)(%s))' % (ldap.filter.escape_filter_chars(
                                                            group_member_attrib
                                                            ),
                                     ldap.filter.escape_filter_chars(username),
                                     group_filterstr)
        group_search_result = con.search_s(group_dn,
                                           ldap.SCOPE_SUBTREE,
                                           filter, [group_name_attrib])
        ldap_groups_of_the_user = list()
        for group_row in group_search_result:
            group = group_row[1]
            if isinstance(group, dict) and group_name_attrib in group:
                ldap_groups_of_the_user.extend(group[group_name_attrib])

        con.unbind()
        logger.debug('User groups: %s' % ldap_groups_of_the_user)
        return list(ldap_groups_of_the_user)

    if filterstr[0] == '(' and filterstr[-1] == ')':  # rfc4515 syntax
        filterstr = filterstr[1:-1]  # parens added again where used
    return ldap_auth_aux
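
The factory above returns ldap_auth_aux as the actual credential-checking callback. Below is a minimal sketch of wiring it into a web2py model file (auth is web2py's Auth instance; the server and base_dn values are placeholders, and the import path assumes web2py's standard contrib location):

from gluon.contrib.login_methods.ldap_auth import ldap_auth

# Delegate password checks to Active Directory (placeholder values).
auth.settings.login_methods = [
    ldap_auth(mode='ad',
              server='ldap.example.com',
              base_dn='DC=example,DC=com',
              logging_level='debug')]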

Example 45

Project: fwlite
Source File: config.py
View license
    def __init__(self):
        self.logger = logging.getLogger('config')
        self.logger.setLevel(logging.INFO)
        hdr = logging.StreamHandler()
        formatter = logging.Formatter('%(asctime)s %(name)s:%(levelname)s %(message)s',
                                      datefmt='%H:%M:%S')
        hdr.setFormatter(formatter)
        self.logger.addHandler(hdr)

        self.version = SConfigParser()
        self.userconf = SConfigParser()
        self.reload()
        self.UPDATE_INTV = 6
        self.timeout = self.userconf.dgetint('fgfwproxy', 'timeout', 4)
        ParentProxy.DEFAULT_TIMEOUT = self.timeout
        self.parentlist = ParentProxyList()
        self.HOSTS = defaultdict(list)
        self.GUI = '-GUI' in sys.argv
        self.rproxy = self.userconf.dgetbool('fgfwproxy', 'rproxy', False)

        listen = self.userconf.dget('fgfwproxy', 'listen', '8118')
        if listen.isdigit():
            self.listen = ('127.0.0.1', int(listen))
        else:
            self.listen = (listen.rsplit(':', 1)[0], int(listen.rsplit(':', 1)[1]))

        try:
            self.local_ip = set(socket.gethostbyname_ex(socket.gethostname())[2])
        except Exception:
            try:
                csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                csock.connect(('8.8.8.8', 53))
                (addr, port) = csock.getsockname()
                csock.close()
                self.local_ip = set([addr])
            except socket.error:
                self.local_ip = set(['127.0.0.1'])
        ip = self.local_ip.pop()
        self.local_ip.add(ip)
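        # pop() removes and returns an arbitrary address from the set and
        # add() immediately puts it back: in effect a peek. That address
        # is embedded in the default PAC below, which sends plain and
        # private hosts DIRECT and routes everything else through the
        # local listener.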
        self.PAC = '''\
function FindProxyForURL(url, host) {
if (isPlainHostName(host) ||
    host.indexOf('127.') == 0 ||
    host.indexOf('192.168.') == 0 ||
    host.indexOf('10.') == 0 ||
    shExpMatch(host, 'localhost.*'))
    {
        return 'DIRECT';
    }
return "PROXY %s:%s; DIRECT";}''' % (ip, self.listen[1])
        if self.userconf.dget('fgfwproxy', 'pac', ''):
            if os.path.isfile(self.userconf.dget('fgfwproxy', 'pac', '')):
                self.PAC = open(self.userconf.dget('fgfwproxy', 'pac', '')).read()
            else:
                self.PAC = '''\
function FindProxyForURL(url, host) {
if (isPlainHostName(host) ||
    host.indexOf('127.') == 0 ||
    host.indexOf('192.168.') == 0 ||
    host.indexOf('10.') == 0 ||
    shExpMatch(host, 'localhost.*'))
    {
        return 'DIRECT';
    }
return "PROXY %s; DIRECT";}''' % self.userconf.dget('fgfwproxy', 'pac', '')
        self.PAC = self.PAC.encode()

        if self.userconf.dget('FGFW_Lite', 'logfile', ''):
            path = self.userconf.dget('FGFW_Lite', 'logfile', '')
            dirname = os.path.dirname(path)
            if dirname and not os.path.exists(dirname):
                os.makedirs(dirname)
            formatter = logging.Formatter('FW-Lite %(asctime)s %(levelname)s %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
            hdlr = logging.handlers.RotatingFileHandler(path, maxBytes=1048576, backupCount=5)
            hdlr.setFormatter(formatter)
            self.logger.addHandler(hdlr)

        self.region = set(x.upper() for x in self.userconf.dget('fgfwproxy', 'region', '').split('|') if x.strip())
        self.profiles = len(self.userconf.dget('fgfwproxy', 'profile', '134'))
        self.xheaders = self.userconf.dgetbool('fgfwproxy', 'xheaders', False)

        if self.userconf.dget('fgfwproxy', 'parentproxy', ''):
            self.addparentproxy('direct', '%s 0' % self.userconf.dget('fgfwproxy', 'parentproxy', ''))
            self.addparentproxy('local', 'direct 100')
        else:
            self.addparentproxy('direct', 'direct 0')

        ParentProxy.set_via(self.parentlist.direct)

        for k, v in self.userconf.items('parents'):
            if '6Rc59g0jFlTppvel' in v:
                self.userconf.remove_option('parents', k)
                self.confsave()
                continue
            self.addparentproxy(k, v)

        if not self.rproxy and len([k for k in self.parentlist.httpsparents() if k.httpspriority < 100]) == 0:
            self.logger.warning('No parent proxy available!')

        self.maxretry = self.userconf.dgetint('fgfwproxy', 'maxretry', 4)

        def addhost(host, ip):
            if isinstance(ip, bytes):
                ip = unicode(ip)
            try:
                ipo = ip_address(ip)
                if isinstance(ipo, IPv4Address):
                    self.HOSTS[host].append((2, ip))
                else:
                    self.HOSTS[host].append((10, ip))
            except Exception:
                self.logger.warning('unsupported host: %s' % ip)
                sys.stderr.write(traceback.format_exc() + '\n')
                sys.stderr.flush()

        for host, ip in self.userconf.items('hosts'):
            addhost(host, ip)

        if os.path.isfile('./fgfw-lite/hosts'):
            for line in open('./fgfw-lite/hosts'):
                line = line.strip()
                if line and not line.startswith('#'):
                    try:
                        ip, host = line.split()
                        addhost(host, ip)
                    except Exception as e:
                        self.logger.warning('%s %s' % (e, line))
        self.localdns = [parse_hostport(dns, 53) for dns in self.userconf.dget('dns', 'localdns', '119.29.29.29').split('|')]
        self.remotedns = self.localdns if self.rproxy else [parse_hostport(dns, 53) for dns in self.userconf.dget('dns', 'remotedns', '8.8.8.8').split('|')]
        self.REDIRECTOR = redirector(self)
        self.PARENT_PROXY = get_proxy(self)
        bad_ip = set(self.userconf.dget('dns', 'bad_ip', '').split('|'))
        self.resolver = resolver.get_resolver(self.localdns, self.remotedns,
                                              proxy=ParentProxy('self', 'http://127.0.0.1:%d' % self.listen[1]),
                                              apfilter=[self.PARENT_PROXY.gfwlist, self.PARENT_PROXY.local],
                                              bad_ip=bad_ip)
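
The first few lines of this constructor are the canonical logging.getLogger setup: fetch a named logger, set its level, and attach a handler carrying a formatter. Distilled into a standalone sketch (the logger name and format string are simply the ones used above):

import logging

logger = logging.getLogger('config')   # same name always yields the same logger
logger.setLevel(logging.INFO)
hdr = logging.StreamHandler()          # defaults to stderr
hdr.setFormatter(logging.Formatter(
    '%(asctime)s %(name)s:%(levelname)s %(message)s', datefmt='%H:%M:%S'))
logger.addHandler(hdr)
logger.info('configuration loaded')    # hypothetical message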

Example 46

Project: aceproxy
Source File: aceclient.py
View license
    def _recvData(self):
        '''
        Data receiver method for greenlet
        '''
        logger = logging.getLogger('AceClient_recvdata')

        while True:
            gevent.sleep()
            try:
                self._recvbuffer = self._socket.read_until("\r\n")
                self._recvbuffer = self._recvbuffer.strip()
                #logger.debug('<<< ' + self._recvbuffer)
            except:
                # If something happened during read, abandon reader.
                if not self._shuttingDown.isSet():
                    logger.error("Exception at socket read")
                    self._shuttingDown.set()
                return

            if self._recvbuffer:
                # Parsing everything only if the string is not empty
                if self._recvbuffer.startswith(AceMessage.response.HELLO):
                    # Parse HELLO
                    if 'key=' in self._recvbuffer:
                        self._request_key_begin = self._recvbuffer.find('key=')
                        self._request_key = \
                            self._recvbuffer[self._request_key_begin+4:self._request_key_begin+14]
                        try:
                            self._write(AceMessage.request.READY_key(
                                self._request_key, self._product_key))
                        except urllib2.URLError as e:
                            logger.error("Can't connect to keygen server! " + \
                                repr(e))
                            self._auth = False
                            self._authevent.set()
                        self._request_key = None
                    else:
                        self._write(AceMessage.request.READY_nokey)

                elif self._recvbuffer.startswith(AceMessage.response.NOTREADY):
                    # NOTREADY
                    logger.error("Ace is not ready. Wrong auth?")
                    self._auth = False
                    self._authevent.set()

                elif self._recvbuffer.startswith(AceMessage.response.LOADRESP):
                    # LOADRESP
                    _contentinfo_raw = self._recvbuffer.split()[2:]
                    _contentinfo_raw = ' '.join(_contentinfo_raw)
                    _contentinfo = json.loads(_contentinfo_raw)
                    if _contentinfo.get('status') == 100:
                        logger.error("LOADASYNC returned error with message: %s"
                            % _contentinfo.get('message'))
                        self._result.set(False)
                    else:
                        logger.debug("Content info: %s", _contentinfo)
                        _filename = urllib2.unquote(_contentinfo.get('files')[0][0])
                        self._result.set(_filename)

                elif self._recvbuffer.startswith(AceMessage.response.START):
                    # START
                    if not self._seekback or self._started_again:
                        # If seekback is disabled, we use the link from the
                        # first START command. If seekback is enabled, we
                        # ignore the first START and seek back on the first
                        # EVENT position command; AceStream then sends STOP
                        # and START again with a new link, and only that
                        # second link is used.
                        try:
                            self._url = self._recvbuffer.split()[1]
                            self._urlresult.set(self._url)
                            self._resumeevent.set()
                        except IndexError as e:
                            self._url = None

                elif self._recvbuffer.startswith(AceMessage.response.STOP):
                    pass

                elif self._recvbuffer.startswith(AceMessage.response.SHUTDOWN):
                    logger.debug("Got SHUTDOWN from engine")
                    self._socket.close()
                    return

                elif self._recvbuffer.startswith(AceMessage.response.AUTH):
                    try:
                        self._auth = self._recvbuffer.split()[1]
                        # Send USERDATA here
                        self._write(
                            AceMessage.request.USERDATA(self._gender, self._age))
                    except:
                        pass
                    self._authevent.set()

                elif self._recvbuffer.startswith(AceMessage.response.GETUSERDATA):
                    raise AceException("You should init me first!")

                elif self._recvbuffer.startswith(AceMessage.response.LIVEPOS):
                    self._position = self._recvbuffer.split()
                    self._position_last = self._position[2].split('=')[1]
                    self._position_buf = self._position[9].split('=')[1]
                    self._position = self._position[4].split('=')[1]
                    logger.debug('Current position/last/buf: %s/%s/%s' % (self._position,
                                                                          self._position_last,
                                                                          self._position_buf)
                    )
                    if self._seekback and not self._started_again:
                        self._write(AceMessage.request.SEEK(str(int(self._position_last) - \
                            self._seekback)))
                        logger.debug('Seeking back')
                        self._started_again = True

                elif self._recvbuffer.startswith(AceMessage.response.STATE):
                    self._state = self._recvbuffer.split()[1]

                elif self._recvbuffer.startswith(AceMessage.response.STATUS):
                    self._tempstatus = self._recvbuffer.split()[1].split(';')[0]
                    if self._tempstatus != self._status:
                        self._status = self._tempstatus
                        logger.debug("STATUS changed to " + self._status)

                    if self._status == 'main:err':
                        logger.error(
                            self._status + ' with message ' + self._recvbuffer.split(';')[2])
                        self._result.set_exception(
                            AceException(self._status + ' with message ' + self._recvbuffer.split(';')[2]))
                        self._urlresult.set_exception(
                            AceException(self._status + ' with message ' + self._recvbuffer.split(';')[2]))
                    elif self._status == 'main:starting':
                        self._result.set(True)

                elif self._recvbuffer.startswith(AceMessage.response.PAUSE):
                    logger.debug("PAUSE event")
                    self._resumeevent.clear()

                elif self._recvbuffer.startswith(AceMessage.response.RESUME):
                    logger.debug("RESUME event")
                    gevent.sleep(self._pausedelay)
                    self._resumeevent.set()
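
Here logging.getLogger('AceClient_recvdata') is called inside the method rather than cached on the instance. That is harmless: getLogger is a registry lookup, so every call with the same name returns the very same Logger object, as this small sketch demonstrates:

import logging

a = logging.getLogger('AceClient_recvdata')
b = logging.getLogger('AceClient_recvdata')
assert a is b  # one shared instance per name, handlers and all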

Example 47

Project: BlogCatke
Source File: __init__.py
View license
    def verify_request(self, uri, http_method=u'GET', body=None,
            headers=None, require_resource_owner=True, require_verifier=False,
            require_realm=False, required_realm=None):
        """Verifies a request ensuring that the following is true:

        Per `section 3.2`_ of the spec.

        - all mandated OAuth parameters are supplied
        - parameters are only supplied in one source which may be the URI
          query, the Authorization header or the body
        - all parameters are checked and validated, see comments and the
          methods and properties of this class for further details.
        - the supplied signature is verified against a recalculated one

        A ValueError will be raised if any parameter is missing,
        supplied twice, or invalid. An HTTP 400 response should be returned
        upon catching such an exception.

        An HTTP 401 response should be returned if verify_request returns False.

        `Timing attacks`_ are prevented through the use of dummy credentials to
        create near constant time verification even if an invalid credential
        is used. Early exit on invalid credentials would enable attackers
        to perform `enumeration attacks`_. Near constant time string comparison
        is used to prevent secret key guessing. Note that timing attacks can
        only be prevented through near constant time execution, not by adding
        a random delay which would only require more samples to be gathered.

        .. _`section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
        .. _`Timing attacks`: http://rdist.root.org/2010/07/19/exploiting-remote-timing-attacks/
        .. _`enumeration attacks`: http://www.sans.edu/research/security-laboratory/article/attacks-browsing
        """
        # Only include body data from x-www-form-urlencoded requests
        headers = headers or {}
        if (u"Content-Type" in headers and
                headers[u"Content-Type"] == CONTENT_TYPE_FORM_URLENCODED):
            request = Request(uri, http_method, body, headers)
        else:
            request = Request(uri, http_method, u'', headers)

        if self.enforce_ssl and not request.uri.lower().startswith("https://"):
            raise ValueError("Insecure transport, only HTTPS is allowed.")

        signature_type, params, oauth_params = self.get_signature_type_and_params(request)

        # The server SHOULD return a 400 (Bad Request) status code when
        # receiving a request with duplicated protocol parameters.
        if len(dict(oauth_params)) != len(oauth_params):
            raise ValueError("Duplicate OAuth entries.")

        oauth_params = dict(oauth_params)
        request_signature = oauth_params.get(u'oauth_signature')
        client_key = oauth_params.get(u'oauth_consumer_key')
        resource_owner_key = oauth_params.get(u'oauth_token')
        nonce = oauth_params.get(u'oauth_nonce')
        timestamp = oauth_params.get(u'oauth_timestamp')
        callback_uri = oauth_params.get(u'oauth_callback')
        verifier = oauth_params.get(u'oauth_verifier')
        signature_method = oauth_params.get(u'oauth_signature_method')
        realm = dict(params).get(u'realm')

        # The server SHOULD return a 400 (Bad Request) status code when
        # receiving a request with missing parameters.
        if not all((request_signature, client_key, nonce,
                    timestamp, signature_method)):
            raise ValueError("Missing OAuth parameters.")

        # OAuth does not mandate a particular signature method, as each
        # implementation can have its own unique requirements.  Servers are
        # free to implement and document their own custom methods.
        # Recommending any particular method is beyond the scope of this
        # specification.  Implementers should review the Security
        # Considerations section (`Section 4`_) before deciding on which
        # method to support.
        # .. _`Section 4`: http://tools.ietf.org/html/rfc5849#section-4
        if signature_method not in self.allowed_signature_methods:
            raise ValueError("Invalid signature method.")

        # Servers receiving an authenticated request MUST validate it by:
        #   If the "oauth_version" parameter is present, ensuring its value is
        #   "1.0".
        if u'oauth_version' in oauth_params and oauth_params[u'oauth_version'] != u'1.0':
            raise ValueError("Invalid OAuth version.")

        # The timestamp value MUST be a positive integer. Unless otherwise
        # specified by the server's documentation, the timestamp is expressed
        # in the number of seconds since January 1, 1970 00:00:00 GMT.
        if len(timestamp) != 10:
            raise ValueError("Invalid timestamp size")
        try:
            ts = int(timestamp)
        except ValueError:
            raise ValueError("Timestamp must be an integer")

        else:
            # To avoid the need to retain an infinite number of nonce values for
            # future checks, servers MAY choose to restrict the time period after
            # which a request with an old timestamp is rejected.
            if time.time() - ts > self.timestamp_lifetime:
                raise ValueError("Request too old, over 10 minutes.")

        # Provider specific validation of parameters, used to enforce
        # restrictions such as character set and length.
        if not self.check_client_key(client_key):
            raise ValueError("Invalid client key.")

        if not resource_owner_key and require_resource_owner:
            raise ValueError("Missing resource owner.")

        if (require_resource_owner and not require_verifier and
            not self.check_access_token(resource_owner_key)):
            raise ValueError("Invalid resource owner key.")

        if (require_resource_owner and require_verifier and
            not self.check_request_token(resource_owner_key)):
            raise ValueError("Invalid resource owner key.")

        if not self.check_nonce(nonce):
            raise ValueError("Invalid nonce.")

        if realm and not self.check_realm(realm):
            raise ValueError("Invalid realm. Allowed are %s" % self.realms)

        if not verifier and require_verifier:
            raise ValueError("Missing verifier.")

        if require_verifier and not self.check_verifier(verifier):
            raise ValueError("Invalid verifier.")

        # Servers receiving an authenticated request MUST validate it by:
        #   If using the "HMAC-SHA1" or "RSA-SHA1" signature methods, ensuring
        #   that the combination of nonce/timestamp/token (if present)
        #   received from the client has not been used before in a previous
        #   request (the server MAY reject requests with stale timestamps as
        #   described in `Section 3.3`_).
        # .._`Section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3
        #
        # We check this before validating client and resource owner for
        # increased security and performance, both gained by doing less work.
        if require_verifier:
            token = {"request_token": resource_owner_key}
        else:
            token = {"access_token": resource_owner_key}
        if not self.validate_timestamp_and_nonce(client_key, timestamp,
                nonce, **token):
                return False

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid client credentials.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy client is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable client enumeration
        valid_client = self.validate_client_key(client_key)
        if not valid_client:
            client_key = self.dummy_client

        # Ensure a valid redirection uri is used
        valid_redirect = self.validate_redirect_uri(client_key, callback_uri)

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid or expired token.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy token is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable resource owner enumeration
        if resource_owner_key:
            if require_verifier:
                valid_resource_owner = self.validate_request_token(
                    client_key, resource_owner_key)
            else:
                valid_resource_owner = self.validate_access_token(
                    client_key, resource_owner_key)
            if not valid_resource_owner:
                resource_owner_key = self.dummy_resource_owner
        else:
            valid_resource_owner = True

        # Note that `realm`_ is only used in authorization headers and how
        # it should be interpreted is not included in the OAuth spec.
        # However they could be seen as a scope or realm to which the
        # client has access and as such every client should be checked
        # to ensure it is authorized access to that scope or realm.
        # .. _`realm`: http://tools.ietf.org/html/rfc2617#section-1.2
        #
        # Note that early exit would enable client realm access enumeration.
        #
        # The require_realm indicates this is the first step in the OAuth
        # workflow where a client requests access to a specific realm.
        #
        # Clients obtaining an access token will not supply a realm and it will
        # not be checked. Instead the previously requested realm should be
        # transferred from the request token to the access token.
        #
        # Access to protected resources will always validate the realm but note
        # that the realm is now tied to the access token and not provided by
        # the client.
        if require_realm and not resource_owner_key:
            valid_realm = self.validate_requested_realm(client_key, realm)
        elif require_verifier:
            valid_realm = True
        else:
            valid_realm = self.validate_realm(client_key, resource_owner_key,
                    uri=request.uri, required_realm=required_realm)

        # The server MUST verify (Section 3.2) the validity of the request,
        # ensure that the resource owner has authorized the provisioning of
        # token credentials to the client, and ensure that the temporary
        # credentials have not expired or been used before.  The server MUST
        # also verify the verification code received from the client.
        # .. _`Section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
        #
        # Note that early exit would enable resource owner authorization
        # verifier enumeration.
        if verifier:
            valid_verifier = self.validate_verifier(client_key,
                resource_owner_key, verifier)
        else:
            valid_verifier = True

        # Parameters to Client depend on signature method which may vary
        # for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters

        request.params = filter(lambda x: x[0] != "oauth_signature", params)
        request.signature = request_signature

        # ---- RSA Signature verification ----
        if signature_method == SIGNATURE_RSA:
            # The server verifies the signature per `[RFC3447] section 8.2.2`_
            # .. _`[RFC3447] section 8.2.2`: http://tools.ietf.org/html/rfc3447#section-8.2.1
            rsa_key = self.get_rsa_key(client_key)
            valid_signature = signature.verify_rsa_sha1(request, rsa_key)

        # ---- HMAC or Plaintext Signature verification ----
        else:
            # Servers receiving an authenticated request MUST validate it by:
            #   Recalculating the request signature independently as described in
            #   `Section 3.4`_ and comparing it to the value received from the
            #   client via the "oauth_signature" parameter.
            # .. _`Section 3.4`: http://tools.ietf.org/html/rfc5849#section-3.4
            client_secret = self.get_client_secret(client_key)
            if require_verifier:
                resource_owner_secret = self.get_request_token_secret(
                    client_key, resource_owner_key)
            else:
                resource_owner_secret = self.get_access_token_secret(
                    client_key, resource_owner_key)

            if signature_method == SIGNATURE_HMAC:
                valid_signature = signature.verify_hmac_sha1(request,
                    client_secret, resource_owner_secret)
            else:
                valid_signature = signature.verify_plaintext(request,
                    client_secret, resource_owner_secret)

        # We delay checking validity until the very end, using dummy values for
        # calculations and fetching secrets/keys to ensure the flow of every
        # request remains almost identical regardless of whether valid values
        # have been supplied. This ensures near constant time execution and
        # prevents malicious users from guessing sensitive information
        v = all((valid_client, valid_resource_owner, valid_realm,
                    valid_redirect, valid_verifier, valid_signature))
        logger = logging.getLogger("oauthlib")
        if not v:
            logger.info("[Failure] OAuthLib request verification failed.")
            logger.info("Valid client:\t%s" % valid_client)
            logger.info("Valid token:\t%s\t(Required: %s" % (valid_resource_owner, require_resource_owner))
            logger.info("Valid realm:\t%s\t(Required: %s)" % (valid_realm, require_realm))
            logger.info("Valid callback:\t%s" % valid_redirect)
            logger.info("Valid verifier:\t%s\t(Required: %s)" % (valid_verifier, require_verifier))
            logger.info("Valid signature:\t%s" % valid_signature)
        return v
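
The failure breakdown above is emitted only through the "oauthlib" logger, so by default nothing is printed. A minimal sketch of surfacing those INFO records while debugging a rejected request:

import logging

oauth_log = logging.getLogger("oauthlib")
oauth_log.setLevel(logging.INFO)
oauth_log.addHandler(logging.StreamHandler())  # write the records to stderr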

Example 48

Project: visionegg
Source File: GUI.py
View license
    def __init__(self,master=None,**cnf):
        VisionEgg.config._Tkinter_used = True
        Tkinter.Frame.__init__(self,master,**cnf)
        self.winfo_toplevel().title('Vision Egg - Graphics configuration')
        self.pack()

        self.clicked_ok = 0 # So we can distinguish between clicking OK and closing the window

        row = 0
        Tkinter.Label(self,
                      text="Vision Egg - Graphics configuration",
                      font=("Helvetica",14,"bold")).grid(row=row,columnspan=2)
        row += 1

        ################## begin topframe ##############################

        topframe = Tkinter.Frame(self)
        topframe.grid(row=row,column=0,columnspan=2)
        topframe_row = 0

        Tkinter.Label(topframe,
                      text=self.format_string("The default value for these variables and the presence of this dialog window can be controlled via the Vision Egg config file. If this file exists in the Vision Egg user directory, that file is used.  Otherwise, the configuration file found in the Vision Egg system directory is used."),
                      ).grid(row=topframe_row,column=1,columnspan=2,sticky=Tkinter.W)
        topframe_row += 1

        try:
            import _imaging, _imagingtk
            import ImageFile, ImageFileIO, BmpImagePlugin, JpegImagePlugin
            import Image,ImageTk
            im = Image.open(os.path.join(VisionEgg.config.VISIONEGG_SYSTEM_DIR,'data','visionegg.bmp'))
            self.tk_im=ImageTk.PhotoImage(im)
            Tkinter.Label(topframe,image=self.tk_im).grid(row=0,rowspan=topframe_row,column=0)
        except Exception,x:
            logger = logging.getLogger('VisionEgg.GUI')
            logger.info("No Vision Egg logo :( because of error while "
                        "trying to display image in "
                        "GUI.GraphicsConfigurationWindow: %s: "
                        "%s"%(str(x.__class__),str(x)))

        ################## end topframe ##############################

        row += 1

        ################## begin file_frame ##############################

        file_frame = Tkinter.Frame(self)
        file_frame.grid(row=row,columnspan=2,sticky=Tkinter.W+Tkinter.E,pady=5)

        # Script name and location
        file_row = 0
        Tkinter.Label(file_frame,
                      text="This script:").grid(row=file_row,column=0,sticky=Tkinter.E)
        Tkinter.Label(file_frame,
                      text="%s"%(os.path.abspath(sys.argv[0]),)).grid(row=file_row,column=1,sticky=Tkinter.W)
        file_row += 1
        # Vision Egg system dir
        Tkinter.Label(file_frame,
                      text="Vision Egg system directory:").grid(row=file_row,column=0,sticky=Tkinter.E)
        Tkinter.Label(file_frame,
                      text="%s"%(os.path.abspath(VisionEgg.config.VISIONEGG_SYSTEM_DIR),)).grid(row=file_row,column=1,sticky=Tkinter.W)
        file_row += 1

        # Vision Egg user dir
        Tkinter.Label(file_frame,
                      text="Vision Egg user directory:").grid(row=file_row,column=0,sticky=Tkinter.E)
        Tkinter.Label(file_frame,
                      text="%s"%(os.path.abspath(VisionEgg.config.VISIONEGG_USER_DIR),)).grid(row=file_row,column=1,sticky=Tkinter.W)
        file_row += 1

        # Config file
        Tkinter.Label(file_frame,
                      text="Config file location:").grid(row=file_row,column=0,sticky=Tkinter.E)
        if VisionEgg.config.VISIONEGG_CONFIG_FILE:
            Tkinter.Label(file_frame,
                          text="%s"%(os.path.abspath(VisionEgg.config.VISIONEGG_CONFIG_FILE),)).grid(row=file_row,column=1,sticky=Tkinter.W)
        else:
            Tkinter.Label(file_frame,
                          text="(None)").grid(row=file_row,column=1,sticky=Tkinter.W)
        file_row += 1

        # Log file location
        Tkinter.Label(file_frame,
                      text="Log file location:").grid(row=file_row,column=0,sticky=Tkinter.E)
        if VisionEgg.config.VISIONEGG_LOG_FILE:
            Tkinter.Label(file_frame,
                          text="%s"%(os.path.abspath(VisionEgg.config.VISIONEGG_LOG_FILE),)).grid(row=file_row,column=1,sticky=Tkinter.W)
        else:
            Tkinter.Label(file_frame,
                          text="(stderr console)").grid(row=file_row,column=1,sticky=Tkinter.W)

        ################## end file_frame ##############################

        row += 1

        ################## begin cf ##############################

        cf = Tkinter.Frame(self)
        cf.grid(row=row,column=0,padx=10)

        cf_row = 0
        # Fullscreen
        self.fullscreen = Tkinter.BooleanVar()
        self.fullscreen.set(VisionEgg.config.VISIONEGG_FULLSCREEN)
        Tkinter.Checkbutton(cf,
                            text='Fullscreen',
                            variable=self.fullscreen,
                            relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)

        cf_row += 1
        self.synclync_present = Tkinter.BooleanVar()
        self.synclync_present.set(VisionEgg.config.SYNCLYNC_PRESENT)
        try:
            import synclync
            self.show_synclync_option = 1
        except:
            self.show_synclync_option = 0

        if self.show_synclync_option:
            Tkinter.Checkbutton(cf,
                                text='SyncLync device present',
                                variable=self.synclync_present,
                                relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)


        cf_row += 1
        # Maximum priority
        self.maxpriority = Tkinter.BooleanVar()
        self.maxpriority.set(VisionEgg.config.VISIONEGG_MAXPRIORITY)

        Tkinter.Checkbutton(cf,
                            text='Maximum priority (use with caution)',
                            variable=self.maxpriority,
                            relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)
        cf_row += 1

        if sys.platform=='darwin':
            # Only used on darwin platform
            self.darwin_conventional = Tkinter.IntVar()
            self.darwin_conventional.set(VisionEgg.config.VISIONEGG_DARWIN_MAXPRIORITY_CONVENTIONAL_NOT_REALTIME)
            self.darwin_priority = Tkinter.StringVar()
            self.darwin_priority.set(str(VisionEgg.config.VISIONEGG_DARWIN_CONVENTIONAL_PRIORITY))
            self.darwin_realtime_period_denom = Tkinter.StringVar()
            self.darwin_realtime_period_denom.set(str(VisionEgg.config.VISIONEGG_DARWIN_REALTIME_PERIOD_DENOM))
            self.darwin_realtime_computation_denom = Tkinter.StringVar()
            self.darwin_realtime_computation_denom.set(str(VisionEgg.config.VISIONEGG_DARWIN_REALTIME_COMPUTATION_DENOM))
            self.darwin_realtime_constraint_denom = Tkinter.StringVar()
            self.darwin_realtime_constraint_denom.set(str(VisionEgg.config.VISIONEGG_DARWIN_REALTIME_CONSTRAINT_DENOM))
            self.darwin_realtime_preemptible = Tkinter.BooleanVar()
            self.darwin_realtime_preemptible.set(not VisionEgg.config.VISIONEGG_DARWIN_REALTIME_PREEMPTIBLE)
            Tkinter.Button(cf,text="Maximum priority options...",
                           command=self.darwin_maxpriority_tune).grid(row=cf_row,column=0)
            cf_row += 1

        # Sync swap
        self.sync_swap = Tkinter.BooleanVar()
        self.sync_swap.set(VisionEgg.config.VISIONEGG_SYNC_SWAP)
        Tkinter.Checkbutton(cf,
                            text='Attempt vsync',
                            variable=self.sync_swap,
                            relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)
        cf_row += 1

        # Frameless window
        self.frameless = Tkinter.BooleanVar()
        self.frameless.set(VisionEgg.config.VISIONEGG_FRAMELESS_WINDOW)
        Tkinter.Checkbutton(cf,
                            text='No frame around window',
                            variable=self.frameless,
                            relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)
        cf_row += 1

        # Hide mouse
        self.mouse_visible = Tkinter.BooleanVar()
        self.mouse_visible.set(not VisionEgg.config.VISIONEGG_HIDE_MOUSE)
        Tkinter.Checkbutton(cf,
                            text='Mouse cursor visible',
                            variable=self.mouse_visible,
                            relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)
        cf_row += 1

        # Stereo
        self.stereo = Tkinter.BooleanVar()
        self.stereo.set(VisionEgg.config.VISIONEGG_REQUEST_STEREO)
        Tkinter.Checkbutton(cf,
                            text='Stereo',
                            variable=self.stereo,
                            relief=Tkinter.FLAT).grid(row=cf_row,column=0,sticky=Tkinter.W)
        cf_row += 1

        if sys.platform == 'darwin':
            if sys.version == '2.2 (#11, Jan  6 2002, 01:00:42) \n[GCC 2.95.2 19991024 (release)]':
                if Tkinter.TkVersion == 8.4:
                    # The Tk in Bob Ippolito's kitchensink distro had
                    # a bug in Checkbutton
                    Tkinter.Label(cf,text="If you want to check any buttons\n(Mac OS X Tk 8.4a4 bug workaround):").grid(row=cf_row,column=0)
                    cf_row += 1
                    Tkinter.Button(cf,text="PRESS ME FIRST").grid(row=cf_row,column=0)
                    cf_row += 1

        ################## end cf ##############################

        ################## begin entry_frame ###################

        entry_frame = Tkinter.Frame(self)
        entry_frame.grid(row=row,column=1,padx=10,sticky="n")
        row += 1
        ef_row = 0

        # frame rate
        Tkinter.Label(entry_frame,text="What will your monitor's refresh rate be (Hz):").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.frame_rate = Tkinter.StringVar()
        self.frame_rate.set("%s"%str(VisionEgg.config.VISIONEGG_MONITOR_REFRESH_HZ))
        Tkinter.Entry(entry_frame,textvariable=self.frame_rate).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # width
        Tkinter.Label(entry_frame,text="Window width (pixels):").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.width = Tkinter.StringVar()
        self.width.set("%s"%str(VisionEgg.config.VISIONEGG_SCREEN_W))
        Tkinter.Entry(entry_frame,textvariable=self.width).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # height
        Tkinter.Label(entry_frame,text="Window height (pixels):").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.height = Tkinter.StringVar()
        self.height.set("%s"%str(VisionEgg.config.VISIONEGG_SCREEN_H))
        Tkinter.Entry(entry_frame,textvariable=self.height).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # color depth
        Tkinter.Label(entry_frame,text="Requested total color depth (bits per pixel):").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.color_depth = Tkinter.StringVar()
        self.color_depth.set(str(VisionEgg.config.VISIONEGG_PREFERRED_BPP))
        Tkinter.Entry(entry_frame,textvariable=self.color_depth).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # red depth
        Tkinter.Label(entry_frame,text="Requested red bits per pixel:").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.red_depth = Tkinter.StringVar()
        self.red_depth.set(str(VisionEgg.config.VISIONEGG_REQUEST_RED_BITS))
        Tkinter.Entry(entry_frame,textvariable=self.red_depth).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # green depth
        Tkinter.Label(entry_frame,text="Requested green bits per pixel:").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.green_depth = Tkinter.StringVar()
        self.green_depth.set(str(VisionEgg.config.VISIONEGG_REQUEST_GREEN_BITS))
        Tkinter.Entry(entry_frame,textvariable=self.green_depth).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # blue depth
        Tkinter.Label(entry_frame,text="Requested blue bits per pixel:").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.blue_depth = Tkinter.StringVar()
        self.blue_depth.set(str(VisionEgg.config.VISIONEGG_REQUEST_BLUE_BITS))
        Tkinter.Entry(entry_frame,textvariable=self.blue_depth).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        # alpha depth
        Tkinter.Label(entry_frame,text="Requested alpha bits per pixel:").grid(row=ef_row,column=0,sticky=Tkinter.E)
        self.alpha_depth = Tkinter.StringVar()
        self.alpha_depth.set(str(VisionEgg.config.VISIONEGG_REQUEST_ALPHA_BITS))
        Tkinter.Entry(entry_frame,textvariable=self.alpha_depth).grid(row=ef_row,column=1,sticky=Tkinter.W)
        ef_row += 1

        ################## end entry_frame ###################

        ################## gamma_frame ###################

        # gamma stuff
        row += 1
        gamma_frame = Tkinter.Frame(self)
        gamma_frame.grid(row=row,columnspan=2,sticky="we")
        self.gamma_source = Tkinter.StringVar()
        self.gamma_source.set(str(VisionEgg.config.VISIONEGG_GAMMA_SOURCE).lower()) # can be 'none', 'invert', or 'file'
        Tkinter.Label(gamma_frame,
                      text="Gamma:").grid(row=0,column=0)
        Tkinter.Radiobutton(gamma_frame,
                            text="Native",
                            value="none",
                            variable = self.gamma_source).grid(row=0,column=1,padx=1)
        Tkinter.Radiobutton(gamma_frame,
                            text="Quick",
                            value="invert",
                            variable = self.gamma_source).grid(row=0,column=2)
        Tkinter.Label(gamma_frame,
                      text="R:").grid(row=0,column=3)
        self.gamma_invert_red = Tkinter.DoubleVar()
        self.gamma_invert_red.set( VisionEgg.config.VISIONEGG_GAMMA_INVERT_RED )
        Tkinter.Entry(gamma_frame,
                      textvariable=self.gamma_invert_red,
                      width=3).grid(row=0,column=4)
        Tkinter.Label(gamma_frame,
                      text="G:").grid(row=0,column=5)
        self.gamma_invert_green = Tkinter.DoubleVar()
        self.gamma_invert_green.set( VisionEgg.config.VISIONEGG_GAMMA_INVERT_GREEN )
        Tkinter.Entry(gamma_frame,
                      textvariable=self.gamma_invert_green,
                      width=3).grid(row=0,column=6)
        Tkinter.Label(gamma_frame,
                      text="B:").grid(row=0,column=7)
        self.gamma_invert_blue = Tkinter.DoubleVar()
        self.gamma_invert_blue.set( VisionEgg.config.VISIONEGG_GAMMA_INVERT_BLUE )
        Tkinter.Entry(gamma_frame,
                      textvariable=self.gamma_invert_blue,
                      width=3).grid(row=0,column=8)
        Tkinter.Radiobutton(gamma_frame,
                            text="Custom:",
                            value="file",
                            variable = self.gamma_source).grid(row=0,column=9)
        self.gamma_file = Tkinter.StringVar()
        if os.path.isfile(VisionEgg.config.VISIONEGG_GAMMA_FILE):
            self.gamma_file.set( VisionEgg.config.VISIONEGG_GAMMA_FILE )
        else:
            self.gamma_file.set("")
        Tkinter.Entry(gamma_frame,
                      textvariable=self.gamma_file,
                      width=15).grid(row=0,column=10)
        Tkinter.Button(gamma_frame,
                       command=self.set_gamma_file,
                       text="Set...").grid(row=0,column=11)

        ################## end gamma_frame ###################

        row += 1
        bf = Tkinter.Frame(self)
        bf.grid(row=row,columnspan=2,sticky=Tkinter.W+Tkinter.E)

        # Save settings to config file
        b = Tkinter.Button(bf,text="Save current settings to config file",command=self.save)
        b.grid(row=0,column=0,padx=20)
        b.bind('<Return>',self.save)

        # Start button
        b2 = Tkinter.Button(bf,text="ok",command=self.start)
        b2.grid(row=0,column=1,padx=20)
        b2.focus_force()
        b2.bind('<Return>',self.start)

        # Raise our application on darwin
        if sys.platform == 'darwin':
            try:
                # from Jack Jansen email 20 April 2003
                # WMAvailable() returns true if you can use the window
                # manager, and as a side effect it raises the
                # application to the foreground.
                import MacOS
                if not MacOS.WMAvailable():
                    raise RuntimeError("Cannot reach the window manager")
            except Exception:
                pass
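
The settings panel above repeats one pattern for every option: copy a VisionEgg.config value into a Tkinter variable, attach the variable to a widget, and read the variable back when saving. A minimal sketch of that round trip, using Python 3's tkinter and a plain dict standing in for VisionEgg.config (both assumptions of this sketch, not part of the original example):

import tkinter

config = {'FULLSCREEN': False}  # hypothetical stand-in for VisionEgg.config

root = tkinter.Tk()
fullscreen = tkinter.BooleanVar()
fullscreen.set(config['FULLSCREEN'])  # config value -> Tk variable
tkinter.Checkbutton(root, text='Fullscreen',
                    variable=fullscreen).grid(row=0, column=0, sticky=tkinter.W)

def save():
    config['FULLSCREEN'] = fullscreen.get()  # Tk variable -> config value
    print('saved:', config)

tkinter.Button(root, text='Save', command=save).grid(row=1, column=0)
root.mainloop()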

Example 49

Project: voctomix
Source File: videodisplay.py
View license
    def __init__(self, drawing_area, port, width=None, height=None,
                 play_audio=False, level_callback=None):
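        # a per-port logger name keeps log lines from multiple displays distinguishable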
        self.log = logging.getLogger('VideoDisplay[%u]' % port)

        self.drawing_area = drawing_area
        self.level_callback = level_callback

        if Config.has_option('previews', 'videocaps'):
            previewcaps = Config.get('previews', 'videocaps')
        else:
            previewcaps = Config.get('mix', 'videocaps')

        use_previews = (Config.getboolean('previews', 'enabled') and
                        Config.getboolean('previews', 'use'))

        # Preview-Ports are Raw-Ports + 1000
        if use_previews:
            self.log.info('using encoded previews instead of raw-video')
            port += 1000

            vdec = 'image/jpeg ! jpegdec'
            if Config.has_option('previews', 'vaapi'):
                try:
                    decoder = Config.get('previews', 'vaapi')
                    decoders = {
                        'h264': 'video/x-h264 ! avdec_h264',
                        'jpeg': 'image/jpeg ! jpegdec',
                        'mpeg2': 'video/mpeg,mpegversion=2 ! mpeg2dec'
                    }
                    vdec = decoders[decoder]
                except Exception as e:
                    self.log.error(e)

        else:
            self.log.info('using raw-video instead of encoded-previews')
            vdec = None

        # Setup Server-Connection, Demuxing and Decoding
        pipeline = """
            tcpclientsrc host={host} port={port} blocksize=1048576 !
            queue !
            matroskademux name=demux
        """

        if use_previews:
            pipeline += """
                demux. !
                {vdec} !
                {previewcaps} !
                queue !
            """

        else:
            pipeline += """
                demux. !
                {vcaps} !
                queue !
            """

        # Video Display
        videosystem = Config.get('videodisplay', 'system')
        self.log.debug('Configuring for Video-System %s', videosystem)
        if videosystem == 'gl':
            pipeline += """
                glupload !
                glcolorconvert !
                glimagesinkelement
            """

        elif videosystem == 'xv':
            pipeline += """
                xvimagesink
            """

        elif videosystem == 'x':
            prescale_caps = 'video/x-raw'
            if width and height:
                prescale_caps += ',width=%u,height=%u' % (width, height)

            pipeline += """
                videoconvert !
                videoscale !
                {prescale_caps} !
                ximagesink
            """.format(prescale_caps=prescale_caps)

        else:
            raise Exception(
                'Invalid Videodisplay-System configured: %s' % videosystem
            )

        # If an Audio-Path is required,
        # add an Audio-Path through a level-Element
        if self.level_callback or play_audio:
            pipeline += """
                demux. !
                {acaps} !
                queue !
                level name=lvl interval=50000000 !
            """

            # If playback is requested, push to pulseaudio
            if play_audio:
                pipeline += """
                    pulsesink
                """

            # Otherwise just trash the Audio
            else:
                pipeline += """
                    fakesink
                """

        pipeline = pipeline.format(
            acaps=Config.get('mix', 'audiocaps'),
            vcaps=Config.get('mix', 'videocaps'),
            previewcaps=previewcaps,
            host=Args.host if Args.host else Config.get('server', 'host'),
            vdec=vdec,
            port=port,
        )

        self.log.debug('Creating Display-Pipeline:\n%s', pipeline)
        self.pipeline = Gst.parse_launch(pipeline)
        self.pipeline.use_clock(Clock)

        self.drawing_area.realize()
        self.xid = self.drawing_area.get_property('window').get_xid()
        self.log.debug('Realized Drawing-Area with xid %u', self.xid)

        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.enable_sync_message_emission()

        bus.connect('message::error', self.on_error)
        bus.connect("sync-message::element", self.on_syncmsg)

        if self.level_callback:
            bus.connect("message::element", self.on_level)

        self.log.debug('Launching Display-Pipeline')
        self.pipeline.set_state(Gst.State.PLAYING)
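
Note how the logger created in __init__ names itself after the port: logging.getLogger('VideoDisplay[%u]' % port) bakes the connection into the logger name, so every later self.log call identifies its display. A standalone sketch of that naming pattern (the class body and port values here are illustrative only):

import logging

logging.basicConfig(level=logging.DEBUG,
                    format='%(name)s %(levelname)s %(message)s')

class VideoDisplay(object):
    def __init__(self, port):
        # one logger per instance, named after the port it serves
        self.log = logging.getLogger('VideoDisplay[%u]' % port)
        self.log.info('connecting on port %u', port)

VideoDisplay(9000)   # logs: VideoDisplay[9000] INFO connecting on port 9000
VideoDisplay(10000)  # logs: VideoDisplay[10000] INFO connecting on port 10000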

Example 50

Project: pyline
Source File: pyline.py
View license
def main(args=None, iterable=None, output=None, results=None, opts=None):
    """parse args, process iterable, write to output, return a returncode

    ``pyline.main`` function

    Kwargs:
        args (None or list[str]): list of commandline arguments (``--help``)
        iterable (None or iterable[object{str,}]): iterable of objects
        output (object:write): a file-like object with a ``.write`` method
        results (None or object:append): if not None, append results here
        opts (None): if set, these preempt args and argument parsing
    Returns:
        tuple: (returncode, results); returncode is nonzero on error

    Raises:
        OptParseError: optparse.parse_args(args) may raise

    .. code::

        import pyline
        pyline.main(['-v', 'l and l[1]'], ['one 1', 'two 2', 'three 3'])
        pyline.main(['-v', 'w and w[1]'], ['one 1', 'two 2', 'three 3'])

    """
    import logging
    import sys

    prs = get_option_parser()

    argv = args = list(args) if args is not None else []  # sys.argv[1:]
    if opts is None:
        (opts, args) = prs.parse_args(args)
    optsdict = None
    if hasattr(opts, '__dict__'):
        optsdict = opts.__dict__
    elif hasattr(opts, 'items'):
        optsdict = opts
    else:
        raise ValueError(opts)
    if not optsdict:
        optsdict = {}
    opts = optsdict

    log = logging.getLogger(DEFAULT_LOGGER)
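    # getLogger with a fixed name always returns the same logger object, so the
    # level set below applies to every module that logs under DEFAULT_LOGGER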
    # if -q/--quiet is not specified
    if not opts.get('quiet'):
        #logging.basicConfig(
        #)
        log.setLevel(logging.WARN)

        # if -v/--verbose is specified
        if opts.get('verbose'):
            log.setLevel(logging.DEBUG)
    # if -q/--quiet is specified
    else:
        log.setLevel(logging.ERROR)
    log.info(('pyline.version', __version__))
    log.info(('argv', argv))
    log.info(('args', args))

    if opts.get('version'):
        print(__version__)
        return 0, None

    opts['col_map'] = collections.OrderedDict()
    if opts.get('col_mapstr'):
        opts['col_map'] = build_column_map(opts.get('col_mapstr'))

    sortfunc = None
    if opts.get('sort_asc') and opts.get('sort_desc'):
        prs.error("both sort-asc and sort-desc are specified")

    if 'cmd' not in opts:
        cmd = ' '.join(args)
        if not cmd.strip():
            if opts.get('regex'):
                if (opts.get('_output_format') == 'json'
                    and '<' in opts.get('regex')):
                    cmd = 'rgx and rgx.groupdict()'
                else:
                    cmd = 'rgx and rgx.groups()'
            else:
                cmd = 'obj'
        opts['cmd'] = cmd.strip()

    log.info(('cmd', opts['cmd']))
    # opts['attrs'] = PylineResult._fields # XX
    opts['attrs'] = list(opts['col_map'].keys()) if 'col_map' in opts else None

    try:
        if iterable is not None:
            opts['_file'] = iterable
        else:
            if opts.get('file') == '-':
                # opts._file = sys.stdin
                opts['_file'] = codecs.getreader('utf8')(sys.stdin)
            else:
                opts['_file'] = codecs.open(opts['file'], 'r', encoding='utf8')

        if output is not None:
            opts['_output'] = output
        else:
            if opts.get('output') == '-':
                # opts._output = sys.stdout
                opts['_output'] = codecs.getwriter('utf8')(sys.stdout)
            elif opts.get('output'):
                opts['_output'] = codecs.open(opts['output'], 'w', encoding='utf8')
            else:
                # opts._output = sys.stdout
                opts['_output'] = codecs.getwriter('utf8')(sys.stdout)

        if opts.get('_output_format') is None:
            #opts._output_format = DEFAULTS['_output_format']
            #opts['_output_format'] = 'csv'
            opts['_output_format'] = 'json'
            #TODO
        log.info(('_output_format', opts['_output_format']))

        log.info(('opts', opts))

        writer = ResultWriter.get_writer(
            opts['_output'],
            output_format=opts['_output_format'],
            number_lines=opts.get('number_lines'),
            attrs=opts['attrs'])
        writer.header()

        sortfunc = get_sort_function(**opts)
        # if sorting, collect and sort before printing
        if sortfunc:
            _results = []
            for result in pyline(opts['_file'], **opts):
                if not result.result:
                    # skip result if not bool(result.result)
                    continue
                _results.append(result)
            sorted_results = sortfunc(_results)
            if results is not None:
                results.extend(sorted_results)
            # import pdb; pdb.set_trace()  # XXX BREAKPOINT
            for result in sorted_results:
                writer.output_func(result)
        # if not sorting, return a result iterator
        else:
            for result in pyline(opts['_file'], **opts):
                if not result.result:
                    # skip result if not bool(result.result)
                    continue
                writer.output_func(result)
                if results is not None:
                    results.append(result)

        writer.footer()
    finally:
        # close the input unless it is stdin/stdout/stderr (fds 0-2); objects
        # without a real fileno (e.g. plain iterables) report 0 and are skipped
        _file = opts.get('_file')
        if getattr(_file, 'fileno', int)() not in (0, 1, 2):
            _file.close()

    # opts
    # results
    # sorted_results
    # if passed, results are .append-ed to results
    return 0, results
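
Example 50 maps the -q/--quiet and -v/--verbose flags onto levels of one named logger fetched with logging.getLogger. The essential mapping, reduced to a standalone sketch (the literal name 'pyline' stands in for the example's DEFAULT_LOGGER constant and is an assumption here):

import logging

log = logging.getLogger('pyline')  # the same name always yields the same logger

def configure_logging(quiet=False, verbose=False):
    logging.basicConfig(format='%(name)s %(levelname)s %(message)s')
    if quiet:
        log.setLevel(logging.ERROR)  # -q: errors only
    elif verbose:
        log.setLevel(logging.DEBUG)  # -v: everything
    else:
        log.setLevel(logging.WARN)   # default: warnings and up

configure_logging(verbose=True)
log.debug('visible only because verbose=True')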