functools.wraps

Here are examples of the Python API functools.wraps, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

147 Examples
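
Before the examples, a minimal self-contained sketch of what functools.wraps does: it copies metadata such as __name__, __doc__, and __module__ from the wrapped function onto the wrapper, and (since Python 3.2) sets __wrapped__ so the original stays reachable.

import functools

def logged(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print("calling", func.__name__)
        return func(*args, **kwargs)
    return wrapper

@logged
def add(a, b):
    """Return the sum of a and b."""
    return a + b

print(add(1, 2))        # prints "calling add", then 3
print(add.__name__)     # 'add', not 'wrapper'
print(add.__doc__)      # 'Return the sum of a and b.'
print(add.__wrapped__)  # the original, undecorated function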

Example 1

Project: dose Source File: compat.py
def _wx_two_step_creation_on_classic(cls):
    """
    Patch the wxPython Classic class to behave like a wxPython
    Phoenix class on a 2-step creation process.

    On wxPython Phoenix, the first step is the parameterless
    ``__init__``, and the second step is the ``Create`` method with
    the construction parameters, e.g.::

        class CustomFrame(wx.Frame):
            def __init__(self, parent):
                super(CustomFrame, self).__init__() # 1st step
                # [...]
                self.Create(parent) # 2nd step
                # [...]

    On wxPython Classic, the same would be written as::

        class CustomFrame(wx.Frame):
            def __init__(self, parent):
                pre = wx.PreFrame() # 1st step
                # [... using "pre" instead of "self" ...]
                pre.Create(parent) # 2nd step
                self.PostCreate(pre) # "3rd step"
                # [...]
    """
    cls_init = cls.__init__
    cls_create = cls.Create

    @functools.wraps(cls_init)
    def __init__(self, *args, **kwargs):
        if args or kwargs:
            cls_init(self, *args, **kwargs)
        else: # 2-step creation
            new_self = getattr(wx, "Pre" + cls.__name__)()
            for pair in vars(new_self).items():
                setattr(self, *pair)

    if sys.platform == "win32":
        # On Windows, the wx.Pre*.Create constructor calls the
        # EVT_WINDOW_CREATE handler before returning (i.e., it processes
        # the event instead of just adding a message to the queue), and
        # that shouldn't happen before the PostCreate call in this thread
        @functools.wraps(cls_create)
        def create(self, *args, **kwargs):
            self.SetEvtHandlerEnabled(False)
            result = cls_create(self, *args, **kwargs)
            self.SetEvtHandlerEnabled(True)
            if result:
                self.PostCreate(self)
                wx.PostEvent(self, wx.WindowCreateEvent(self))
            return result
    else:
        @functools.wraps(cls_create)
        def create(self, *args, **kwargs):
            result = cls_create(self, *args, **kwargs)
            if result:
                self.PostCreate(self)
            return result

    cls.__init__ = __init__
    cls.Create = create
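
The replacement __init__ above transplants instance state from the freshly built wx.Pre* object onto self with the vars()/setattr idiom. A standalone sketch of that trick, using hypothetical plain classes so no wx install is needed:

class Source(object):
    def __init__(self):
        self.title = "Main"
        self.size = (800, 600)

class Target(object):
    pass

src, dst = Source(), Target()
for pair in vars(src).items():  # same idiom as the patched __init__ above
    setattr(dst, *pair)

print(dst.title, dst.size)  # Main (800, 600)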

Example 2

Project: lrrbot Source File: utils.py
def coro_decorator(decorator):
	"""
	Utility decorator used when defining other decorators, so they can wrap
	either normal functions or asyncio coroutines.

	Usage:
	@coro_decorator
	def decorator(func):
		@functools.wraps(func)
		@asyncio.coroutine # decorator must return a coroutine, and use "yield from" to call func
	def wrapper(...):
			...
			ret = yield from func(...)
			...
			return ...
		return wrapper

	@decorator
	def normal_func():
		pass

	@decorator # @decorator must be above @coroutine
	@asyncio.coroutine
	def coro_func():
		pass

	Note that the decorator must *not* yield from anything *except* the function
	it's decorating.
	"""
	# any extra properties that we want to assign to wrappers, in any of the decorators
	# we use this on
	EXTRA_PARAMS = ('reset_throttle',)
	@functools.wraps(decorator)
	def wrapper(func):
		is_coro = asyncio.iscoroutinefunction(func)
		if not is_coro:
			func = asyncio.coroutine(func)

		decorated_coro = decorator(func)
		assert asyncio.iscoroutinefunction(decorated_coro)

		if is_coro:
			return decorated_coro
		else:
			# Unwrap the coroutine. We know it should never yield.
			@functools.wraps(decorated_coro, assigned=functools.WRAPPER_ASSIGNMENTS + EXTRA_PARAMS, updated=())
			def decorated_func(*args, **kwargs):
				x = iter(decorated_coro(*args, **kwargs))
				try:
					next(x)
				except StopIteration as e:
					return e.value
				else:
					raise Exception("Decorator %s behaving badly wrapping non-coroutine %s" % (decorator.__name__, func.__name__))
			return decorated_func
	return wrapper
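
The decorated_func branch above drives the coroutine by hand and recovers its return value from the StopIteration it raises. That relies on a Python 3.3+ detail worth seeing in isolation: a generator's return value travels on StopIteration.value.

def gen():
    return 42
    yield  # never reached; its presence makes this a generator function

it = iter(gen())
try:
    next(it)
except StopIteration as e:
    print(e.value)  # 42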

Example 3

Project: pootle Source File: views.py
def redirect_to_tp_on_404(f):

    @functools.wraps(f)
    def method_wrapper(self, request, *args, **kwargs):
        try:
            request.permissions = get_matching_permissions(
                request.user,
                self.permission_context) or []
        except Http404 as e:
            # Test if lang code is not canonical but valid
            lang = Language.get_canonical(kwargs['language_code'])
            if lang is not None and lang.code != kwargs['language_code']:
                kwargs["language_code"] = lang.code
                return redirect(
                    resolve(request.path).view_name,
                    permanent=True,
                    **kwargs)

            elif kwargs["dir_path"] or kwargs.get("filename", None):
                try:
                    TranslationProject.objects.get(
                        project__code=kwargs["project_code"],
                        language__code=kwargs["language_code"])
                    # the TP exists so redirect to it
                    return redirect(
                        reverse(
                            'pootle-tp-browse',
                            kwargs={
                                k: v
                                for k, v
                                in kwargs.items()
                                if k in [
                                    "language_code",
                                    "project_code"]}))
                except TranslationProject.DoesNotExist:
                    pass

            # if we get here - the TP does not exist
            user_choice = self.request.COOKIES.get(
                'user-choice', None)
            if user_choice:
                url = None
                if user_choice == 'language':
                    url = reverse(
                        'pootle-language-browse',
                        args=[kwargs["language_code"]])
                elif user_choice == "project":
                    url = reverse(
                        'pootle-project-browse',
                        args=[kwargs["project_code"], '', ''])
                if url:
                    response = redirect(url)
                    response.delete_cookie('user-choice')
                    return response
            raise e
        return f(self, request, *args, **kwargs)
    return method_wrapper

Example 4

Project: unstdlib.py Source File: functools_.py
Function: memoized
def memoized(fn=None, cache=None):
    """ Memoize a function into an optionally-specificed cache container.

    If the `cache` container is not specified, then the instance container is
    accessible from the wrapped function's `memoize_cache` property.

    Example::

        >>> @memoized
        ... def foo(bar):
        ...   print("Not cached.")
        >>> foo(1)
        Not cached.
        >>> foo(1)
        >>> foo(2)
        Not cached.

    Example with a specific cache container (in this case, the
    ``RecentlyUsedContainer``, which will only store the ``maxsize`` most
    recently accessed items)::

        >>> from unstdlib.standard.collections_ import RecentlyUsedContainer
        >>> lru_container = RecentlyUsedContainer(maxsize=2)
        >>> @memoized(cache=lru_container)
        ... def baz(x):
        ...   print("Not cached.")
        >>> baz(1)
        Not cached.
        >>> baz(1)
        >>> baz(2)
        Not cached.
        >>> baz(3)
        Not cached.
        >>> baz(2)
        >>> baz(1)
        Not cached.
        >>> # Notice that the '2' key remains, but the '1' key was evicted from
        >>> # the cache.
    """
    if fn:
        # This is a hack to support both @memoize and @memoize(...)
        return memoized(cache=cache)(fn)

    if cache is None:
        cache = {}

    def decorator(fn):
        wrapped = wraps(fn)(partial(_memoized_call, fn, cache))
        wrapped.memoize_cache = cache
        return wrapped

    return decorator
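
The helper _memoized_call is referenced but not shown on this page. A plausible reconstruction, assuming it keys the cache on positional and keyword arguments (the actual unstdlib implementation may differ):

def _memoized_call(fn, cache, *args, **kwargs):
    # Hypothetical sketch: partial() above binds fn and cache, so the
    # remaining call arguments form the cache key.
    key = (args, tuple(sorted(kwargs.items())))
    if key not in cache:
        cache[key] = fn(*args, **kwargs)
    return cache[key]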

Example 5

Project: django-clsview Source File: djclsview.py
Function: method_decorator
def method_decorator(decorator):
    
    """
    Wrap a function decorator as a method decorator.
    
    A lot of decorators in Python are written to depend on a set order of
    positional arguments. In Django, for example, all of the built-in decorators
    expect the first argument to be a `request` object. If you're writing a
    typical class-based view, the first argument will be `self`, so you won't be
    able to use the standard decorators. Until now!
    
    `method_decorator()` wraps a function decorator to produce a new method
    decorator. Use it like this:
    
        class MyClassBasedView(object):
            
            @method_decorator(login_required)
            def __call__(self, request, *args, **kwargs):
                do_something()
    
    Full Example
    ============
    
        >>> class X(object):
        ...     def meth(self, a, b):
        ...         return a + b
        
        >>> X().meth(1, 2)
        3
    
    A standard decorator for a binary operator might look like this:
    
        >>> def mult_2(func):
        ...     def wrapper(a, b):
        ...         return func(a * 2, b * 2)
        ...     return wrapper
    
    It works fine for a standard function:
    
        >>> @mult_2
        ... def func(a, b):
        ...     return a + b
        >>> func(1, 2)
        6
    
    But we can't use that directly inside a class:
    
        >>> class Y(object):
        ...     @mult_2
        ...     def meth(self, a, b):
        ...         return a + b
        
        >>> Y().meth(1, 2)
        Traceback (most recent call last):
        ...
        TypeError: wrapper() takes exactly 2 arguments (3 given)
    
    But using `@method_decorator`, we can:
    
        >>> class Z(object):
        ...     @method_decorator(mult_2)
        ...     def meth(self, a, b):
        ...         return a + b
        
        >>> Z().meth(1, 2)
        6
    
    """
    
    @wraps(decorator)
    def decoratorwrapper(method):
        @wraps(method)
        def methodwrapper(self, *args, **kwargs):
            return decorator(wraps(method)(partial(method, self)))(*args, **kwargs)
        return methodwrapper
    return decoratorwrapper

Example 6

Project: deep_recommend_system Source File: deprecation.py
def deprecated_args(date, instructions, *deprecated_arg_names):
  """Decorator for marking specific function arguments as deprecated.

  This decorator logs a deprecation warning whenever the decorated function is
  called with the deprecated argument. It has the following format:

    Calling <function> (from <module>) with <arg> is deprecated and will be
    removed after <date>. Instructions for updating:
      <instructions>

  <function> will include the class name if it is a method.

  It also edits the docstring of the function: ' (deprecated arguments)' is
  appended to the first line of the docstring and a deprecation notice is
  prepended to the rest of the docstring.

  Args:
    date: String. The date the function is scheduled to be removed. Must be
      ISO 8601 (YYYY-MM-DD).
    instructions: String. Instructions on how to update code using the
      deprecated function.
    *deprecated_arg_names: String. The deprecated arguments.

  Returns:
    Decorated function or method.

  Raises:
    ValueError: If date is not in ISO 8601 format, instructions are empty, or
      the deprecated arguments are not present in the function signature.
  """
  _validate_deprecation_args(date, instructions)
  if not deprecated_arg_names:
    raise ValueError('Specify which argument is deprecated.')

  def deprecated_wrapper(func):
    """Deprecation decorator."""
    decorator_utils.validate_callable(func, 'deprecated_args')

    arg_spec = inspect.getargspec(func)
    deprecated_positions = [
        (i, arg_name) for (i, arg_name) in enumerate(arg_spec.args)
        if arg_name in deprecated_arg_names]
    is_varargs_deprecated = arg_spec.varargs in deprecated_arg_names
    is_kwargs_deprecated = arg_spec.keywords in deprecated_arg_names

    if (len(deprecated_positions) + is_varargs_deprecated + is_kwargs_deprecated
        != len(deprecated_arg_names)):
      known_args = arg_spec.args + [arg_spec.varargs, arg_spec.keywords]
      missing_args = [arg_name for arg_name in deprecated_arg_names
                      if arg_name not in known_args]
      raise ValueError('The following deprecated arguments are not present '
                       'in the function signature: %s. '
                       'Found next arguments: %s.' % (missing_args, known_args))

    @functools.wraps(func)
    def new_func(*args, **kwargs):
      """Deprecation wrapper."""
      invalid_args = []
      for (i, arg_name) in deprecated_positions:
        if i < len(args):
          invalid_args.append(arg_name)
      if is_varargs_deprecated and len(args) > len(arg_spec.args):
        invalid_args.append(arg_spec.varargs)
      if is_kwargs_deprecated and kwargs:
        invalid_args.append(arg_spec.keywords)
      for arg_name in deprecated_arg_names:
        if arg_name in kwargs:
          invalid_args.append(arg_name)
      for arg_name in invalid_args:
        logging.warning(
            'From %s: calling %s (from %s) with %s is deprecated and will '
            'be removed after %s.\nInstructions for updating:\n%s',
            _call_location(), decorator_utils.get_qualified_name(func),
            func.__module__, arg_name, date, instructions)
      return func(*args, **kwargs)
    new_func.__doc__ = _add_deprecated_arg_notice_to_docstring(
        func.__doc__, date, instructions)
    return new_func
  return deprecated_wrapper
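
An illustrative use of the decorator; the function and argument names below are made up, and running it assumes the surrounding TensorFlow deprecation module is importable:

@deprecated_args("2017-01-01", "Use the `size` argument instead.", "dim")
def resize(image, size=None, dim=None):  # hypothetical function
    return size if size is not None else dim

resize("img", dim=3)  # logs the deprecation warning for `dim`, then runs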

Example 7

Project: scrapy-inline-requests Source File: __init__.py
def inline_requests(method_or_func):
    """A decorator to use coroutine-like spider callbacks.

    Example:

    .. code:: python

        class MySpider(Spider):

            @inline_requests
            def parse(self, response):
                next_url = response.urljoin('?next')
                try:
                    next_resp = yield Request(next_url)
                except Exception as e:
                    self.logger.exception("An error occurred.")
                    return
                else:
                    yield {"next_url": next_resp.url}


    You must conform with the following conventions:

    * The decorated method must be a spider method.
    * The decorated method must use the ``yield`` keyword or return a generator.
    * The decorated method must accept ``response`` as the first argument.
    * The decorated method should yield ``Request`` objects with neither
      ``callback`` nor ``errback`` set.

    If your requests don't come back to the generator, try setting the flag to
    handle all HTTP statuses:

    .. code:: python

                request.meta['handle_httpstatus_all'] = True

    """
    args = get_args(method_or_func)
    if not args:
        raise TypeError("Function must accept at least one argument.")
    # XXX: hardcoded convention of 'self' as first argument for methods
    if args[0] == 'self':
        def wrapper(self, response, **kwargs):
            callback = create_bound_method(method_or_func, self)

            genwrapper = RequestGenerator(callback, **kwargs)
            return genwrapper(response)
    else:
        warnings.warn("Decorating a non-method function will be deprecated",
                      ScrapyDeprecationWarning, stacklevel=1)

        def wrapper(response, **kwargs):
            genwrapper = RequestGenerator(method_or_func, **kwargs)
            return genwrapper(response)

    return wraps(method_or_func)(wrapper)

Example 8

Project: viewfinder Source File: gen.py
def coroutine(func):
    """Decorator for asynchronous generators.

    Any generator that yields objects from this module must be wrapped
    in either this decorator or `engine`.

    Coroutines may "return" by raising the special exception
    `Return(value) <Return>`.  In Python 3.3+, it is also possible for
    the function to simply use the ``return value`` statement (prior to
    Python 3.3 generators were not allowed to also return values).
    In all versions of Python a coroutine that simply wishes to exit
    early may use the ``return`` statement without a value.

    Functions with this decorator return a `.Future`.  Additionally,
    they may be called with a ``callback`` keyword argument, which
    will be invoked with the future's result when it resolves.  If the
    coroutine fails, the callback will not be run and an exception
    will be raised into the surrounding `.StackContext`.  The
    ``callback`` argument is not visible inside the decorated
    function; it is handled by the decorator itself.

    From the caller's perspective, ``@gen.coroutine`` is similar to
    the combination of ``@return_future`` and ``@gen.engine``.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        runner = None
        future = TracebackFuture()

        if 'callback' in kwargs:
            callback = kwargs.pop('callback')
            IOLoop.current().add_future(
                future, lambda future: callback(future.result()))

        def handle_exception(typ, value, tb):
            try:
                if runner is not None and runner.handle_exception(typ, value, tb):
                    return True
            except Exception:
                typ, value, tb = sys.exc_info()
            future.set_exc_info((typ, value, tb))
            return True
        with ExceptionStackContext(handle_exception) as deactivate:
            try:
                result = func(*args, **kwargs)
            except (Return, StopIteration) as e:
                result = getattr(e, 'value', None)
            except Exception:
                deactivate()
                future.set_exc_info(sys.exc_info())
                return future
            else:
                if isinstance(result, types.GeneratorType):
                    def final_callback(value):
                        deactivate()
                        future.set_result(value)
                    runner = Runner(result, final_callback)
                    runner.run()
                    return future
            deactivate()
            future.set_result(result)
        return future
    return wrapper
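
A hedged usage sketch in the Tornado style this module follows (assuming gen is this module and Tornado's AsyncHTTPClient is available); the Return exception carries the result back to the caller's Future:

import json

@gen.coroutine
def fetch_json(url):
    client = AsyncHTTPClient()          # assumed available, as in Tornado
    response = yield client.fetch(url)
    raise gen.Return(json.loads(response.body))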

Example 9

Project: asyncio Source File: coroutines.py
Function: coroutine
def coroutine(func):
    """Decorator to mark coroutines.

    If the coroutine is not yielded from before it is destroyed,
    an error message is logged.
    """
    if _inspect_iscoroutinefunction(func):
        # In Python 3.5 that's all we need to do for coroutines
        # defined with "async def".
        # Wrapping in CoroWrapper will happen via
        # 'sys.set_coroutine_wrapper' function.
        return func

    if inspect.isgeneratorfunction(func):
        coro = func
    else:
        @functools.wraps(func)
        def coro(*args, **kw):
            res = func(*args, **kw)
            if (futures.isfuture(res) or inspect.isgenerator(res) or
                isinstance(res, CoroWrapper)):
                res = yield from res
            elif _AwaitableABC is not None:
                # If 'func' returns an Awaitable (new in 3.5) we
                # want to run it.
                try:
                    await_meth = res.__await__
                except AttributeError:
                    pass
                else:
                    if isinstance(res, _AwaitableABC):
                        res = yield from await_meth()
            return res

    if not _DEBUG:
        if _types_coroutine is None:
            wrapper = coro
        else:
            wrapper = _types_coroutine(coro)
    else:
        @functools.wraps(func)
        def wrapper(*args, **kwds):
            w = CoroWrapper(coro(*args, **kwds), func=func)
            if w._source_traceback:
                del w._source_traceback[-1]
            # Python < 3.5 does not implement __qualname__
            # on generator objects, so we set it manually.
            # We use getattr as some callables (such as
            # functools.partial) may lack __qualname__.
            w.__name__ = getattr(func, '__name__', None)
            w.__qualname__ = getattr(func, '__qualname__', None)
            return w

    wrapper._is_coroutine = True  # For iscoroutinefunction().
    return wrapper
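
On the Python versions where generator-based coroutines still existed (the decorator was deprecated in 3.8 and removed in 3.11), decorating a plain function promoted it so its return value became the coroutine's result. A minimal sketch, assuming one of those interpreters:

import asyncio

@asyncio.coroutine
def add(a, b):
    return a + b  # plain function: its return value becomes the result

loop = asyncio.new_event_loop()
print(loop.run_until_complete(add(1, 2)))  # 3
loop.close()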

Example 10

Project: SickGear Source File: testing.py
def gen_test(func=None, timeout=None):
    """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.

    ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
    already running.  ``@gen_test`` should be applied to test methods
    on subclasses of `AsyncTestCase`.

    Example::

        class MyTest(AsyncHTTPTestCase):
            @gen_test
            def test_something(self):
                response = yield gen.Task(self.fetch('/'))

    By default, ``@gen_test`` times out after 5 seconds. The timeout may be
    overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
    or for each test with the ``timeout`` keyword argument::

        class MyTest(AsyncHTTPTestCase):
            @gen_test(timeout=10)
            def test_something_slow(self):
                response = yield gen.Task(self.fetch('/'))

    .. versionadded:: 3.1
       The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
       variable.

    .. versionchanged:: 4.0
       The wrapper now passes along ``*args, **kwargs`` so it can be used
       on functions with arguments.
    """
    if timeout is None:
        timeout = get_async_test_timeout()

    def wrap(f):
        # Stack up several decorators to allow us to access the generator
        # object itself.  In the innermost wrapper, we capture the generator
        # and save it in an attribute of self.  Next, we run the wrapped
        # function through @gen.coroutine.  Finally, the coroutine is
        # wrapped again to make it synchronous with run_sync.
        #
        # This is a good case study arguing for either some sort of
        # extensibility in the gen decorators or cancellation support.
        @functools.wraps(f)
        def pre_coroutine(self, *args, **kwargs):
            result = f(self, *args, **kwargs)
            if isinstance(result, GeneratorType):
                self._test_generator = result
            else:
                self._test_generator = None
            return result

        coro = gen.coroutine(pre_coroutine)

        @functools.wraps(coro)
        def post_coroutine(self, *args, **kwargs):
            try:
                return self.io_loop.run_sync(
                    functools.partial(coro, self, *args, **kwargs),
                    timeout=timeout)
            except TimeoutError as e:
                # run_sync raises an error with an unhelpful traceback.
                # If we throw it back into the generator the stack trace
                # will be replaced by the point where the test is stopped.
                self._test_generator.throw(e)
                # In case the test contains an overly broad except clause,
                # we may get back here.  In this case re-raise the original
                # exception, which is better than nothing.
                raise
        return post_coroutine

    if func is not None:
        # Used like:
        #     @gen_test
        #     def f(self):
        #         pass
        return wrap(func)
    else:
        # Used like @gen_test(timeout=10)
        return wrap

Example 11

Project: qcore Source File: caching.py
def memoize_with_ttl(ttl_secs=60 * 60 * 24):
    """Memoizes return values of the decorated function for a given time-to-live.

    Similar to l0cache, but the cache persists for the duration of the process, unless clear_cache()
    is called on the function or the time-to-live expires. By default, the time-to-live is set to
    24 hours.

    """

    error_msg = 'Incorrect usage of qcore.caching.memoize_with_ttl: ' \
                'ttl_secs must be a positive integer.'
    assert_is_instance(ttl_secs, six.integer_types, error_msg)
    assert_gt(ttl_secs, 0, error_msg)

    def cache_fun(fun):
        argspec = inspect.getargspec(fun)
        arg_names = argspec.args
        kwargs_defaults = get_kwargs_defaults(argspec)

        def cache_key(args, kwargs):
            return repr(get_args_tuple(args, kwargs, arg_names, kwargs_defaults))

        @functools.wraps(fun)
        def new_fun(*args, **kwargs):
            k = cache_key(args, kwargs)
            current_time = int(time.time())

            # k is not in the cache; perform the function and cache the result.
            if k not in new_fun.__cache or k not in new_fun.__cache_times:
                new_fun.__cache[k] = fun(*args, **kwargs)
                new_fun.__cache_times[k] = current_time
                return new_fun.__cache[k]

            # k is in the cache at this point. Check if the ttl has expired;
            # if so, recompute the value and cache it.
            cache_time = new_fun.__cache_times[k]
            if current_time - cache_time > ttl_secs:
                new_fun.__cache[k] = fun(*args, **kwargs)
                new_fun.__cache_times[k] = current_time

            # finally, return the cached result.
            return new_fun.__cache[k]

        def clear_cache():
            """Removes all cached values for this function."""
            new_fun.__cache.clear()
            new_fun.__cache_times.clear()

        def dirty(*args, **kwargs):
            """Dirties the function for a given set of arguments."""
            k = cache_key(args, kwargs)
            new_fun.__cache.pop(k, None)
            new_fun.__cache_times.pop(k, None)

        new_fun.__cache = {}
        new_fun.__cache_times = {}
        new_fun.clear_cache = clear_cache
        new_fun.dirty = dirty
        return new_fun
    return cache_fun
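
A usage sketch, assuming qcore is installed; the function name below is hypothetical:

@memoize_with_ttl(ttl_secs=300)
def fetch_config(name):
    return expensive_lookup(name)  # hypothetical; cached 5 minutes per key

fetch_config.dirty("prod")   # evict the cached entry for these arguments
fetch_config.clear_cache()   # evict everything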

Example 12

Project: webargs Source File: core.py
    def use_args(self, argmap, req=None, locations=None, as_kwargs=False, validate=None):
        """Decorator that injects parsed arguments into a view function or method.

        Example usage with Flask: ::

            @app.route('/echo', methods=['get', 'post'])
            @parser.use_args({'name': fields.Str()})
            def greet(args):
                return 'Hello ' + args['name']

        :param argmap: Either a `marshmallow.Schema`, a `dict`
            of argname -> `marshmallow.fields.Field` pairs, or a callable
            which accepts a request and returns a `marshmallow.Schema`.
        :param tuple locations: Where on the request to search for values.
        :param bool as_kwargs: Whether to insert arguments as keyword arguments.
        :param callable validate: Validation function that receives the dictionary
            of parsed arguments. If the function returns ``False``, the parser
            will raise a :exc:`ValidationError`.
        """
        locations = locations or self.locations
        request_obj = req
        # Optimization: If argmap is passed as a dictionary, we only need
        # to generate a Schema once
        if isinstance(argmap, collections.Mapping):
            argmap = argmap2schema(argmap)()

        def decorator(func):
            req_ = request_obj

            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                req_obj = req_

                # if as_kwargs is passed, must include all args
                force_all = as_kwargs

                if not req_obj:
                    req_obj = self.get_request_from_view_args(func, args, kwargs)
                # NOTE: At this point, argmap may be a Schema, or a callable
                parsed_args = self.parse(argmap, req=req_obj,
                                         locations=locations, validate=validate,
                                         force_all=force_all)
                if as_kwargs:
                    kwargs.update(parsed_args)
                    return func(*args, **kwargs)
                else:
                    # Add parsed_args after other positional arguments
                    new_args = args + (parsed_args, )
                    return func(*new_args, **kwargs)
            return wrapper
        return decorator

Example 13

Project: flask-cache Source File: __init__.py
    def cached(self, timeout=None, key_prefix='view/%s', unless=None):
        """
        Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()

        .. note::

            You MUST have a request context to actually call any functions
            that are cached.

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it. These attributes are readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key.

                           .. versionadded:: 0.3.4
                               Can optionally be a callable which takes no arguments
                               but returns a string that will be used as the cache_key.

        :param unless: Default None. Caching always runs unless this callable
                       returns true, in which case the cache is bypassed
                       entirely.
        """

        def decorator(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                try:
                    cache_key = decorated_function.make_cache_key(*args, **kwargs)
                    rv = self.cache.get(cache_key)
                except Exception:
                    if current_app.debug:
                        raise
                    logger.exception("Exception possibly due to cache backend.")
                    return f(*args, **kwargs)

                if rv is None:
                    rv = f(*args, **kwargs)
                    try:
                        self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                    except Exception:
                        if current_app.debug:
                            raise
                        logger.exception("Exception possibly due to cache backend.")
                        return f(*args, **kwargs)
                return rv

            def make_cache_key(*args, **kwargs):
                if callable(key_prefix):
                    cache_key = key_prefix()
                elif '%s' in key_prefix:
                    cache_key = key_prefix % request.path
                else:
                    cache_key = key_prefix

                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function
        return decorator

Example 14

Project: rosbridge_suite Source File: concurrent.py
Function: return_future
def return_future(f):
    """Decorator to make a function that returns via callback return a
    `Future`.

    The wrapped function should take a ``callback`` keyword argument
    and invoke it with one argument when it has finished.  To signal failure,
    the function can simply raise an exception (which will be
    captured by the `.StackContext` and passed along to the ``Future``).

    From the caller's perspective, the callback argument is optional.
    If one is given, it will be invoked when the function is complete
    with `Future.result()` as an argument.  If the function fails, the
    callback will not be run and an exception will be raised into the
    surrounding `.StackContext`.

    If no callback is given, the caller should use the ``Future`` to
    wait for the function to complete (perhaps by yielding it in a
    `.gen.engine` function, or passing it to `.IOLoop.add_future`).

    Usage::

        @return_future
        def future_func(arg1, arg2, callback):
            # Do stuff (possibly asynchronous)
            callback(result)

        @gen.engine
        def caller(callback):
            yield future_func(arg1, arg2)
            callback()

    Note that ``@return_future`` and ``@gen.engine`` can be applied to the
    same function, provided ``@return_future`` appears first.  However,
    consider using ``@gen.coroutine`` instead of this combination.
    """
    replacer = ArgReplacer(f, 'callback')

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        future = TracebackFuture()
        callback, args, kwargs = replacer.replace(
            lambda value=_NO_RESULT: future.set_result(value),
            args, kwargs)

        def handle_error(typ, value, tb):
            future.set_exc_info((typ, value, tb))
            return True
        exc_info = None
        with ExceptionStackContext(handle_error):
            try:
                result = f(*args, **kwargs)
                if result is not None:
                    raise ReturnValueIgnoredError(
                        "@return_future should not be used with functions "
                        "that return values")
            except:
                exc_info = sys.exc_info()
                raise
        if exc_info is not None:
            # If the initial synchronous part of f() raised an exception,
            # go ahead and raise it to the caller directly without waiting
            # for them to inspect the Future.
            raise_exc_info(exc_info)

        # If the caller passed in a callback, schedule it to be called
        # when the future resolves.  It is important that this happens
        # just before we return the future, or else we risk confusing
        # stack contexts with multiple exceptions (one here with the
        # immediate exception, and again when the future resolves and
        # the callback triggers its exception by calling future.result()).
        if callback is not None:
            def run_callback(future):
                result = future.result()
                if result is _NO_RESULT:
                    callback()
                else:
                    callback(future.result())
            future.add_done_callback(wrap(run_callback))
        return future
    return wrapper

Example 15

Project: django-easyextjs4 Source File: __init__.py
    @staticmethod
    def StaticEvent(pId = None, pEventName = None, pClassName = None, pNameSpace = None, pParams = None, pInterval = None, pUrl = None, pUrlApis = None, pSession = None):
        
        # Define the provider id that will be defined on the JavaScript side
        if pId is not None and not isinstance(pId,str):
            raise ExtJSError('pId must be a string')            
        
        # Force the event name that will be fired on the JavaScript side. If the event name is not specified, it is built
        # automatically by concatenating the namespace, the class name, and the Python function name defined as an event
        if pEventName is not None and not isinstance(pEventName,str):
            raise ExtJSError('pEventName must be a string')            

        # You can override the class name, but be careful: the class name is used to build the name of the event when the
        # event's answer is sent back. If not specified, the name of the class is used.
        if pClassName is not None and not isinstance(pClassName,str):
            raise ExtJSError('pClassName must be a string')            
        
        # pNameSpace is defined to create a unique name. You must be sure it doesn't already exist. If not specified, the namespace of the class is used.
        if pNameSpace is not None and not isinstance(pNameSpace,str):
            raise ExtJSError('pNameSpace must be a string')            
        
        # pInterval defines how often to poll the server side, in milliseconds. If not defined, ExtJS defaults to every 3 seconds.
        if pInterval is not None and not isinstance(pInterval,int):
            raise ExtJSError('pInterval must be an integer')            
        
        # Specify the keyword for the URL. This keyword will be associated with the current event. The URL must be unique for each event.
        # By default the URL is built like this: 'Evt' + '<Name space>' + '<Class name>' + 'Event Name'
        if pUrl is not None and not isinstance(pUrl,str):
            raise ExtJSError('pUrl must be a string')            

        # Specify the JavaScript file. If not defined, the same one defined for the class is used.
        if pUrlApis is not None and not isinstance(pUrlApis,str):
            raise ExtJSError('pUrlApis must be a string')            
        
        if pParams is not None and not (type(pParams) == list or type(pParams) == dict or type(pParams) == str or type(pParams) == int or type(pParams) == long or  type(pParams) == float):
            raise ExtJSError('pParams must be a list, dict, string, int, long or float')

        if pSession is not None:
            if isinstance(pSession,bool) and pSession == True:
                pSession = Ext.sessionFromRequest
            elif not inspect.isfunction(pSession):
                raise ExtJSError('pSession must be a method or a boolean. If it\'s a method it must return a session object. If it\'s a boolean set to True, the session is taken from the Django request.')
        
        lEventInfo = Ext.__Instance()
        
        lEventInfo.UrlApis = pUrlApis
        lEventInfo.Url = pUrl
        lEventInfo.Id = pId
        lEventInfo.EventName = pEventName
        lEventInfo.ClassName = pClassName
        lEventInfo.NameSpace = pNameSpace
        lEventInfo.Params = pParams
        lEventInfo.Interval = pInterval
        lEventInfo.Session = pSession  
        
        def decorator(pEvent):
    
            if type(pEvent) == staticmethod:
                raise ExtJSError('You must declare @staticmethod before @Ext.StaticEvent')
    
            lArgs = inspect.getargspec(pEvent)
            lParams = list(lArgs.args)
            
            if lEventInfo.Session is not None:
                if 'pSession' not in lArgs.args:
                    raise ExtJSError('You must declare a parameter pSession')
                else:
                    # Remove pSession; it will be transmitted automatically by the Request method
                    if lParams != []:
                        # Check if pSession is the first parameter
                        if lParams.index('pSession') != 0:
                            raise ExtJSError('pSession must be the first parameter')
                        lParams = [lVal for lVal in lParams if lVal != 'pSession']
                        
            lEventInfo.Name = pEvent.__name__
            lEventInfo.Args = lParams
            lEventInfo.VarArgs = lArgs.varargs
            lEventInfo.Keywords = lArgs.keywords
            lEventInfo.Defaults = lArgs.defaults
            lEventInfo.Call = pEvent
            
            Ext.__EVENTS[pEvent.__name__] = lEventInfo
        
            @functools.wraps(pEvent)
            def wrapper(*pArgs, **pKwargs):
                lRet = pEvent(*pArgs,**pKwargs)
                return lRet
                
            return wrapper
    
        return decorator
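
A hypothetical use, following the ordering rule enforced above (@staticmethod textually above @Ext.StaticEvent, so the decorator receives the raw function):

class Chat(object):
    @staticmethod
    @Ext.StaticEvent(pInterval=5000)
    def onNewMessage():
        return {'messages': []}  # hypothetical payload polled every 5 s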

Example 16

Project: djangopackages Source File: decorators.py
def lru_cache(maxsize=100):
    '''Least-recently-used cache decorator.

    Arguments to the cached function must be hashable.
    Cache performance statistics stored in f.hits and f.misses.
    Clear the cache with f.clear().
    http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    '''
    maxqueue = maxsize * 10
    def decorating_function(user_function,
            len=len, iter=iter, tuple=tuple, sorted=sorted, KeyError=KeyError):
        cache = {}                  # mapping of args to results
        queue = collections.deque() # order that keys have been used
        refcount = Counter()        # times each key is in the queue
        sentinel = object()         # marker for looping around the queue
        kwd_mark = object()         # separate positional and keyword args

        # lookup optimizations (ugly but fast)
        queue_append, queue_popleft = queue.append, queue.popleft
        queue_appendleft, queue_pop = queue.appendleft, queue.pop

        @functools.wraps(user_function)
        def wrapper(*args, **kwds):
            # cache key records both positional and keyword args
            key = args
            if kwds:
                key += (kwd_mark,) + tuple(sorted(kwds.items()))

            # record recent use of this key
            queue_append(key)
            refcount[key] += 1

            # get cache entry or compute if not found
            try:
                result = cache[key]
                wrapper.hits += 1
            except KeyError:
                result = user_function(*args, **kwds)
                cache[key] = result
                wrapper.misses += 1

                # purge least recently used cache entry
                if len(cache) > maxsize:
                    key = queue_popleft()
                    refcount[key] -= 1
                    while refcount[key]:
                        key = queue_popleft()
                        refcount[key] -= 1
                    del cache[key], refcount[key]

            # periodically compact the queue by eliminating duplicate keys
            # while preserving order of most recent access
            if len(queue) > maxqueue:
                refcount.clear()
                queue_appendleft(sentinel)
                for key in ifilterfalse(refcount.__contains__,
                                        iter(queue_pop, sentinel)):
                    queue_appendleft(key)
                    refcount[key] = 1


            return result

        def clear():
            cache.clear()
            queue.clear()
            refcount.clear()
            wrapper.hits = wrapper.misses = 0

        wrapper.hits = wrapper.misses = 0
        wrapper.clear = clear
        return wrapper
    return decorating_function
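
Note this decorator targets Python 2 (ifilterfalse, Counter, and collections must be imported at module level). A usage sketch against the decorator defined above:

@lru_cache(maxsize=2)
def square(x):
    return x * x

square(2); square(2); square(3)          # miss, hit, miss
print(square.hits, square.misses)        # 1 2
square.clear()                           # empty the cache and reset counters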

Example 17

Project: django-memoize Source File: __init__.py
    def memoize(self, timeout=DEFAULT_TIMEOUT, make_name=None, unless=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. note::

            The returned decorated function now has three function attributes
            assigned to it.

                **uncached**
                    The original undecorated function. readable only

                **cache_timeout**
                    The cache timeout value for this function. For a custom
                    value to take effect, this must be set before the function
                    is called.

                    readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.

                    readable and writable


        :param timeout: Default: 300. If set to an integer, will cache
                        for that amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that accepts
                          a single argument, the function name, and returns a
                          new string to be used as the function name.
                          If not set then the function name is used.
        :param unless: Default None. Caching always runs unless this callable
                       returns true, in which case the cache is bypassed
                       entirely.

        """

        def memoize(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                try:
                    cache_key = decorated_function.make_cache_key(
                        f, *args, **kwargs
                    )
                    rv = self.get(cache_key)
                except Exception:
                    if settings.DEBUG:
                        raise
                    logger.exception(
                        "Exception possibly due to cache backend."
                    )
                    return f(*args, **kwargs)

                if rv is None:
                    rv = f(*args, **kwargs)
                    try:
                        self.set(
                            cache_key, rv,
                            timeout=decorated_function.cache_timeout
                        )
                    except Exception:
                        if settings.DEBUG:
                            raise
                        logger.exception(
                            "Exception possibly due to cache backend."
                        )
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self._memoize_make_cache_key(
                make_name, decorated_function
            )
            decorated_function.delete_memoized = (
                lambda: self.delete_memoized(f)
            )

            return decorated_function
        return memoize

Example 18

Project: flask-caching Source File: __init__.py
    def cached(self, timeout=None, key_prefix='view/%s', unless=None,
               forced_update=None):
        """Decorator. Use this to cache a function. By default the cache key
        is `view/request.path`. You are able to use this decorator with any
        function by changing the `key_prefix`. If the token `%s` is located
        within the `key_prefix` then it will replace that with `request.path`

        Example::

            # An example view function
            @cache.cached(timeout=50)
            def big_foo():
                return big_bar_calc()

            # An example misc function to cache.
            @cache.cached(key_prefix='MyCachedList')
            def get_list():
                return [random.randrange(0, 1) for i in range(50000)]

            my_list = get_list()

        .. note::

            You MUST have a request context to actually call any functions
            that are cached.

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it. These attributes are readable/writable.

                **uncached**
                    The original undecorated function

                **cache_timeout**
                    The cache timeout value for this function. For a
                    custom value to take effect, this must be set before the
                    function is called.

                **make_cache_key**
                    A function used in generating the cache_key used.

        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.

        :param key_prefix: Default 'view/%(request.path)s'. Beginning key to
                           use for the cache key. `request.path` will be the
                           actual request path, or in cases where the
                           `make_cache_key`-function is called from other
                           views it will be the expected URL for the view
                           as generated by Flask's `url_for()`.

                           .. versionadded:: 0.3.4
                               Can optionally be a callable which takes
                               no arguments but returns a string that will
                               be used as the cache_key.

        :param unless: Default None. Caching always runs unless this callable
                       returns true, in which case the cache is bypassed
                       entirely.

        :param forced_update: Default None. If this callable is true, the
                              cached value will be updated regardless of
                              whether the cache has expired. Useful for
                              background renewal of cached functions.
        """

        def decorator(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                #: Bypass the cache entirely.
                if self._bypass_cache(unless, f, *args, **kwargs):
                    return f(*args, **kwargs)

                try:
                    cache_key = _make_cache_key(args, kwargs, use_request=True)

                    if callable(forced_update) and forced_update() is True:
                        rv = None
                    else:
                        rv = self.cache.get(cache_key)
                except Exception:
                    if current_app.debug:
                        raise
                    logger.exception("Exception possibly due to "
                                     "cache backend.")
                    return f(*args, **kwargs)

                if rv is None:
                    rv = f(*args, **kwargs)
                    try:
                        self.cache.set(
                            cache_key, rv,
                            timeout=decorated_function.cache_timeout
                        )
                    except Exception:
                        if current_app.debug:
                            raise
                        logger.exception("Exception possibly due to "
                                         "cache backend.")
                return rv

            def make_cache_key(*args, **kwargs):
                # Convert non-keyword arguments (which is the way
                # `make_cache_key` expects them) to keyword arguments
                # (the way `url_for` expects them)
                argspec = inspect.getargspec(f)
                for arg_name, arg in zip(argspec.args, args):
                    kwargs[arg_name] = arg

                return _make_cache_key(args, kwargs, use_request=False)

            def _make_cache_key(args, kwargs, use_request):
                if callable(key_prefix):
                    cache_key = key_prefix()
                elif '%s' in key_prefix:
                    if use_request:
                        cache_key = key_prefix % request.path
                    else:
                        cache_key = key_prefix % url_for(f.__name__, **kwargs)
                else:
                    cache_key = key_prefix

                return cache_key

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function
        return decorator

Example 19

Project: django_render Source File: url_refactor.py
def __param(method_name, *p_args, **p_kwargs):
    """
    @get('param1', 'param2')
    @get(param1={'name':'parameter_name', 'type':int, 'default':0})
    @get(param1={'type':int, 'default':0})
    @get(param1={'type':int })
    @get(param1=('param_name', int, 0))
    @get(param1=(int, 0))
    @get(param1=int)
    """

    def paramed_decorator(func):
        @functools.wraps(func)
        def decorated(*args, **kwargs):
            request = args[0]
            req_param = deepcopy(request.GET)
            req_param.update(request.POST)
            m = {'get': request.GET, 'post': request.POST, 'param': req_param}
            method = m[method_name]
            for k, v in p_kwargs.items():
                _name = None
                _type = None
                _default = None

                # logging.debug(v)
                if type(v) == str:
                    _type = str
                    _name = v
                elif type(v) == dict:
                    if 'name' in v:
                        _name = v['name']
                    if 'type' in v:
                        _type = v['type']
                    if 'default' in v:
                        _default = v['default']
                elif type(v) == tuple and len(v) == 3:
                    _name = v[0]
                    _type = v[1]
                    _default = v[2]
                elif type(v) == tuple and len(v) == 2:
                    _type = v[0]
                    _default = v[1]
                elif type(v) == type:
                    _type = v
                elif v in (_Type.str_list, _Type.int_list, _Type.json, _Type.file):
                    _type = v

                if _name is None:
                    _name = k
                if _type is None:
                    _type = str

                has_key = True
                try:
                    if _type == _Type.file:
                        if method_name != 'post':
                            return HttpResponse(
                                json.dumps({'rt': False,
                                            'message': "The file parameter <{}> should be in POST method".format(
                                                _name)}, separators=(',', ':')),
                                content_type=CONTENT_TYPE_JSON)
                        origin_v = request.FILES.get(_name, None)
                    else:
                        origin_v = ','.join(method.getlist(_name)).strip()
                        if len(origin_v) == 0:
                            has_key = False
                except KeyError:
                    has_key = False
                if has_key:
                    if _type == bool:
                        origin_v = origin_v.lower()
                        if origin_v == 'false' or origin_v == '0' or origin_v == 'off':
                            value = False
                        elif origin_v == 'true' or origin_v == 'on':
                            value = True
                        else:
                            value = bool(origin_v)
                    elif _type == _Type.str_list:
                        value = [item for item in origin_v.split(',') if len(item) > 0]
                    elif _type == _Type.int_list:
                        value = [int(item) for item in origin_v.split(',')]
                    elif _type == _Type.json:
                        try:
                            value = json.loads(origin_v)
                        except ValueError:
                            return HttpResponse(
                                json.dumps({'rt': False, 'message': "No JSON object could be decoded"},
                                           separators=(',', ':')),
                                content_type=CONTENT_TYPE_JSON)
                    elif _type == _Type.file:
                        value = origin_v
                        pass
                    elif _type == str:
                        value = origin_v
                    else:
                        value = _type(origin_v)
                else:
                    if _default is not None:
                        value = _default
                    else:
                        return HttpResponse(
                            json.dumps({'rt': False, 'message': 'Please specify the parameter : ' + _name + ";"},
                                       separators=(',', ':')),
                            content_type=CONTENT_TYPE_JSON)
                kwargs.update({k: value})

            for k in p_args:
                try:
                    kwargs.update({k: method[k].encode('utf-8')})
                except KeyError:
                    return HttpResponse(json.dumps({'rt': False, 'message': 'Please specify the parameter : ' + k},
                                                   separators=(',', ':')),
                                        content_type=CONTENT_TYPE_JSON)
            return func(*args, **kwargs)

        return decorated

    return paramed_decorator
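
A minimal usage sketch for the decorator above. The enclosing factory is not shown in this snippet, so the name `param` and the signature param(method_name, *p_args, **p_kwargs) are assumptions inferred from the closure variables:

@param('get', page=int, q={'type': str, 'default': ''})
def search(request, page, q):
    # `page` is cast to int and required (the JSON error response above is
    # returned if it is missing); `q` falls back to '' when absent.
    ...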

Example 20

Project: kingpin Source File: thrift_client_mixin.py
    def __new__(mcs, classname, bases, classdict):
        # We assume thrift-client class is the first base of the subclass
        # of PooledThriftClientMixin.
        _thrift_client_cls = bases[0]

        # The following two functions are meant to be injected into
        # the created subclass of PooledThriftClientMixin as its
        # member functions.
        def _ensure_use_pooled_client(self, method, timeout=5000,
                                      expiration=5000, e2e_timeout=5000):
            """Ensure a call is made exclusively on a client in the connection
            pool.

            Args:
                method: a method from a thrift-client class.
                timeout: the maximum time (ms) to wait for a connection.
                expiration: the maximum time (ms) the method may execute using
                    a connection before an exception may be raised.
                e2e_timeout: the end-to-end timeout (ms), including both the
                    time spent acquiring the connection and the RPC itself,
                    plus all the retries, if any.

            Returns:
                a wrapped version of method that grabs a client from the pool
                and uses the client to perform the method.

            """
            @functools.wraps(method)
            def wrap_with_pool(*args, **kwargs):
                if self.client_pool.qsize() == 0:
                    log.info("Contention on thrift-pool %s. Empty pool." %
                             self.client_pool.pool_name)
                with self.client_pool.get_connection(
                        timeout=timeout / MILLIS_PER_SEC,
                        expiration=expiration / MILLIS_PER_SEC,
                        replace_if=self.conn_replace_policy,
                        e2e_timeout=e2e_timeout / MILLIS_PER_SEC) as client:
                    return method(client, *args, **kwargs)

            return wrap_with_pool

        def __init__(self, host_provider, pool_size=5, timeout=5000,
                     retry_policy=None, connection_wait_timeout=5000,
                     connection_expiration=5000, statsd_client=dummy_statsd,
                     socket_connection_timeout=None,
                     always_retry_on_new_host=False,
                     retry_count=3,
                     conn_replace_policy=default_thrift_client_replace_if,
                     protocol_factory=DEFAULT_THRIFT_PROTOCOL_FACTORY,
                     e2e_timeout=5000,
                     is_ssl=False, validate=True, ca_certs=None,
                     failed_retry_policy_log_sample_rate=1.0):
            """Constructor for PooledThriftClientMixin's subclass.

            The five timeout parameters interact in the following way:

                1. Wait 'connection_wait_timeout' to acquire a connection from
                   the pool. If timeout, give up and raise an exception (most
                   likely gevent.queue.Empty).

                2. Once a connection is acquired, the number of milliseconds
                   specified by 'connection_expiration' is available for the
                   connection, including all retries.  If more time elapses,
                   a services.utils.connection_pool.ExpiredConnection exception
                   is raised.

                3. If the underlying connection needs to be established for
                   the first time, we will only wait up to
                   ``socket_connection_timeout``. If a connection timeout
                   happens, we will get a socket error, and the retry
                   mechanism will kick in. By default, it is None,
                   which falls back to the specified timeout.

                4. If the RPC fails or takes more than 'timeout' milliseconds
                   and the 'connection_expiration' limit has not been reached,
                   the RPC will be retried up to three times.

                5. ``e2e_timeout`` is the end-to-end timeout setting that
                   covers both ``connection_wait_timeout`` and
                   ``connection_expiration``. If you only want to specify one
                   timeout on your client, specify this one.

            In general, the ExpiredConnection exception should not be caught in
            the context of retry logic. The exception is meant to be used as
            the last resort to prevent connection leaks caused by clients
            holding the connection forever.

            If the desired behavior is to guarantee time for all retries, the
            recommended client strategy is to set 'connection_expiration' to at
            least 'timeout' multiplied by the number of retries allowed (3).

            Args:
                host_provider: a HostProvider object that provides a list of
                    available "host:port" strings.
                pool_size: max number of clients in the pool.
                socket_connection_timeout: the socket timeout (ms) for
                    establishing the underlying socket connection for the
                    first time.
                timeout: the socket timeout (ms) passed to the connection pool.
                retry_policy: an instance of RetryPolicy. It should have a
                    function 'should_retry' that takes an exception as argument
                    and returns a boolean value indicating whether the RPC
                    should be retried. See RetryPolicy in retry_policy.py for
                    the default retry policy.
                connection_wait_timeout: the maximum time (ms) to wait for a
                    connection to become available in the connection pool.
                connection_expiration: the maximum time (ms) a connection is
                    allowed to be out of the connection pool.
                statsd_client: a statsd client to report stats.
                always_retry_on_new_host: whether to always retry on a new host.
                    By default, only the last retry is on a new host.
                retry_count: total number of attempts before failing a request.
                conn_replace_policy: a function that takes one argument, an
                    exception, and returns a boolean indicating whether to
                    discard the connection for that exception. By default,
                    a connection is discarded for all exceptions other than
                    user-defined thrift exceptions.
                protocol_factory: thrift protocol factory.
                e2e_timeout: end-to-end timeout in milliseconds. It includes
                    time taken for establishing connection, retries if
                    necessary, etc. If you only specify one timeout on your
                    client, this is the one you should specify.
                is_ssl: use SSL connection or not, default is False.
                validate: Set to False to disable SSL certificate validation.
                ca_certs: Filename to the Certificate Authority pem file.
                failed_retry_policy_log_sample_rate: sometimes the "failed
                    retry policy" logging becomes too overwhelming, so you can
                    set a logging sample rate in the range [0, 1.0].

            """
            # This is the mixin class for the objects we create in the pool.
            class _ThriftClientMixinClass(_thrift_client_cls,
                                          ThriftClientMixin):
                # noinspection PyMethodParameters
                def get_connection_exception_class(mixin_self):
                    return self.get_connection_exception_class()

            def _close_conn(client):
                """Close the socket maintained by the client."""
                client.teardown_connection()

            # For every method in _ThriftClientMixinClass, create a
            # corresponding method with the same name but work with the client
            # in the pool.
            for method_name, method in getmembers(_ThriftClientMixinClass,
                                                  predicate=ismethod):
                # Ignore private methods.
                if method_name[0] == '_':
                    continue

                # Ignore methods not in _thrift_client_cls.
                client_attr = getattr(_thrift_client_cls, method_name, None)
                if client_attr is None or not ismethod(client_attr):
                    continue

                # Create delegating method in self for method in
                # _ThriftClientMixinClass (whose methods are the same as
                # those in _thrift_client_cls.)
                #
                # Note the created self.method_name is *not* bound
                # to self when it's invoked. It's an unbound method
                # from _ThriftClientMixinClass decorated by
                # _ensure_use_pooled_client(), which binds 'method' to a
                # client from the pool.
                setattr(self, method_name,
                        self._ensure_use_pooled_client(
                            method,
                            timeout=connection_wait_timeout,
                            expiration=connection_expiration,
                            e2e_timeout=e2e_timeout))

            # Pool name
            pool_name = "%s.%s" % (_thrift_client_cls.__module__,
                                   _thrift_client_cls.__name__)
            # The pool that contains non-greenlet-safe mixin clients.
            self.client_pool = ConnectionPool(
                pool_name=pool_name,
                pool_size=pool_size,
                close_conn_f=_close_conn,
                conn_cls=_ThriftClientMixinClass,
                host_provider=host_provider,
                timeout=timeout,
                statsd_client=statsd_client,
                retry_policy=retry_policy,
                socket_connection_timeout=socket_connection_timeout,
                always_retry_on_new_host=always_retry_on_new_host,
                retry_count=retry_count,
                protocol_factory=protocol_factory,
                is_ssl=is_ssl,
                validate=validate,
                ca_certs=ca_certs,
                failed_retry_policy_log_sample_rate=failed_retry_policy_log_sample_rate)

            # Function that decide whether to discard a connection when
            # exception happens.
            self.conn_replace_policy = conn_replace_policy

        # Inject the two member functions in PooledThriftClientMixin's
        # subclass.
        classdict['_ensure_use_pooled_client'] = _ensure_use_pooled_client
        classdict['__init__'] = __init__

        return super(PooledThriftClientMixinMetaclass, mcs).__new__(
            mcs, classname, bases, classdict)
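
A usage sketch for the metaclass above. Per the code, the thrift-generated client class must be the first base; UserService.Client, host_provider and get_user are placeholders, and the explicit __metaclass__ assignment is only needed if PooledThriftClientMixin does not already install it:

class PooledUserServiceClient(UserService.Client, PooledThriftClientMixin):
    __metaclass__ = PooledThriftClientMixinMetaclass  # Python 2 style

client = PooledUserServiceClient(host_provider, pool_size=10, e2e_timeout=2000)
# Each RPC checks a client out of the pool and returns it afterwards:
user = client.get_user(42)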

Example 21

Project: django-url-framework Source File: controller.py
def get_controller_urlconf(controller_class, site=None):
    controller_name = get_controller_name(controller_class)
    actions = get_actions(controller_class)
    urlpatterns = _patterns()
    urlpatterns_with_args = _patterns()
    def wrap_call(_controller_name, _action_name, _action_func):
        """Wrapper for the function called by the url."""
        def wrapper(*args, **kwargs):
            request, args = args[0], args[1:]
            return autoview_function(site, request, _controller_name, controller_class, _action_name, *args, **kwargs)
        return wraps(_action_func)(wrapper)

    for action_name, action_func in actions.items():
        named_url = '%s_%s' % (get_controller_name(controller_class, with_prefix=False), get_action_name(action_func) )
        named_url = getattr(action_func, 'named_url', named_url)
        replace_dict = {'action':action_name.replace("__","/")}
        wrapped_call = wrap_call(controller_name, action_name, action_func)
        urlconf_prefix = getattr(controller_class, 'urlconf_prefix', None)
        action_urlpatterns = _patterns()
        index_action_with_args_urlconf = _patterns()

        if hasattr(action_func, 'urlconf'):
            """Define custom urlconf patterns for this action."""
            for new_urlconf in action_func.urlconf:
                action_urlpatterns += _patterns(url(new_urlconf, wrapped_call, name=named_url))
        
        if getattr(action_func, 'urlconf_erase', False) == False:
            """Generate the default URL patterns unless 'urlconf_erase' is set for this action."""
            
            if action_name == 'index':
                # No root URL is generated if we have no index action.

                object_id_arg_name, has_default = _get_arg_name_and_default(action_func)
                if object_id_arg_name is not None:
                    replace_dict['object_id_arg_name'] = object_id_arg_name
                    index_action_with_args_urlconf += _patterns(url(r'^(?P<%(object_id_arg_name)s>[\w-]+)/$' % replace_dict, wrapped_call, name=named_url))
                if has_default:
                    action_urlpatterns += _patterns(url(r'^$', wrapped_call, name=named_url))

            else:
                if hasattr(action_func, 'url_parameters'):
                    arguments = action_func.url_parameters
                    replace_dict['url_parameters'] = arguments
                    action_urlpatterns += _patterns(url(r'^%(action)s/%(url_parameters)s$' % replace_dict, wrapped_call, name=named_url))

                else:
                    object_id_arg_name, has_default = _get_arg_name_and_default(action_func)
                    if object_id_arg_name is not None:
                        replace_dict['object_id_arg_name'] = object_id_arg_name
                        action_urlpatterns += _patterns(url(r'^%(action)s/(?P<%(object_id_arg_name)s>[\w-]+)/$' % replace_dict, wrapped_call, name=named_url))
                    if has_default:
                        action_urlpatterns += _patterns(url(r'^%(action)s/$' % replace_dict, wrapped_call, name=named_url))

        if urlconf_prefix:
            action_urlpatterns_with_prefix = _patterns()
            for _urlconf in urlconf_prefix:
                action_urlpatterns_with_prefix+=_patterns(url(_urlconf, include(action_urlpatterns)))
            urlpatterns+=action_urlpatterns_with_prefix

            action_urlpatterns_with_args_with_prefix = _patterns()
            for _urlconf in urlconf_prefix:
                action_urlpatterns_with_args_with_prefix+=_patterns(url(_urlconf, include(index_action_with_args_urlconf)))

            urlpatterns_with_args+=action_urlpatterns_with_args_with_prefix
        else:
            urlpatterns+=action_urlpatterns
            urlpatterns_with_args+=index_action_with_args_urlconf

    return urlpatterns+urlpatterns_with_args
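
A hypothetical controller to make the generated patterns concrete; ActionController is assumed to be the package's controller base class, and the actions are made up:

class BlogController(ActionController):
    def index(self, request, post_id=None):
        # matched at ^$ (post_id has a default) and, via the
        # index_action_with_args_urlconf branch, at ^(?P<post_id>[\w-]+)/$
        ...

    def by__tag(self, request, tag):
        # "__" in the action name becomes "/" in the URL, so this serves
        # ^by/tag/(?P<tag>[\w-]+)/$
        ...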

Example 22

Project: python-future Source File: __init__.py
def disallow_types(argnums, disallowed_types):
    """
    A decorator that raises a TypeError if any of the given numbered
    arguments is of the corresponding given type (e.g. bytes or unicode
    string).

    For example:

        @disallow_types([0, 1], [unicode, bytes])
        def f(a, b):
            pass

    raises a TypeError when f is called if a unicode object is passed as
    `a` or a bytes object is passed as `b`.

    This also skips over keyword arguments, so 

        @disallow_types([0, 1], [unicode, bytes])
        def g(a, b=None):
            pass

    doesn't raise an exception if g is called with only one argument a,
    e.g.:

        g(b'Byte string')

    Example use:

    >>> class newbytes(object):
    ...     @disallow_types([1], [unicode])
    ...     def __add__(self, other):
    ...          pass

    >>> newbytes('1234') + u'1234'      #doctest: +IGNORE_EXCEPTION_DETAIL 
    Traceback (most recent call last):
      ...
    TypeError: can't concat 'bytes' to (unicode) str
    """

    def decorator(function):

        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            # These imports are just for this decorator, and are defined here
            # to prevent circular imports:
            from .newbytes import newbytes
            from .newint import newint
            from .newstr import newstr

            errmsg = "argument can't be {0}"
            for (argnum, mytype) in zip(argnums, disallowed_types):
                # Handle the case where the type is passed as a string like 'newbytes'.
                if isinstance(mytype, str) or isinstance(mytype, bytes):
                    mytype = locals()[mytype]

                # Only check arguments that are passed positionally
                # (keyword arguments are skipped):
                if len(args) <= argnum:
                    break

                # Here we use type() rather than isinstance() because
                # __instancecheck__ is being overridden. E.g.
                # isinstance(b'abc', newbytes) is True on Py2.
                if type(args[argnum]) == mytype:
                    raise TypeError(errmsg.format(mytype))

            return function(*args, **kwargs)
        return wrapper
    return decorator

Example 23

Project: SickRage Source File: testing.py
def gen_test(func=None, timeout=None):
    """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.

    ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
    already running.  ``@gen_test`` should be applied to test methods
    on subclasses of `AsyncTestCase`.

    Example::

        class MyTest(AsyncHTTPTestCase):
            @gen_test
            def test_something(self):
                response = yield gen.Task(self.fetch('/'))

    By default, ``@gen_test`` times out after 5 seconds. The timeout may be
    overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
    or for each test with the ``timeout`` keyword argument::

        class MyTest(AsyncHTTPTestCase):
            @gen_test(timeout=10)
            def test_something_slow(self):
                response = yield gen.Task(self.fetch('/'))

    .. versionadded:: 3.1
       The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
       variable.

    .. versionchanged:: 4.0
       The wrapper now passes along ``*args, **kwargs`` so it can be used
       on functions with arguments.
    """
    if timeout is None:
        timeout = get_async_test_timeout()

    def wrap(f):
        # Stack up several decorators to allow us to access the generator
        # object itself.  In the innermost wrapper, we capture the generator
        # and save it in an attribute of self.  Next, we run the wrapped
        # function through @gen.coroutine.  Finally, the coroutine is
        # wrapped again to make it synchronous with run_sync.
        #
        # This is a good case study arguing for either some sort of
        # extensibility in the gen decorators or cancellation support.
        @functools.wraps(f)
        def pre_coroutine(self, *args, **kwargs):
            result = f(self, *args, **kwargs)
            if isinstance(result, GeneratorType) or iscoroutine(result):
                self._test_generator = result
            else:
                self._test_generator = None
            return result

        if iscoroutinefunction(f):
            coro = pre_coroutine
        else:
            coro = gen.coroutine(pre_coroutine)

        @functools.wraps(coro)
        def post_coroutine(self, *args, **kwargs):
            try:
                return self.io_loop.run_sync(
                    functools.partial(coro, self, *args, **kwargs),
                    timeout=timeout)
            except TimeoutError as e:
                # run_sync raises an error with an unhelpful traceback.
                # Throw it back into the generator or coroutine so the stack
                # trace is replaced by the point where the test is stopped.
                self._test_generator.throw(e)
                # In case the test contains an overly broad except clause,
                # we may get back here.  In this case re-raise the original
                # exception, which is better than nothing.
                raise
        return post_coroutine

    if func is not None:
        # Used like:
        #     @gen_test
        #     def f(self):
        #         pass
        return wrap(func)
    else:
        # Used like @gen_test(timeout=10)
        return wrap

Example 24

Project: kingpin Source File: thrift_client_mixin.py
def ensure_connection(service_name, method_name, method):
    """Ensure that client is connected before executing method.

    .. note:: Class to which this decorator is applied **must** have
       ``get_connection_exception_class()`` method that would return the
       class of the exception to be thrown when retryable connection
       exception is repacked.

    This decorator can only be applied to class methods,
    not to standalone functions.

    Args:
        service_name: A string, the name of the service; used in stats names.
        method_name: A string, the name of the method to be ensured.
        method: The actual method (callable) to be ensured.

    Returns:
        Whatever the executed method returns.

    """
    @functools.wraps(method)
    def method_wrapper(self, *args, **kwargs):

        req_timeout_ms = kwargs.pop('rpc_timeout_ms', self.timeout)
        conn_timeout_ms = kwargs.pop('rpc_timeout_ms',
                                     self.socket_connection_timeout)
        if conn_timeout_ms is None:
            conn_timeout_ms = req_timeout_ms
        retries_left = self.retry_count
        while retries_left:
            start_time = datetime.datetime.now()
            try:
                # Ensure connection.
                try:
                    self.connect(conn_timeout_ms, req_timeout_ms)
                except socket.timeout:
                    raise ThriftConnectionTimeoutError()
                result = method(self._client, *args, **kwargs)
                time_taken = datetime.datetime.now() - start_time
                # convert the time taken to milliseconds
                time_taken_ms = time_taken.total_seconds() * 1000
                self.statsd_client.timing(
                    "client.requests.{0}.{1}".format(service_name, method_name),
                    time_taken_ms, sample_rate=0.001)
                self.refresh_connection_if_needed()
                return result
            except TApplicationException as e:
                handler_args = args_to_str(*args, **kwargs)
                time_taken = datetime.datetime.now() - start_time
                # convert the time taken to milliseconds
                time_taken_ms = time_taken.total_seconds() * 1000
                log.info(
                    "Thrift call failed TApplicationException : %s(%s) : "
                    "%s:%d : time_taken_ms : %s : %s" % (
                        method_name, handler_args, self.host,
                        self.port, time_taken_ms, e))
                raise
            except Exception as e:
                t, v, tb = sys.exc_info()
                retries_left -= 1
                handler_args = args_to_str(*args, **kwargs)
                time_taken = datetime.datetime.now() - start_time
                # convert the time taken to milliseconds
                time_taken_ms = time_taken.total_seconds() * 1000

                # application exception, if it is retriable as determined by
                # RetryPolicy then we simply raise the exception, no connection
                # teardown is needed, because the exception was thrown by the
                # server and transported back to the client.
                if _is_application_exception(e):
                    retry_policy_to_apply = self.retry_policy
                    if not retry_policy_to_apply:
                        retry_policy_to_apply = DEFAULT_RETRY_POLICY
                    if not retry_policy_to_apply.should_retry(e):
                        if random.random() < self.failed_retry_policy_log_sample_rate:
                            # Sample logging in case logging is too overwhelming.
                            log.info(
                                "Thrift call failed retry policy : %s(%s) :"
                                "%s:%d : time_taken_ms : %s : %s" % (
                                    method_name, handler_args, self.host,
                                    self.port, time_taken_ms, e))
                        # raise the exception to stop it from being retried
                        raise t, v, tb
                elif _is_rpc_timeout(e):
                    # rpc socket timeout, not connection socket timeout
                    log.info(
                        "Thrift call failed rpc timeout : %s(%s) :"
                        "%s:%d : time_taken_ms : %s : %s" % (
                            method_name, handler_args, self.host,
                            self.port, time_taken_ms, e))
                    self.statsd_client.increment(
                        "errors.thriftclient.RpcTimeoutError",
                        sample_rate=0.01,
                        tags={'client': self.host})
                    # socket timeout, only reliable way to recover is to tear
                    # down the connection, it is probably good to select a
                    # new host, regardless whether we should retry this request
                    # or not.
                    self.teardown_connection(select_new_host=True)
                    # TODO(Yongsheng): temporarily disable this feature, we need
                    # a way to gauge the server healthiness before we can bring
                    # this feature back.
                    # raise exception to keep it from being retried.
                    # raise self.get_connection_exception_class()(e)
                else:
                    # at this point, we assume it is connectivity issue,
                    # socket read/write errors, or failing to establish
                    # connection, we will need to tear down the connection
                    # and re-establish it for subsequent calls
                    log.info(
                        "Thrift client connection fail : %s(%s) : %s:%d : "
                        "retries_left=%d : time_taken_ms : %s  %r",
                        method_name, handler_args, self.host,
                        self.port, retries_left, time_taken_ms, e)
                    self.statsd_client.increment(
                        "errors.thriftclient.ConnectionError",
                        sample_rate=0.01,
                        tags={'client': self.host})
                    # By default, for the first two retries, try the same host
                    # to rule out intermittent connectivity issue. For the last
                    # retry select a new host randomly.
                    # If ``always_retry_on_new_host`` is set True, always retry
                    # on a new host.
                    if self.always_retry_on_new_host or retries_left == 1:
                        # turn this on when we are ready to penalize bad hosts
                        # self._host_selector.invalidate()
                        self.teardown_connection(select_new_host=True)
                    else:
                        self.teardown_connection(select_new_host=False)

                # Retriable errors, but no retries left, bail.
                if not retries_left:
                    log.info(
                        "Thrift call failed all retries : %s(%s) : "
                        "%s:%d : time_taken_ms: %s %s" % (
                            method_name, handler_args, self.host, self.port,
                            time_taken_ms, e))
                    self.statsd_client.increment(
                        "errors.thriftclient.AllConnectionError",
                        sample_rate=0.01,
                        tags={'client': self.host})
                    # Repack the message and raise as a different exception.
                    raise self.get_connection_exception_class()(e), None, tb

    return method_wrapper
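
A sketch of the contract described in the note above; the class and exception names are placeholders:

class UserServiceClient(ThriftClientMixin):
    def get_connection_exception_class(self):
        # raised in place of connectivity errors once retries are exhausted
        return UserServiceConnectionError

# Presumably installed by the mixin machinery for each thrift method:
UserServiceClient.get_user = ensure_connection(
    'userservice', 'get_user', UserService.Client.get_user)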

Example 25

Project: PyFITS Source File: util.py
Function: deprecated
def deprecated(since, message='', name='', alternative='', pending=False):
    """
    Used to mark a function as deprecated.

    To mark an attribute as deprecated, replace that attribute with a
    deprecated property.

    Parameters
    ----------
    since : str
        The release at which this API became deprecated.  This is required.

    message : str, optional
        Override the default deprecation message.  The format specifier
        %(func)s may be used for the name of the function, and %(alternative)s
        may be used in the deprecation message to insert the name of an
        alternative to the deprecated function.

    name : str, optional
        The name of the deprecated function; if not provided the name is
        automatically determined from the passed-in function.  Providing it
        explicitly is useful in the case of renamed functions, where the new
        function is just assigned to the name of the deprecated function.
        For example:
            def new_function():
                ...
            oldFunction = new_function

    alternative : str, optional
        An alternative function that the user may use in place of the
        deprecated function.  The deprecation warning will tell the user about
        this alternative if provided.

    pending : bool, optional
        If True, uses a PyfitsPendingDeprecationWarning instead of a
        PyfitsDeprecationWarning.

    """

    def deprecate(func, message=message, name=name, alternative=alternative,
                  pending=pending):
        if isinstance(func, classmethod):
            try:
                func = func.__func__
            except AttributeError:
                # classmethods in Python2.6 and below lack the __func__
                # attribute so we need to hack around to get it
                method = func.__get__(None, object)
                if hasattr(method, '__func__'):
                    func = method.__func__
                elif hasattr(method, 'im_func'):
                    func = method.im_func
                else:
                    # Nothing we can do really...  just return the original
                    # classmethod
                    return func
            is_classmethod = True
        else:
            is_classmethod = False

        if not name:
            name = func.__name__

        altmessage = ''
        if not message or type(message) == type(deprecate):
            if pending:
                message = ('The %(func)s function will be deprecated in a '
                           'future version.')
            else:
                message = (
                    'The %(func)s function is deprecated as of version '
                    '%(since)s and may be removed in a future version.')
            if alternative:
                altmessage = '\n\n        Use %s instead.' % alternative

        message = ((message % {'func': name, 'alternative': alternative,
                               'since': since}) + altmessage)

        @functools.wraps(func)
        def deprecated_func(*args, **kwargs):
            if pending:
                category = PyfitsPendingDeprecationWarning
            else:
                category = PyfitsDeprecationWarning

            warnings.warn(message, category, stacklevel=2)

            return func(*args, **kwargs)

        old_doc = deprecated_func.__doc__
        if not old_doc:
            old_doc = ''
        old_doc = textwrap.dedent(old_doc).strip('\n')
        altmessage = altmessage.strip()
        if not altmessage:
            altmessage = message.strip()
        new_doc = (('\n.. deprecated:: %(since)s'
                    '\n    %(message)s\n\n' %
                    {'since': since, 'message': altmessage.strip()}) + old_doc)
        if not old_doc:
            # This is to prevent a spurious 'unexpected unindent' warning from
            # docutils when the original docstring was blank.
            new_doc += r'\ '

        deprecated_func.__doc__ = new_doc

        if is_classmethod:
            deprecated_func = classmethod(deprecated_func)
        return deprecated_func

    if type(message) == type(deprecate):
        return deprecate(message)

    return deprecate
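
Two usage sketches based on the docstring above; the function names are made up:

@deprecated('3.2', alternative='open_fits')
def openfits(path):
    return open_fits(path)

# Renamed function: pass `name` explicitly so the warning names the alias.
oldFunction = deprecated('3.2', name='oldFunction',
                         alternative='new_function')(new_function)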

Example 26

Project: python-semanticversion Source File: base.py
    @classmethod
    def _comparison_functions(cls, partial=False):
        """Retrieve comparison methods to apply on version components.

        This is a private API.

        Args:
            partial (bool): whether to provide 'partial' or 'strict' matching.

        Returns:
            5-tuple of cmp-like functions.
        """

        def prerelease_cmp(a, b):
            """Compare prerelease components.

            Special rule: a version without prerelease component has higher
            precedence than one with a prerelease component.
            """
            if a and b:
                return identifier_list_cmp(a, b)
            elif a:
                # Versions with prerelease field have lower precedence
                return -1
            elif b:
                return 1
            else:
                return 0

        def build_cmp(a, b):
            """Compare build metadata.

            Special rule: there is no ordering on build metadata.
            """
            if a == b:
                return 0
            else:
                return NotImplemented

        def make_optional(orig_cmp_fun):
            """Convert a cmp-like function to consider 'None == *'."""
            @functools.wraps(orig_cmp_fun)
            def alt_cmp_fun(a, b):
                if a is None or b is None:
                    return 0
                return orig_cmp_fun(a, b)

            return alt_cmp_fun

        if partial:
            return [
                base_cmp,  # Major is still mandatory
                make_optional(base_cmp),
                make_optional(base_cmp),
                make_optional(prerelease_cmp),
                make_optional(build_cmp),
            ]
        else:
            return [
                base_cmp,
                base_cmp,
                base_cmp,
                prerelease_cmp,
                build_cmp,
            ]
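
A sketch of how the returned 5-tuple might be consumed; this loop is not part of the snippet, and the attribute names assume python-semanticversion's Version fields:

def compare(v1, v2, partial=False):
    pairs = zip(Version._comparison_functions(partial),
                (v1.major, v1.minor, v1.patch, v1.prerelease, v1.build),
                (v2.major, v2.minor, v2.patch, v2.prerelease, v2.build))
    for cmp_fun, a, b in pairs:
        result = cmp_fun(a, b)
        if result not in (0, NotImplemented):
            return result  # first decisive component wins
    return 0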

Example 27

Project: st2 Source File: base.py
def jsexpose(arg_types=None, body_cls=None, status_code=None, content_type='application/json',
             method=None):
    """
    :param arg_types: A list of types for the function arguments (e.g. [str, str, int, bool]).
    :type arg_types: ``list``

    :param body_cls: Request body class. If provided, this class will be used to create an instance
                     out of the request body.
    :type body_cls: :class:`object`

    :param status_code: Response status code.
    :type status_code: ``int``

    :param content_type: Response content type.
    :type content_type: ``str``
    """
    pecan_json_decorate = pecan.expose(
        content_type=content_type,
        generic=False)

    def decorate(f):
        @functools.wraps(f)
        def callfunction(*args, **kwargs):
            args = list(args)
            more = [args.pop(0)]

            def cast_value(value_type, value):
                if value_type == bool:
                    def cast_func(value):
                        return value.lower() in ['1', 'true']
                else:
                    cast_func = value_type

                result = cast_func(value)
                return result

            if body_cls:
                if pecan.request.body:
                    data = pecan.request.json

                    obj = body_cls(**data)
                    try:
                        obj = obj.validate()
                    except (jsonschema.ValidationError, ValueError) as e:
                        raise exc.HTTPBadRequest(detail=e.message,
                                                 comment=traceback.format_exc())
                    except Exception as e:
                        raise exc.HTTPInternalServerError(detail=e.message,
                                                          comment=traceback.format_exc())
                else:
                    obj = None

                more.append(obj)

            if arg_types:
                # Cast and transform arguments based on the provided arg_types specification
                result_args, result_kwargs = get_controller_args_for_types(func=f,
                                                                           arg_types=arg_types,
                                                                           args=args,
                                                                           kwargs=kwargs)
                more = more + result_args
                kwargs.update(result_kwargs)

            args = tuple(more) + tuple(args)

            noop_codes = [http_client.NOT_IMPLEMENTED,
                          http_client.METHOD_NOT_ALLOWED,
                          http_client.FORBIDDEN]

            if status_code and status_code in noop_codes:
                pecan.response.status = status_code
                return json_encode(None)

            try:
                result = f(*args, **kwargs)
            except TypeError as e:
                e = get_exception_for_type_error(func=f, exc=e)
                raise e
            except Exception as e:
                e = get_exception_for_uncaught_api_error(func=f, exc=e)
                raise e

            if status_code:
                pecan.response.status = status_code
            if content_type == 'application/json':
                if is_debugging_enabled():
                    indent = 4
                else:
                    indent = None
                return json_encode(result, indent=indent)
            else:
                return result

        pecan_json_decorate(callfunction)

        return callfunction

    return decorate
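
A hypothetical pecan controller method using the decorator above; ActionAPI and the argument names are made up:

class ActionsController(object):
    @jsexpose(arg_types=[str], body_cls=ActionAPI,
              status_code=http_client.CREATED)
    def post(self, action, ref):
        # `action` is an ActionAPI instance built from the request body and
        # validated before this runs; `ref` is the URL argument cast via
        # arg_types. The return value is JSON-encoded by the wrapper.
        return {'ref': ref, 'name': action.name}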

Example 28

Project: greenhouse Source File: dns.py
Function: getnameinfo
@functools.wraps(socket.getnameinfo)
def getnameinfo(address, flags):
    dns.build_resolver()
    try:
        host, port = address
    except (ValueError, TypeError):
        if not isinstance(address, tuple):
            del address
            raise TypeError('getnameinfo() argument must be a tuple')
        else:
            raise socket.gaierror(
                socket.EAI_NONAME, "Name or service not known")

    if (flags & socket.NI_NAMEREQD) and (flags & socket.NI_NUMERICHOST):
        raise socket.gaierror(
            socket.EAI_NONAME, "Name or service not known")

    if dns.is_ipv4(host):
        try:
            name = dns.reversename.from_address(host)

            results = dns.resolver_obj.query(name, dns.rdatatype.PTR)
            if len(results) > 1:
                raise socket.error(
                    "sockaddr resolved to multiple addresses")

            host = results[0].target.to_text(omit_final_dot=True)
        except dns.exception.Timeout, exc:
            if flags & socket.NI_NAMEREQD:
                raise socket.gaierror(socket.EAI_AGAIN, 'Lookup timed out')
        except dns.resolver.NXDOMAIN:
            return (host, str(port))
        except dns.exception.DNSException, exc:
            if flags & socket.NI_NAMEREQD:
                raise socket.gaierror(
                    socket.EAI_NONAME, "Name or service not known")

    else:
        try:
            ips = dns.resolve(host)

            if len(ips) > 1:
                raise socket.error('sockaddr resolved to multiple addresses')

            if flags & socket.NI_NUMERICHOST:
                host = ips[0].to_text()
        except dns.exception.Timeout, exc:
            if flags & socket.NI_NAMEREQD:
                raise socket.gaierror(socket.EAI_AGAIN, 'Lookup timed out')
        except dns.exception.DNSException, exc:
            if flags & socket.NI_NAMEREQD:
                raise socket.gaierror(
                    socket.EAI_NONAME, "Name or service not known")

    if flags & socket.NI_NUMERICSERV:
        port = str(port)
    else:
        port = socket.getservbyport(
            port, (flags & socket.NI_DGRAM) and 'udp' or 'tcp')

    return host, port
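
Usage matches the socket.getnameinfo this function wraps, for example:

host, service = getnameinfo(('8.8.8.8', 53), 0)
# -> ('dns.google', 'domain') when the PTR lookup succeeds; pass
# socket.NI_NUMERICSERV to keep the service numeric ('53').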

Example 29

Project: attention-lvcsr Source File: base.py
Function: lazy
def lazy(allocation=None, initialization=None):
    """Makes the initialization lazy.

    This decorator allows the user to define positional arguments which
    will not be needed until the allocation or initialization stage of the
    brick. If these arguments are not passed, it will automatically replace
    them with a custom ``None`` object. It is assumed that the missing
    arguments can be set after initialization by setting attributes with
    the same name.

    Parameters
    ----------
    allocation : list
        A list of argument names that are needed for allocation.
    initialization : list
        A list of argument names that are needed for initialization.

    Examples
    --------
    >>> class SomeBrick(Brick):
    ...     @lazy(allocation=['a'], initialization=['b'])
    ...     def __init__(self, a, b, c='c', d=None):
    ...         print(a, b, c, d)
    >>> brick = SomeBrick('a')
    a NoneInitialization c None
    >>> brick = SomeBrick(d='d', b='b')
    NoneAllocation b c d

    """
    if not allocation:
        allocation = []
    if not initialization:
        initialization = []

    def lazy_wrapper(init):
        def lazy_init(*args, **kwargs):
            self = args[0]
            self.allocation_args = (getattr(self, 'allocation_args',
                                            []) + allocation)
            self.initialization_args = (getattr(self, 'initialization_args',
                                                []) + initialization)
            kwargs = dict_union(args_to_kwargs(args, init), kwargs)
            for allocation_arg in allocation:
                kwargs.setdefault(allocation_arg, NoneAllocation)
            for initialization_arg in initialization:
                kwargs.setdefault(initialization_arg, NoneInitialization)
            return init(**kwargs)
        wraps(init)(lazy_init)
        return lazy_init
    return lazy_wrapper

Example 30

Project: flask-cache Source File: __init__.py
    def memoize(self, timeout=None, make_name=None, unless=None):
        """
        Use this to cache the result of a function, taking its arguments into
        account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it.

                **uncached**
                    The original undecorated function. readable only

                **cache_timeout**
                    The cache timeout value for this function. For a custom value
                    to take effect, this must be set before the function is called.

                    readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.

                    readable and writable


        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that accepts
                          a single argument, the function name, and returns a
                          new string to be used as the function name. If not set
                          then the function name is used.
        :param unless: Default None. Caching always runs unless this callable
                       returns True, in which case caching is bypassed
                       entirely.

        .. versionadded:: 0.5
            params ``make_name``, ``unless``
        """

        def memoize(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                try:
                    cache_key = decorated_function.make_cache_key(f, *args, **kwargs)
                    rv = self.cache.get(cache_key)
                except Exception:
                    if current_app.debug:
                        raise
                    logger.exception("Exception possibly due to cache backend.")
                    return f(*args, **kwargs)

                if rv is None:
                    rv = f(*args, **kwargs)
                    try:
                        self.cache.set(cache_key, rv,
                                   timeout=decorated_function.cache_timeout)
                    except Exception:
                        if current_app.debug:
                            raise
                        logger.exception("Exception possibly due to cache backend.")
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self._memoize_make_cache_key(
                                                make_name, decorated_function)
            decorated_function.delete_memoized = lambda: self.delete_memoized(f)

            return decorated_function
        return memoize

Example 31

Project: molecular-design-toolkit Source File: callsigs.py
def args_from(original_function,
              only=None,
              allexcept=None,
              inject_kwargs=None,
              inject_docs=None,
              wraps=None,
              update_docstring_args=False):
    """
    Decorator to transfer call signatures - helps to hide ugly *args and **kwargs in delegated calls

    Args:
        original_function (callable): the function to take the call signature from
        only (List[str]): only transfer these arguments (incompatible with `allexcept`)
        wraps (bool): Transfer documentation and attributes from original_function to
            decorated_function, using functools.wraps (default: True if call signature is
            unchanged, False otherwise)
        allexcept (List[str]): transfer all except these arguments (incompatible with `only`)
        inject_kwargs (dict): Inject new kwargs into the call signature
            (of the form ``{argname: defaultvalue}``)
        inject_docs (dict): Add or modify argument documentation (requires google-style
            docstrings) with a dict of the form `{argname: "(type): description"}`
        update_docstring_args (bool): Update the "arguments" section of the docstring using the
           original function's documentation (requires google-style docstrings and wraps=False)

    Note:
        To use arguments from a class's __init__ method, pass the class itself as
        ``original_function`` - this will also allow us to inject the documentation

    Returns:
        Decorator function
    """
    # NEWFEATURE - verify arguments?

    if only and allexcept:
        raise ValueError('Error in keyword arguments - '
                         'pass *either* "only" or "allexcept", not both')

    origname = get_qualified_name(original_function)

    if hasattr(original_function, '__signature__'):
        sig = original_function.__signature__.replace()
    else:
        sig = funcsigs.signature(original_function)

    # Modify the call signature if necessary
    if only or allexcept or inject_kwargs:
        wraps = if_not_none(wraps, False)
        newparams = []
        if only:
            for param in only:
                newparams.append(sig.parameters[param])
        elif allexcept:
            for name, param in sig.parameters.iteritems():
                if name not in allexcept:
                    newparams.append(param)
        else:
            newparams = sig.parameters.values()
        if inject_kwargs:
            for name, default in inject_kwargs.iteritems():
                newp = funcsigs.Parameter(name, funcsigs.Parameter.POSITIONAL_OR_KEYWORD,
                                          default=default)
                newparams.append(newp)

        newparams.sort(key=lambda param: param._kind)
        sig = sig.replace(parameters=newparams)

    else:
        wraps = if_not_none(wraps, True)

    # Get the docstring arguments
    if update_docstring_args:
        original_docs = GoogleDocArgumentInjector(original_function.__doc__)
        argument_docstrings = collections.OrderedDict((p.name, original_docs.args[p.name])
                                                      for p in sig.parameters.values())

    def decorator(f):
        """Modify f's call signature (using the `__signature__` attribute)"""
        if wraps:
            fname = original_function.__name__
            f = functools.wraps(original_function)(f)
            f.__name__ = fname  # revert name change
        else:
            fname = f.__name__
        f.__signature__ = sig

        if update_docstring_args or inject_kwargs:
            if update_docstring_args:
                docstring_args = argument_docstrings
            else:
                docstring_args = GoogleDocArgumentInjector(f.__doc__).args
            docs = GoogleDocArgumentInjector(f.__doc__)
            docs.args = docstring_args

            if not hasattr(f, '__orig_docs'):
                f.__orig_docs = []
            f.__orig_docs.append(f.__doc__)

            f.__doc__ = docs.new_docstring()

        # Only for building sphinx documentation:
        if os.environ.get('SPHINX_IS_BUILDING_DOCS', ""):
            sigstring = '%s%s\n' % (fname, sig)
            if hasattr(f, '__doc__') and f.__doc__ is not None:
                f.__doc__ = sigstring + f.__doc__
            else:
                f.__doc__ = sigstring
        return f

    return decorator
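
A sketch based on the docstring: expose a delegating factory that carries MyClass's constructor signature plus one injected kwarg (MyClass, `lazy` and `initialize` are made up):

@args_from(MyClass, inject_kwargs={'lazy': False})
def make_my_class(*args, **kwargs):
    lazy = kwargs.pop('lazy')
    obj = MyClass(*args, **kwargs)
    if not lazy:
        obj.initialize()  # hypothetical post-construction step
    return obj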

Example 32

Project: django-easyextjs4 Source File: __init__.py
Function: class
    @staticmethod
    def Class(pUrlApis = None, pUrl = None, pId = None, pTimeOut = None, pNameSpace = None, pSession = None):
        
        if pId is not None and not isinstance(pId,str):
            raise ExtJSError('pId must be a string')            
        
        if pNameSpace is not None and not isinstance(pNameSpace,str):
            raise ExtJSError('pNameSpace must be a string')            
        
        if pTimeOut is not None and not isinstance(pTimeOut,int):
            raise ExtJSError('pTimeOut must be an integer')            
        
        if pUrl is not None and not isinstance(pUrl,str):
            raise ExtJSError('pUrl must be a string')            
        
        if pUrlApis is not None and not isinstance(pUrlApis,str):
            raise ExtJSError('pUrlApis must be a string')            
        
        if pSession is not None:
            if isinstance(pSession,bool) and pSession == True:
                pSession = Ext.sessionFromRequest
            elif not inspect.isfunction(pSession):
                raise ExtJSError('pSession must be a method or a boolean. If it is a method it must return a session object; if it is True the session is taken from the Django request.')

        if pUrlApis is None:
            pUrlApis = 'api.js'
        
        lExt = Ext.__Instance()
        
        lExt.UrlApis = pUrlApis
        lExt.Url = pUrl
        lExt.Id = pId
        lExt.TimeOut = pTimeOut
        lExt.NameSpace = pNameSpace
        
        def decorator(pClass):
    
            if hasattr(pClass,'__ExtJS'):
                raise ExtJSError('Class %s already register for ExtJS' % pClass.__name__)
            
            # Store ExtJS information on the class
            pClass.__ExtJS = lExt
            
            # Validate and store the JavaScript API
            if lExt.UrlApis not in Ext.__URLSAPI:
                Ext.__URLSAPI[lExt.UrlApis] = list()
            else:
                lFirstClass = Ext.__URLSAPI[lExt.UrlApis][0]
                lExtFirst = lFirstClass.__ExtJS
                if lExt.NameSpace is None:
                    # The first class defined a namespace; it spreads to the other classes that share the same UrlApis
                    lExt.NameSpace = lExtFirst.NameSpace
                else:
                    # All classes behind the same UrlApis must define the same namespace
                    if lExt.NameSpace != lExtFirst.NameSpace:
                        raise ExtJSError('Class "%s": a single javascript API ("%s") cannot be defined with two different namespaces.' % (pClass.__name__, lExt.UrlApis))
                
            if lExt.Url is not None:
                lUrl = lExt.Url
            else:
                lUrl = 'Default'
                
                if lExt.NameSpace is not None:
                    lUrl = lExt.NameSpace
                
            lExt.Url = 'Rpc' + lUrl    

            if lExt.Url not in Ext.__URLSRPC:
                Ext.__URLSRPC[lExt.Url] = dict()
                 
            Ext.__URLSRPC[lExt.Url][pClass.__name__] = pClass
            
            if pClass not in Ext.__URLSAPI[lExt.UrlApis]: 
                Ext.__URLSAPI[lExt.UrlApis].append(pClass)
            
            # Register methods
            lExt.StaticMethods = Ext.__METHODS

            # Register events
            lExt.StaticEvents = dict()

            for lKey in Ext.__EVENTS:
                lEvent = Ext.__EVENTS[lKey]
                if lEvent.ClassName is None:
                    lEvent.ClassName = pClass.__name__
                if lEvent.NameSpace is None:
                    lEvent.NameSpace = lExt.NameSpace
                if lEvent.Url is None:
                    lEvent.Url = 'Evt' + lEvent.NameSpace + lEvent.ClassName + lEvent.Name
                if lEvent.UrlApis is None:
                    lEvent.UrlApis = lExt.UrlApis
                if lEvent.Url in Ext.__URLSEVT:     
                    raise ExtJSError('Url "%s" for event "%s" already exist' % (lEvent.Url, lEvent.Name))
                Ext.__URLSEVT[lEvent.Url] = pClass
                if lEvent.UrlApis not in Ext.__URLSAPI:
                    Ext.__URLSAPI[lEvent.UrlApis] = list()
                if pClass not in Ext.__URLSAPI[lEvent.UrlApis]:
                    Ext.__URLSAPI[lEvent.UrlApis].append(pClass)
                lExt.StaticEvents[lEvent.Url] = lEvent
            
            # Apply the session method if one is not already set on the method
            if pSession is not None:
                for lMethod in lExt.StaticMethods:
                    lMethodInfo = lExt.StaticMethods[lMethod] 
                    if lMethodInfo.Session is None:
                        lParams = list(lMethodInfo.Args)
                        if 'pSession' not in lParams:
                            raise ExtJSError('Method \'%s\' must declare a parameter pSession' % lMethodInfo.Name)
                        else:
                            # Check if pSession is the first parameter
                            if lParams.index('pSession') != 0:
                                raise ExtJSError('Method \'%s\' pSession must be the first parameter' % lMethodInfo.Name)
                            lParams = [lVal for lVal in lParams if lVal != 'pSession']
                            lMethodInfo.Session = pSession
                            lMethodInfo.Args = lParams
                for lEvent in lExt.StaticEvents:
                    lEventInfo = lExt.StaticEvents[lEvent] 
                    if lEventInfo.Session is None:
                        lParams = list(lEventInfo.Args)
                        if 'pSession' not in lParams:
                            raise ExtJSError('Event \'%s\' must declare a parameter pSession' % lEventInfo.Name)
                        else:
                            # Check if pSession is the first parameter
                            if lParams.index('pSession') != 0:
                                raise ExtJSError('Event \'%s\' pSession must be the first parameter' % lEventInfo.Name)
                            lParams = [lVal for lVal in lParams if lVal != 'pSession']
                            lEventInfo.Session = pSession
                            lEventInfo.Args = lParams

            Ext.__METHODS = dict()
            Ext.__EVENTS = dict()   
                
            @functools.wraps(pClass)
            def wrapper(*pArgs, **pKwargs):
                lNewObj = pClass(*pArgs,**pKwargs)
                return lNewObj
                
            return wrapper
    
        return decorator
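
The wrapper above applies ``functools.wraps`` to a *class*: the factory function returned by the decorator takes over the class's ``__name__``, ``__doc__`` and ``__dict__``, so callers still see the original metadata. A minimal, self-contained sketch of the same pattern (the names here are illustrative, not from the project above):

import functools

class Widget(object):
    """Original docstring, preserved on the factory."""

def classwrap(cls):
    @functools.wraps(cls)
    def factory(*args, **kwargs):
        # The factory now advertises the metadata of ``cls`` itself.
        return cls(*args, **kwargs)
    return factory

Widget = classwrap(Widget)
print(Widget.__name__)  # -> 'Widget'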

Example 33

Project: rosbridge_suite Source File: testing.py
def gen_test(func=None, timeout=None):
    """Testing equivalent of ``@gen.coroutine``, to be applied to test methods.

    ``@gen.coroutine`` cannot be used on tests because the `.IOLoop` is not
    already running.  ``@gen_test`` should be applied to test methods
    on subclasses of `AsyncTestCase`.

    Example::

        class MyTest(AsyncHTTPTestCase):
            @gen_test
            def test_something(self):
                response = yield gen.Task(self.fetch('/'))

    By default, ``@gen_test`` times out after 5 seconds. The timeout may be
    overridden globally with the ``ASYNC_TEST_TIMEOUT`` environment variable,
    or for each test with the ``timeout`` keyword argument::

        class MyTest(AsyncHTTPTestCase):
            @gen_test(timeout=10)
            def test_something_slow(self):
                response = yield gen.Task(self.fetch('/'))

    .. versionadded:: 3.1
       The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
       variable.

    .. versionchanged:: 4.0
       The wrapper now passes along ``*args, **kwargs`` so it can be used
       on functions with arguments.
    """
    if timeout is None:
        timeout = get_async_test_timeout()

    def wrap(f):
        # Stack up several decorators to allow us to access the generator
        # object itself.  In the innermost wrapper, we capture the generator
        # and save it in an attribute of self.  Next, we run the wrapped
        # function through @gen.coroutine.  Finally, the coroutine is
        # wrapped again to make it synchronous with run_sync.
        #
        # This is a good case study arguing for either some sort of
        # extensibility in the gen decorators or cancellation support.
        @functools.wraps(f)
        def pre_coroutine(self, *args, **kwargs):
            result = f(self, *args, **kwargs)
            if isinstance(result, types.GeneratorType):
                self._test_generator = result
            else:
                self._test_generator = None
            return result

        coro = gen.coroutine(pre_coroutine)

        @functools.wraps(coro)
        def post_coroutine(self, *args, **kwargs):
            try:
                return self.io_loop.run_sync(
                    functools.partial(coro, self, *args, **kwargs),
                    timeout=timeout)
            except TimeoutError as e:
                # run_sync raises an error with an unhelpful traceback.
                # If we throw it back into the generator the stack trace
                # will be replaced by the point where the test is stopped.
                self._test_generator.throw(e)
                # In case the test contains an overly broad except clause,
                # we may get back here.  In this case re-raise the original
                # exception, which is better than nothing.
                raise
        return post_coroutine

    if func is not None:
        # Used like:
        #     @gen_test
        #     def f(self):
        #         pass
        return wrap(func)
    else:
        # Used like @gen_test(timeout=10)
        return wrap

Example 34

Project: topaz Source File: gateway.py
Function: generate_wrapper
    def generate_wrapper(self):
        if hasattr(self.func, "__wraps__"):
            wrapped_func = self.func.__wraps__
        else:
            wrapped_func = self.func

        code = wrapped_func.__code__
        if wrapped_func.__defaults__ is not None:
            defaults = wrapped_func.__defaults__
            default_start = code.co_argcount - len(defaults)
        else:
            defaults = []
            default_start = None
        argspec = self.argspec
        self_cls = self.self_cls
        func = self.func

        argnames = code.co_varnames[:code.co_argcount]
        argcount = 0
        for arg in argnames:
            argcount += arg.startswith("w_") or arg in argspec
        min_args = argcount
        for arg, default in zip(reversed(argnames), reversed(defaults)):
            min_args -= arg.startswith("w_") or arg in argspec
        unrolling_argnames = unrolling_iterable(enumerate(argnames))
        takes_args_w = "args_w" in argnames

        @functools.wraps(self.func)
        def wrapper(self, space, args_w, block):
            if (len(args_w) < min_args or
                (not takes_args_w and len(args_w) > argcount)):
                raise space.error(space.w_ArgumentError,
                    "wrong number of arguments (%d for %d)" % (len(args_w), min_args)
                )
            args = ()
            arg_count = 0
            args_w_seen = False
            for i, argname in unrolling_argnames:
                if argname == "self":
                    assert isinstance(self, self_cls)
                    args += (self,)
                elif argname == "args_w":
                    if args_w_seen:
                        raise SystemError("args_w cannot be repeated")
                    args += (args_w[arg_count:],)
                    args_w_seen = True
                elif argname == "block":
                    args += (block,)
                elif argname == "space":
                    args += (space,)
                elif argname.startswith("w_") or argname in argspec:
                    if args_w_seen:
                        raise SystemError("args_w must be the last argument accepted")
                    if len(args_w) > arg_count:
                        if argname.startswith("w_"):
                            args += (args_w[arg_count],)
                        elif argname in argspec:
                            args += (getattr(Coerce, argspec[argname])(space, args_w[arg_count]),)
                    elif default_start is not None and i >= default_start:
                        args += (defaults[i - default_start],)
                    else:
                        raise SystemError("bad arg count")
                    arg_count += 1
                else:
                    raise SystemError("%r not implemented" % argname)
            w_res = func(*args)
            if w_res is None:
                w_res = space.w_nil
            return w_res
        return wrapper
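
The ``hasattr(self.func, "__wraps__")`` check at the top relies on a project-local convention rather than anything in ``functools``: decorators stash the undecorated function on the wrapper so ``generate_wrapper`` can read the real ``__code__`` and ``__defaults__``. A hedged sketch of a decorator following that convention:

import functools

def remembering(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    # Convention assumed from the snippet above: keep the original
    # callable reachable for signature introspection.
    wrapper.__wraps__ = func
    return wrapper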

Example 35

Project: pika Source File: test_utils.py
def retry_assertion(timeout_sec, retry_interval_sec=0.1):
    """Creates a decorator that retries the decorated function or
    method only upon `AssertionError` exception at the given retry interval
    not to exceed the overall given timeout.

    :param float timeout_sec: overall timeout in seconds
    :param float retry_interval_sec: amount of time to sleep
        between retries in seconds.

    :returns: decorator that implements the following behavior

    1. This decorator guarantees to call the decorated function or method at
    least once.
    2. It passes through all exceptions besides `AssertionError`, preserving the
    original exception and its traceback.
    3. If no exception, it returns the return value from the decorated function/method.
    4. It sleeps `time.sleep(retry_interval_sec)` between retries.
    5. It checks for expiry of the overall timeout before sleeping.
    6. If the overall timeout is exceeded, it re-raises the latest `AssertionError`,
    preserving its original traceback
    """

    def retry_assertion_decorator(func):
        """Decorator"""

        @functools.wraps(func)
        def retry_assertion_wrap(*args, **kwargs):
            """The wrapper"""

            num_attempts = 0
            start_time = time.time()

            while True:
                num_attempts += 1

                try:
                    result = func(*args, **kwargs)
                except AssertionError:

                    now = time.time()
                    # Compensate for time adjustment
                    if now < start_time:
                        start_time = now

                    if (now - start_time) > timeout_sec:
                        logging.exception(
                            'Exceeded retry timeout of %s sec in %s attempts '
                            'with func %r. Caller\'s stack:\n%s',
                            timeout_sec, num_attempts, func,
                            ''.join(traceback.format_stack()))
                        raise

                    logging.debug('Attempt %s failed; retrying %r in %s sec.',
                                  num_attempts, func, retry_interval_sec)

                    time.sleep(retry_interval_sec)
                else:
                    logging.debug('%r succeeded at attempt %s',
                                  func, num_attempts)
                    return result

        return retry_assertion_wrap

    return retry_assertion_decorator
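
A minimal usage sketch, assuming the decorator above is in scope; the body is retried on ``AssertionError`` until it passes or roughly ``timeout_sec`` elapses:

import os

@retry_assertion(timeout_sec=3.0, retry_interval_sec=0.25)
def assert_output_written(path):
    # Re-evaluated every 0.25s for up to ~3s while the file is missing.
    assert os.path.exists(path), '%s was not created yet' % path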

Example 36

Project: tuskar Source File: lockutils.py
def synchronized(name, lock_file_prefix, external=False, lock_path=None):
    """Synchronization decorator.

    Decorating a method like so::

        @synchronized('mylock')
        def foo(self, *args):
           ...

    ensures that only one thread will execute the foo method at a time.

    Different methods can share the same lock::

        @synchronized('mylock')
        def foo(self, *args):
           ...

        @synchronized('mylock')
        def bar(self, *args):
           ...

    This way only one of either foo or bar can be executing at a time.

    The lock_file_prefix argument is used to provide lock files on disk with a
    meaningful prefix. The prefix should end with a hyphen ('-') if specified.

    The external keyword argument denotes whether this lock should work across
    multiple processes. This means that if two different workers both run
    a method decorated with @synchronized('mylock', external=True), only one
    of them will execute at a time.

    The lock_path keyword argument is used to specify a special location for
    external lock files to live. If nothing is set, then CONF.lock_path is
    used as a default.
    """

    def wrap(f):
        @functools.wraps(f)
        def inner(*args, **kwargs):
            # NOTE(soren): If we ever go natively threaded, this will be racy.
            #              See http://stackoverflow.com/questions/5390569/dyn
            #              amically-allocating-and-destroying-mutexes
            sem = _semaphores.get(name, semaphore.Semaphore())
            if name not in _semaphores:
                # this check is not racy - we're already holding ref locally
                # so GC won't remove the item and there was no IO switch
                # (only valid in greenthreads)
                _semaphores[name] = sem

            with sem:
                LOG.debug(_('Got semaphore "%(lock)s" for method '
                            '"%(method)s"...'), {'lock': name,
                                                 'method': f.__name__})

                # NOTE(mikal): I know this looks odd
                if not hasattr(local.strong_store, 'locks_held'):
                    local.strong_store.locks_held = []
                local.strong_store.locks_held.append(name)

                try:
                    if external and not CONF.disable_process_locking:
                        LOG.debug(_('Attempting to grab file lock "%(lock)s" '
                                    'for method "%(method)s"...'),
                                  {'lock': name, 'method': f.__name__})
                        cleanup_dir = False

                        # We need a copy of lock_path because it is non-local
                        local_lock_path = lock_path
                        if not local_lock_path:
                            local_lock_path = CONF.lock_path

                        if not local_lock_path:
                            cleanup_dir = True
                            local_lock_path = tempfile.mkdtemp()

                        if not os.path.exists(local_lock_path):
                            fileutils.ensure_tree(local_lock_path)

                        # NOTE(mikal): the lock name cannot contain directory
                        # separators
                        safe_name = name.replace(os.sep, '_')
                        lock_file_name = '%s%s' % (lock_file_prefix, safe_name)
                        lock_file_path = os.path.join(local_lock_path,
                                                      lock_file_name)

                        try:
                            lock = InterProcessLock(lock_file_path)
                            with lock:
                                LOG.debug(_('Got file lock "%(lock)s" at '
                                            '%(path)s for method '
                                            '"%(method)s"...'),
                                          {'lock': name,
                                           'path': lock_file_path,
                                           'method': f.__name__})
                                retval = f(*args, **kwargs)
                        finally:
                            LOG.debug(_('Released file lock "%(lock)s" at '
                                        '%(path)s for method "%(method)s"...'),
                                      {'lock': name,
                                       'path': lock_file_path,
                                       'method': f.__name__})
                            # NOTE(vish): This removes the tempdir if we needed
                            #             to create one. This is used to
                            #             cleanup the locks left behind by unit
                            #             tests.
                            if cleanup_dir:
                                shutil.rmtree(local_lock_path)
                    else:
                        retval = f(*args, **kwargs)

                finally:
                    local.strong_store.locks_held.remove(name)

            return retval
        return inner
    return wrap

Example 37

Project: mock Source File: trace_decorator.py
def traceLog(log=None):
    def decorator(func):
        @functools.wraps(func)
        def trace(*args, **kw):
            # default to logger that was passed by module, but
            # can override by passing logger=foo as function parameter.
            # make sure this doesn't conflict with one of the parameters
            # you are expecting

            filename = os.path.normcase(inspect.getsourcefile(func))
            func_name = func.__name__
            if hasattr(func, 'func_code'):
                lineno = func.func_code.co_firstlineno
            else:
                lineno = func.__code__.co_firstlineno

            l2 = kw.get('logger', log)
            if l2 is None:
                l2 = logging.getLogger("trace.%s" % func.__module__)
            if isinstance(l2, basestring):
                l2 = logging.getLogger(l2)

            message = "ENTER %s("
            message = message + ', '.join([repr(arg) for arg in args])
            if args and kw:
                message += ', '
            for k, v in list(kw.items()):
                message = message + "%s=%s" % (k, repr(v))
            message = message + ")"

            frame = inspect.getouterframes(inspect.currentframe())[1][0]
            doLog(l2, logging.INFO, os.path.normcase(frame.f_code.co_filename),
                  frame.f_lineno, message, args=[func_name], exc_info=None,
                  func=frame.f_code.co_name)
            try:
                result = "Bad exception raised: Exception was not a derived "\
                         "class of 'Exception'"
                try:
                    result = func(*args, **kw)
                except (KeyboardInterrupt, Exception) as e:
                    result = "EXCEPTION RAISED"
                    doLog(l2, logging.INFO, filename, lineno,
                          "EXCEPTION: %s\n", args=[e],
                          exc_info=sys.exc_info(), func=func_name)
                    raise
            finally:
                doLog(l2, logging.INFO, filename, lineno,
                      "LEAVE %s --> %s\n", args=[func_name, result],
                      exc_info=None, func=func_name)

            return result
        return trace
    return decorator
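
A usage sketch, assuming ``traceLog`` and its ``doLog`` helper are importable; every call is then logged on entry and exit:

import logging

logging.basicConfig(level=logging.INFO)

@traceLog(logging.getLogger('demo'))
def add(a, b):
    return a + b

add(2, 3)  # logs "ENTER add(2, 3)" and "LEAVE add --> 5"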

Example 38

Project: BlogCatke Source File: __init__.py
Function: acl
def acl(method):

    # Check a rule against a set of roles
    def check(rule,roles):

        if rule.get('deny',False):
            for r in roles :
                if r in rule['deny'] :
                    return False

        if rule.get('allow',False):
            for r in roles :
                if r in rule['allow'] :
                    return True

        return False



    @functools.wraps(method)
    def wrapper(self, transforms, *args, **kwargs):
        # Unique identifier
        URI = self.__class__.__module__ + '.' + self.__class__.__name__
        # Access rules
        rules = self.settings['acls']
        # Current user
        current_user = self.current_user


        # Normalize the roles
        roles = []
        if None == current_user:
            roles.append('ACL_NO_ROLE')

        elif utils.Validators.is_dict(current_user):
            if False == current_user.has_key('roles') \
               or 0 == len(current_user['roles']):

                roles.append('ACL_NO_ROLE')
            else:
                roles.append('ACL_HAS_ROLE')

                for r in current_user['roles']:
                    roles.append(r)

        for r in rules:
            if r['URI'].find('*') == -1 and r['URI'] == URI :
                if False == check(r,roles) :
                    self._transforms = transforms
                    self.on_access_denied()
                    return self.finish()

            elif URI.find(r['URI'].split('*')[0]) == 0:
                if False == check(r,roles) :
                    self._transforms = transforms
                    self.on_access_denied()
                    return self.finish()

        return method(self, transforms, *args, **kwargs)

    return wrapper
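
The wrapper walks ``self.settings['acls']``; below is a hedged sketch of that structure, inferred from the lookups above (module paths are illustrative). A rule's ``URI`` is matched exactly or, when it contains ``*``, by prefix; ``deny`` is checked before ``allow``:

settings = {
    'acls': [
        # Exact match on "module.ClassName".
        {'URI': 'myapp.handlers.AdminHandler',
         'deny': ['ACL_NO_ROLE'], 'allow': ['admin']},
        # Prefix match: everything under myapp.handlers.
        {'URI': 'myapp.handlers.*',
         'allow': ['ACL_HAS_ROLE']},
    ],
}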

Example 39

Project: xarray Source File: plot.py
Function: plot_2d
def _plot2d(plotfunc):
    """
    Decorator for common 2d plotting logic

    Also adds the 2d plot method to class _PlotMethods
    """
    commondoc = """
    Parameters
    ----------
    darray : DataArray
        Must be 2 dimensional, unless creating faceted plots
    x : string, optional
        Coordinate for x axis. If None use darray.dims[1]
    y : string, optional
        Coordinate for y axis. If None use darray.dims[0]
    ax : matplotlib axes object, optional
        If None, uses the current axis
    row : string, optional
        If passed, make row faceted plots on this dimension name
    col : string, optional
        If passed, make column faceted plots on this dimension name
    col_wrap : integer, optional
        Use together with ``col`` to wrap faceted plots
    xincrease : None, True, or False, optional
        Should the values on the x axes be increasing from left to right?
        if None, use the default for the matplotlib function
    yincrease : None, True, or False, optional
        Should the values on the y axes be increasing from top to bottom?
        if None, use the default for the matplotlib function
    add_colorbar : Boolean, optional
        Adds colorbar to axis
    add_labels : Boolean, optional
        Use xarray metadata to label axes
    vmin, vmax : floats, optional
        Values to anchor the colormap, otherwise they are inferred from the
        data and other keyword arguments. When a diverging dataset is inferred,
        setting one of these values will fix the other by symmetry around
        ``center``. Setting both values prevents use of a diverging colormap.
        If discrete levels are provided as an explicit list, both of these
        values are ignored.
    cmap : matplotlib colormap name or object, optional
        The mapping from data values to color space. If not provided, this
        will either be ``viridis`` (if the function infers a sequential
        dataset) or ``RdBu_r`` (if the function infers a diverging dataset).
        When `Seaborn` is installed, ``cmap`` may also be a `seaborn`
        color palette. If ``cmap`` is a seaborn color palette and the plot type
        is not ``contour`` or ``contourf``, ``levels`` must also be specified.
    colors : discrete colors to plot, optional
        A single color or a list of colors. If the plot type is not ``contour``
        or ``contourf``, the ``levels`` argument is required.
    center : float, optional
        The value at which to center the colormap. Passing this value implies
        use of a diverging colormap. Setting it to ``False`` prevents use of a
        diverging colormap.
    robust : bool, optional
        If True and ``vmin`` or ``vmax`` are absent, the colormap range is
        computed with 2nd and 98th percentiles instead of the extreme values.
    extend : {'neither', 'both', 'min', 'max'}, optional
        How to draw arrows extending the colorbar beyond its limits. If not
        provided, extend is inferred from vmin, vmax and the data limits.
    levels : int or list-like object, optional
        Split the colormap (cmap) into discrete color intervals.
    subplot_kws : dict, optional
        Dictionary of keyword arguments for matplotlib subplots. Only applies
        to FacetGrid plotting.
    cbar_ax : matplotlib Axes, optional
        Axes in which to draw the colorbar.
    cbar_kwargs : dict, optional
        Dictionary of keyword arguments to pass to the colorbar.
    **kwargs : optional
        Additional arguments to wrapped matplotlib function

    Returns
    -------
    artist :
        The same type of primitive artist that the wrapped matplotlib
        function returns
    """

    # Build on the original docstring
    plotfunc.__doc__ = '\n'.join((plotfunc.__doc__, commondoc))

    @functools.wraps(plotfunc)
    def newplotfunc(darray, x=None, y=None, ax=None, row=None, col=None,
                    col_wrap=None, xincrease=True, yincrease=True,
                    add_colorbar=None, add_labels=True, vmin=None, vmax=None,
                    cmap=None, center=None, robust=False, extend=None,
                    levels=None, colors=None, subplot_kws=None,
                    cbar_ax=None, cbar_kwargs=None, **kwargs):
        # All 2d plots in xarray share this function signature.
        # Method signature below should be consistent.

        # Decide on a default for the colorbar before facetgrids
        if add_colorbar is None:
            add_colorbar = plotfunc.__name__ != 'contour'

        # Handle facetgrids first
        if row or col:
            allargs = locals().copy()
            allargs.update(allargs.pop('kwargs'))

            # Need the decorated plotting function
            allargs['plotfunc'] = globals()[plotfunc.__name__]

            return _easy_facetgrid(**allargs)

        import matplotlib.pyplot as plt

        # colors is mutually exclusive with cmap
        if cmap and colors:
            raise ValueError("Can't specify both cmap and colors.")
        # colors is only valid when levels is supplied or the plot is of type
        # contour or contourf
        if colors and (('contour' not in plotfunc.__name__) and (not levels)):
            raise ValueError("Can only specify colors with contour or levels")
        # we should not be getting a list of colors in cmap anymore
        # is there a better way to do this test?
        if isinstance(cmap, (list, tuple)):
            warnings.warn("Specifying a list of colors in cmap is deprecated. "
                          "Use colors keyword instead.",
                          DeprecationWarning, stacklevel=3)

        if ax is None:
            ax = plt.gca()

        xlab, ylab = _infer_xy_labels(darray=darray, x=x, y=y)

        # better to pass the ndarrays directly to plotting functions
        xval = darray[xlab].values
        yval = darray[ylab].values
        zval = darray.to_masked_array(copy=False)

        # May need to transpose for correct x, y labels
        # xlab may be the name of a coord, we have to check for dim names
        if darray[xlab].dims[-1] == darray.dims[0]:
            zval = zval.T

        _ensure_plottable(xval, yval)

        if 'contour' in plotfunc.__name__ and levels is None:
            levels = 7  # this is the matplotlib default

        cmap_kwargs = {'plot_data': zval.data,
                       'vmin': vmin,
                       'vmax': vmax,
                       'cmap': colors if colors else cmap,
                       'center': center,
                       'robust': robust,
                       'extend': extend,
                       'levels': levels,
                       'filled': plotfunc.__name__ != 'contour',
                       }

        cmap_params = _determine_cmap_params(**cmap_kwargs)

        if 'contour' in plotfunc.__name__:
            # extend is a keyword argument only for contour and contourf, but
            # passing it to the colorbar is sufficient for imshow and
            # pcolormesh
            kwargs['extend'] = cmap_params['extend']
            kwargs['levels'] = cmap_params['levels']

        # This allows the user to pass in a custom norm coming via kwargs
        kwargs.setdefault('norm', cmap_params['norm'])

        ax, primitive = plotfunc(xval, yval, zval, ax=ax,
                                 cmap=cmap_params['cmap'],
                                 vmin=cmap_params['vmin'],
                                 vmax=cmap_params['vmax'],
                                 **kwargs)

        # Label the plot with metadata
        if add_labels:
            ax.set_xlabel(xlab)
            ax.set_ylabel(ylab)
            ax.set_title(darray._title_for_slice())

        if add_colorbar:
            cbar_kwargs = {} if cbar_kwargs is None else dict(cbar_kwargs)
            cbar_kwargs.setdefault('extend', cmap_params['extend'])
            if cbar_ax is None:
                cbar_kwargs.setdefault('ax', ax)
            else:
                cbar_kwargs.setdefault('cax', cbar_ax)
            cbar = plt.colorbar(primitive, **cbar_kwargs)
            if darray.name and add_labels and 'label' not in cbar_kwargs:
                cbar.set_label(darray.name, rotation=90)
        elif cbar_ax is not None or cbar_kwargs is not None:
            # inform the user about keywords which aren't used
            raise ValueError("cbar_ax and cbar_kwargs can't be used with "
                             "add_colorbar=False.")

        _update_axes_limits(ax, xincrease, yincrease)

        return primitive

    # For use as DataArray.plot.plotmethod
    @functools.wraps(newplotfunc)
    def plotmethod(_PlotMethods_obj, x=None, y=None, ax=None, row=None,
                   col=None, col_wrap=None, xincrease=True, yincrease=True,
                   add_colorbar=None, add_labels=True, vmin=None, vmax=None,
                   cmap=None, colors=None, center=None, robust=False,
                   extend=None, levels=None, subplot_kws=None,
                   cbar_ax=None, cbar_kwargs=None, **kwargs):
        """
        The method should have the same signature as the function.

        This just makes the method work on Plotmethods objects,
        and passes all the other arguments straight through.
        """
        allargs = locals()
        allargs['darray'] = _PlotMethods_obj._da
        allargs.update(kwargs)
        for arg in ['_PlotMethods_obj', 'newplotfunc', 'kwargs']:
            del allargs[arg]
        return newplotfunc(**allargs)

    # Add to class _PlotMethods
    setattr(_PlotMethods, plotmethod.__name__, plotmethod)

    return newplotfunc
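
The decorated function is expected to take ``(x, y, z, ax=..., **kwargs)`` and return ``(ax, primitive)``; a simplified sketch of how a plot function would be declared (not the project's exact ``imshow``):

@_plot2d
def imshow(x, y, z, ax, **kwargs):
    """Image plot of a 2d DataArray; gains the common signature above."""
    primitive = ax.imshow(z, **kwargs)
    return ax, primitive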

Example 40

Project: viewflow Source File: decorators.py
def flow_job(func):
    """
    Decorator that prepares celery task for execution.

    Makes a celery job function with the following signature
    `(flow_task_strref, process_pk, task_pk, **kwargs)`

    Expects the actual celery job function to have the signature `(activation, **kwargs)`.
    If the celery task class implements the activation interface, the job
    function is called without the activation instance, i.e. `(**kwargs)`

    Process instance is locked only before and after the function execution.
    Please avoid any process state modification during the celery job.
    """
    @functools.wraps(func)
    def _wrapper(*args, **kwargs):
        flow_task_strref = kwargs.pop('flow_task_strref') if 'flow_task_strref' in kwargs else args[0]
        process_pk = kwargs.pop('process_pk') if 'process_pk' in kwargs else args[1]
        task_pk = kwargs.pop('task_pk') if 'task_pk' in kwargs else args[2]
        flow_task = import_task_by_ref(flow_task_strref)

        lock = flow_task.flow_class.lock_impl(flow_task.flow_class.instance)

        # start
        with transaction.atomic(), lock(flow_task.flow_class, process_pk):
            try:
                task = flow_task.flow_class.task_class.objects.get(pk=task_pk)
                if task.status == STATUS.CANCELED:
                    return
            except flow_task.flow_class.task_class.DoesNotExist:
                # There was rollback on job task created transaction,
                # we don't need to do the job
                return
            else:
                activation = flow_task.activation_class()
                activation.initialize(flow_task, task)
                if task.status == STATUS.SCHEDULED:
                    activation.start()
                else:
                    activation.restart()

        # execute
        try:
            result = func(activation, **kwargs)
        except Exception as exc:
            # mark as error
            with transaction.atomic(), lock(flow_task.flow_class, process_pk):
                task = flow_task.flow_class.task_class.objects.get(pk=task_pk)
                activation = flow_task.activation_class()
                activation.initialize(flow_task, task)
                activation.error(comments="{}\n{}".format(exc, traceback.format_exc()))
            raise
        else:
            # mark as done
            with transaction.atomic(), lock(flow_task.flow_class, process_pk):
                task = flow_task.flow_class.task_class.objects.get(pk=task_pk)
                activation = flow_task.activation_class()
                activation.initialize(flow_task, task)
                activation.done()

            return result

    return _wrapper
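
A usage sketch, assuming a Celery app object named ``celery_app`` (an assumption, not from the snippet); ``@flow_job`` sits closest to the function so the task body receives a prepared ``activation``:

@celery_app.task
@flow_job
def send_welcome_email(activation, **kwargs):
    # Runs between the start()/restart() and done() transitions that
    # flow_job manages under the process lock.
    pass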

Example 41

Project: flask-caching Source File: __init__.py
    def memoize(self, timeout=None, make_name=None, unless=None,
                forced_update=None):
        """Use this to cache the result of a function, taking its arguments
        into account in the cache key.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @cache.memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. versionadded:: 0.4
            The returned decorated function now has three function attributes
            assigned to it.

                **uncached**
                    The original undecorated function. readable only

                **cache_timeout**
                    The cache timeout value for this function.
                    For a custom value to take effect, this must be
                    set before the function is called.

                    readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.

                    readable and writable


        :param timeout: Default None. If set to an integer, will cache for that
                        amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that accepts
                          a single argument, the function name, and returns a
                          new string to be used as the function name.
                          If not set then the function name is used.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true.
                       This will bypass the caching entirely.
        :param forced_update: Default None. If this callable is true,
                              the cache value will be updated regardless of
                              whether the cache has expired. Useful for
                              background renewal of cached functions.

        .. versionadded:: 0.5
            params ``make_name``, ``unless``
        """

        def memoize(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                #: bypass cache
                if self._bypass_cache(unless, f, *args, **kwargs):
                    return f(*args, **kwargs)

                try:
                    cache_key = decorated_function.make_cache_key(
                        f, *args, **kwargs
                    )
                    if callable(forced_update) and forced_update() is True:
                        rv = None
                    else:
                        rv = self.cache.get(cache_key)
                except Exception:
                    if current_app.debug:
                        raise
                    logger.exception("Exception possibly due to "
                                     "cache backend.")
                    return f(*args, **kwargs)

                if rv is None:
                    rv = f(*args, **kwargs)
                    try:
                        self.cache.set(
                            cache_key, rv,
                            timeout=decorated_function.cache_timeout
                        )
                    except Exception:
                        if current_app.debug:
                            raise
                        logger.exception("Exception possibly due to "
                                         "cache backend.")
                return rv

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self._memoize_make_cache_key(
                make_name, decorated_function, forced_update
            )
            decorated_function.delete_memoized = \
                lambda: self.delete_memoized(f)

            return decorated_function
        return memoize

Example 42

Project: qiita Source File: oauth2.py
def authenticate_oauth(f):
    """Decorate methods to require valid Oauth2 Authorization header[1]

    If a valid header is given, the handoff is done and the page is rendered.
    If an invalid header is given, a 400 error code is returned and the json
    error message is automatically sent.

    Returns
    -------
    Sends oauth2-formatted error JSON if authorization fails

    Notes
    -----
    Expects handler to be a tornado RequestHandler or subclass

    References
    ----------
    [1] The OAuth 2.0 Authorization Framework.
    http://tools.ietf.org/html/rfc6749
    """
    @functools.wraps(f)
    def wrapper(handler, *args, **kwargs):
        header = handler.request.headers.get('Authorization', None)
        if header is None:
            _oauth_error(handler, 'Oauth2 error: invalid access token',
                         'invalid_request')
            return
        token_info = header.split()
        # Per RFC 6750, the header must split into exactly two elements,
        # ['Bearer', token]; anything else is treated as malformed
        if len(token_info) != 2 or token_info[0] != 'Bearer':
            _oauth_error(handler, 'Oauth2 error: invalid access token',
                         'invalid_grant')
            return

        token = token_info[1]
        db_token = r_client.hgetall(token)
        if not db_token:
            # token has timed out or never existed
            _oauth_error(handler, 'Oauth2 error: token has timed out',
                         'invalid_grant')
            return
        # Check daily rate limit for key if password style key
        if db_token['grant_type'] == 'password':
            limit_key = '%s_%s_daily_limit' % (db_token['client_id'],
                                               db_token['user'])
            limiter = r_client.get(limit_key)
            if limiter is None:
                # Set limit to 5,000 requests per day
                r_client.setex(limit_key, 5000, 86400)
            else:
                r_client.decr(limit_key)
                if int(r_client.get(limit_key)) <= 0:
                    _oauth_error(
                        handler, 'Oauth2 error: daily request limit reached',
                        'invalid_grant')
                    return

        return f(handler, *args, **kwargs)
    return wrapper
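
A usage sketch on a tornado handler (handler and route names are illustrative); requests without a valid ``Authorization: Bearer <token>`` header receive the JSON error instead of reaching the method body:

import tornado.web

class StudyHandler(tornado.web.RequestHandler):
    @authenticate_oauth
    def get(self, study_id):
        self.write({'study': study_id})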

Example 43

Project: chainer Source File: condition.py
def repeat_with_success_at_least(times, min_success):
    """Decorator for multiple trial of the test case.

    The decorated test case is launched multiple times.
    The case is judged as passed if it succeeds in at least the
    specified number of trials. Once the number of successful trials
    reaches `min_success`, the remaining trials are skipped.

    Args:
        times(int): The number of trials.
        min_success(int): Threshold that the decorated test
            case is regarded as passed.

    """

    assert times >= min_success

    def _repeat_with_success_at_least(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            assert len(args) > 0
            instance = args[0]
            assert isinstance(instance, unittest.TestCase)
            success_counter = 0
            failure_counter = 0
            results = []

            def fail():
                msg = '\nFail: {0}, Success: {1}'.format(
                    failure_counter, success_counter)
                if len(results) > 0:
                    first = results[0]
                    errs = first.failures + first.errors
                    if len(errs) > 0:
                        err_msg = '\n'.join(fail[1] for fail in errs)
                        msg += '\n\nThe first error message:\n' + err_msg
                instance.fail(msg)

            for _ in six.moves.range(times):
                suite = unittest.TestSuite()
                suite.addTest(
                    unittest.FunctionTestCase(
                        lambda: f(*args, **kwargs),
                        setUp=instance.setUp,
                        tearDown=instance.tearDown))

                result = QuietTestRunner().run(suite)
                if result.wasSuccessful():
                    success_counter += 1
                else:
                    results.append(result)
                    failure_counter += 1
                if success_counter >= min_success:
                    instance.assertTrue(True)
                    return
                if failure_counter > times - min_success:
                    fail()
                    return
            fail()
        return wrapper
    return _repeat_with_success_at_least
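
A usage sketch on a deliberately flaky ``unittest`` case; with the numbers below the test runs at most 5 times and is reported as passed once 3 trials succeed:

import random
import unittest

class FlakyTest(unittest.TestCase):
    @repeat_with_success_at_least(times=5, min_success=3)
    def test_usually_passes(self):
        self.assertLess(random.random(), 0.9)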

Example 44

Project: SickGear Source File: concurrent.py
def return_future(f):
    """Decorator to make a function that returns via callback return a
    `Future`.

    The wrapped function should take a ``callback`` keyword argument
    and invoke it with one argument when it has finished.  To signal failure,
    the function can simply raise an exception (which will be
    captured by the `.StackContext` and passed along to the ``Future``).

    From the caller's perspective, the callback argument is optional.
    If one is given, it will be invoked when the function is complete
    with `Future.result()` as an argument.  If the function fails, the
    callback will not be run and an exception will be raised into the
    surrounding `.StackContext`.

    If no callback is given, the caller should use the ``Future`` to
    wait for the function to complete (perhaps by yielding it in a
    `.gen.engine` function, or passing it to `.IOLoop.add_future`).

    Usage:

    .. testcode::

        @return_future
        def future_func(arg1, arg2, callback):
            # Do stuff (possibly asynchronous)
            callback(result)

        @gen.engine
        def caller(callback):
            yield future_func(arg1, arg2)
            callback()

    ..

    Note that ``@return_future`` and ``@gen.engine`` can be applied to the
    same function, provided ``@return_future`` appears first.  However,
    consider using ``@gen.coroutine`` instead of this combination.
    """
    replacer = ArgReplacer(f, 'callback')

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        future = TracebackFuture()
        callback, args, kwargs = replacer.replace(
            lambda value=_NO_RESULT: future.set_result(value),
            args, kwargs)

        def handle_error(typ, value, tb):
            future.set_exc_info((typ, value, tb))
            return True
        exc_info = None
        with ExceptionStackContext(handle_error):
            try:
                result = f(*args, **kwargs)
                if result is not None:
                    raise ReturnValueIgnoredError(
                        "@return_future should not be used with functions "
                        "that return values")
            except:
                exc_info = sys.exc_info()
                raise
        if exc_info is not None:
            # If the initial synchronous part of f() raised an exception,
            # go ahead and raise it to the caller directly without waiting
            # for them to inspect the Future.
            future.result()

        # If the caller passed in a callback, schedule it to be called
        # when the future resolves.  It is important that this happens
        # just before we return the future, or else we risk confusing
        # stack contexts with multiple exceptions (one here with the
        # immediate exception, and again when the future resolves and
        # the callback triggers its exception by calling future.result()).
        if callback is not None:
            def run_callback(future):
                result = future.result()
                if result is _NO_RESULT:
                    callback()
                else:
                    callback(future.result())
            future.add_done_callback(wrap(run_callback))
        return future
    return wrapper

Example 45

Project: OWAPI Source File: v3_util.py
def with_ratelimit(bucket: str, timelimit: int=None, max_reqs: int=0):
    """
    Marks the decorated function as rate limited.

    Rate limits are stored in `rates.yml`.
    """

    # The (regex, rates) pairs in ``compiled`` are pre-built elsewhere from rates.yml
    def _rl_inner1(func):
        @functools.wraps(func)
        async def _rl_inner2(ctx: HTTPRequestContext, *args, **kwargs):
            """
            Inner ratelimit function.
            """
            if ctx.app.config["owapi_disable_ratelimits"]:
                # Don't bother with ratelimits.
                return await func(ctx, *args, **kwargs)

            # only ratelimit if we have redis. Can't make this decision in
            # outer functions because they are called before globalsettings are set
            if ctx.app.config["owapi_use_redis"]:
                import aioredis
                assert isinstance(ctx.redis, aioredis.Redis)
                # Get the IP.
                ip = ctx.request.ip
                if ip == "127.0.0.1":
                    # We don't want to rate limit localhost.
                    ip = ctx.request.headers.get("X-Real-IP")

                # Build the ratelimit string.
                built = "{bucket}:{ip}:ratelimit".format(bucket=bucket, ip=ip)

                # Check the user agent before.
                user_agent = ctx.request.headers.get("User-Agent")
                if user_agent is None:
                    return BAD_USERAGENT

                if check_default_useragents(user_agent):
                    return BAD_USERAGENT

                # Load the rate limit based on the regular expression provided.
                for regex, rates in compiled:
                    if regex.match(user_agent):
                        break
                else:
                    # UH OH
                    raise RuntimeError("Failed to match User-Agent - did you wipe rates.yml?")

                _timelimit = timelimit or rates.get("time", 1)
                _max_reqs = max_reqs or rates.get("max_reqs", 1)

                # Redis-based ratelimiting.
                # First, check if the key even exists.
                if not (await ctx.redis.exists(built)):
                    # LPUSH, and EXPIRE it.
                    await ctx.redis.lpush(built, _max_reqs)
                    await ctx.redis.expire(built, _timelimit)
                else:
                    # LLEN it.
                    tries = await ctx.redis.llen(built)
                    if tries >= _max_reqs:  # use the per-rule limit, not the raw default
                        # 429 You Are Being Ratelimited.
                        ttl = await ctx.redis.ttl(built)
                        return {"error": 429, "msg": "you are being ratelimited"}, 429, {"Retry-After": ttl}

                    # LPUSH a `1` or something onto the edge of the list.
                    # The actual value doesn't matter.
                    await ctx.redis.lpush(built, 1)

            # Now, await the underlying function.
            return await func(ctx, *args, **kwargs)

        return _rl_inner2

    return _rl_inner1
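
The ``compiled`` list iterated above is built elsewhere from ``rates.yml``; below is a hedged, illustrative stand-in showing the only shape the decorator relies on, ``(regex, rates)`` pairs answering ``rates.get("time")`` and ``rates.get("max_reqs")``. The catch-all entry matters, or the ``RuntimeError`` above fires:

import re

compiled = [
    # Illustrative values, not the project's real rates.yml contents.
    (re.compile(r'python-requests/.*'), {'time': 10, 'max_reqs': 20}),
    (re.compile(r'.*'), {'time': 1, 'max_reqs': 5}),
]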

Example 46

Project: premailer Source File: cache.py
def function_cache(expected_max_entries=1000):
    """
        function_cache is a decorator for caching function calls.
        The arguments to the wrapped function must be hashable, else
        it will not work.

        expected_max_entries protects against cache failure. If the
        cache misses more than this number of times, it turns itself
        off. Specify None if you are sure that the cache will not
        cause memory-limit problems.

        Args:
            expected_max_entries(integer OR None): will raise if not correct

        Returns:
            function

    """
    if (
        expected_max_entries is not None and
        not isinstance(expected_max_entries, int)
    ):
        raise TypeError(
            'Expected expected_max_entries to be an integer or None'
        )

    # indicator of cache missed
    sentinel = object()

    def decorator(func):
        cached = _Cache()

        @functools.wraps(func)
        def inner(*args, **kwargs):
            if cached.off:
                return func(*args, **kwargs)

            keys = args
            if kwargs:
                sorted_items = sorted(kwargs.items())
                for item in sorted_items:
                    keys += item

            hashed = hash(_HashedSeq(keys))
            result = cached.cache.get(hashed, sentinel)
            if result is sentinel:
                cached.missed += 1
                result = func(*args, **kwargs)
                cached.cache[hashed] = result
                # something is wrong if we are here more often than expected:
                # empty the cache and turn it off
                if (
                    expected_max_entries is not None and
                    cached.missed > expected_max_entries
                ):
                    cached.off = True
                    cached.cache.clear()

            return result

        return inner
    return decorator
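
A usage sketch; all arguments must be hashable, and after more than ``expected_max_entries`` cache misses the cache clears itself and switches off:

@function_cache(expected_max_entries=100)
def slow_square(n):
    # Stand-in for an expensive computation.
    return n * n

slow_square(4)  # computed and stored
slow_square(4)  # served from the cache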

Example 47

Project: chainer Source File: helper.py
def for_dtypes_combination(types, names=('dtype',), full=None):
    """Decorator that checks the fixture with a product set of dtypes.

    Args:
         types(list of dtypes): dtypes to be tested.
         names(list of str): Argument names to which dtypes are passed.
         full(bool): If ``True``, then all combinations
             of dtypes will be tested.
             Otherwise, the subset of combinations will be tested
             (see the description below).

    Decorator adds the keyword arguments specified by ``names``
    to the test fixture. Then, it runs the fixtures in parallel
    with passing (possibly a subset of) the product set of dtypes.
    The range of dtypes is specified by ``types``.

    The combination of dtypes to be tested changes depending
    on the option ``full``. If ``full`` is ``True``,
    all combinations of ``types`` are tested.
    Sometimes, such an exhaustive test can be costly.
    So, if ``full`` is ``False``, only the subset of possible
    combinations is tested. Specifically, at first,
    the shuffled lists of ``types`` are made for each argument
    name in ``names``.
    Let the lists be ``D1``, ``D2``, ..., ``Dn``
    where :math:`n` is the number of arguments.
    Then, the combinations to be tested will be ``zip(D1, ..., Dn)``.
    If ``full`` is ``None``, the behavior is switched
    by setting the environment variable ``CUPY_TEST_FULL_COMBINATION=1``.

    For example, let ``types`` be ``[float16, float32, float64]``
    and ``names`` be ``['a_type', 'b_type']``. If ``full`` is ``True``,
    then the decorated test fixture is executed with all
    :math:`3^2 = 9` patterns. On the other hand, if ``full`` is ``False``,
    shuffled lists are made for ``a_type`` and ``b_type``.
    Suppose the lists are ``(16, 64, 32)`` for ``a_type`` and
    ``(32, 64, 16)`` for ``b_type`` (prefixes are removed for short).
    Then the combinations of ``(a_type, b_type)`` to be tested are
    ``(16, 32)``, ``(64, 64)`` and ``(32, 16)``.
    """

    if full is None:
        full = int(os.environ.get('CUPY_TEST_FULL_COMBINATION', '0')) != 0

    if full:
        combination = parameterized.product({name: types for name in names})
    else:
        ts = []
        for _ in range(len(names)):
            # Make shuffled list of types for each name
            t = list(types)
            random.shuffle(t)
            ts.append(t)

        combination = [dict(zip(names, typs)) for typs in zip(*ts)]

    def decorator(impl):
        @functools.wraps(impl)
        def test_func(self, *args, **kw):
            for dtypes in combination:
                kw_copy = kw.copy()
                kw_copy.update(dtypes)

                try:
                    impl(self, *args, **kw_copy)
                except Exception:
                    print(dtypes)
                    raise

        return test_func
    return decorator
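
A usage sketch on a test case, assuming ``numpy`` and the helper above are importable; each invocation of the body receives one ``(a_type, b_type)`` combination as keyword arguments:

import unittest

import numpy

class TestBinaryOp(unittest.TestCase):
    @for_dtypes_combination([numpy.float16, numpy.float32, numpy.float64],
                            names=('a_type', 'b_type'))
    def test_add(self, a_type, b_type):
        a = numpy.ones(3, dtype=a_type)
        b = numpy.ones(3, dtype=b_type)
        self.assertEqual((a + b).shape, (3,))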

Example 48

Project: pypowervm Source File: retry.py
def retry(tries=3, delay_func=NO_DELAY,
          retry_except=None, http_codes=DFT_RETRY_CODES, test_func=None,
          resp_checker=NO_CHECKER, limit_except=None, argmod_func=NO_ARGMOD):
    """Retry method decorator.

    :param tries: The max number of calls to the wrapped method.
    :param delay_func: A method to delay before retrying.
        Defaults to no delay.
        The parameters that are sent are:
            - the number of the current try
            - the maximum number of tries
            - the arguments to the decorated method
            - the keyword arguments to the decorated method
        No return value is expected.
    :param retry_except: An exception class (or tuple thereof) to retry if
        received.  Defaults to no exceptions besides the HttpError which is
        handled separately by the http_codes parameter.
    :param http_codes: A list of http response codes to retry if received.
        Default is to not handle any specific http codes.
    :param test_func: A method to call to determine whether to retry. This
        method takes precedence over http codes. That is, if specified, the
        http codes are not considered.
        The parameters that are sent are:
            - the exception that was received
            - the number of the current try
            - the maximum number of tries
            - the arguments to the decorated method
            - the keyword arguments to the decorated method
        The return value is expected to be boolean, True or False, where
            True means to retry the decorated method.
    :param resp_checker: A method to call when no exception is caught, to
        check the response and determine whether to retry.
        The parameters that are sent are:
            - the number of the current try
            - the maximum number of tries
            - the arguments to the decorated method
            - the keyword arguments to the decorated method
        The return value is expected to be boolean, True or False, where
            True means to retry the decorated method.
    :param limit_except: An exception to raise if the number of tries is
        exhausted.
    :param argmod_func: A method to call after delay_func, before retrying, to
                        modify the arguments to the main method.  The input
                        parameters are:
                            - the number of the current try
                            - the maximum number of tries
                            - the non-keyword arguments to the decorated method
                            - the keyword arguments to the decorated method
                        The return is expected to be a list and a dict of the
                        new arguments to the decorated method.
                        Example:
                        def argmod(t, m, *a, **k):
                            l = list(a)
                            l[0] += 1
                            k['foo'] = bar
                            return l, k
    :returns: The return value of the wrapped method.
    """
    def _retry(func):
        @functools.wraps(func)
        def __retry(*args, **kwds):
            def _raise_exc():
                if _limit_except:
                    raise _limit_except
                else:
                    raise

            def _test_retry(e):
                # Determine if an exception should be raised
                if (not _test_func(e, try_, _tries, *args, **kwds) or
                        try_ == _tries):
                    _raise_exc()
                # Otherwise, we will continue trying
                return

            def _log_response_retry(try_, max_tries, uri, resp_code):
                LOG.warning(_('Attempt %(retry)d of total %(total)d for URI '
                              '%(uri)s.  Error was a known retry response '
                              'code: %(resp_code)s'),
                            {'retry': try_, 'total': max_tries, 'uri': uri,
                             'resp_code': resp_code})

            def _log_exception_retry(try_, max_tries, exc):
                LOG.warning(_('Attempt %(retry)d of %(total)d failed.  Will '
                              'retry. The exception was:\n %(except)s.'),
                            {'retry': try_, 'total': max_tries, 'except': exc})

            # Standardize input
            # For some reason, if we use the params in an 'if' directly
            # python throws an exception.  Assigning them avoids it.
            _tries = tries
            _retry_except = retry_except
            _http_codes = http_codes
            _test_func = test_func
            _resp_checker = resp_checker
            _limit_except = limit_except
            _argmod_func = argmod_func

            if _retry_except is None:
                _retry_except = ()
            if _http_codes is None:
                _http_codes = ()
            caller_test_func = _test_func is not None
            if not caller_test_func:
                _test_func = NO_TEST
            if _resp_checker is None:
                _resp_checker = NO_CHECKER
            # Start retries
            for try_ in moves.range(1, _tries+1):
                try:
                    resp = func(*args, **kwds)
                    # No exception raised, call the response checker
                    # If we're on the last iteration, we return the response.
                    # The response checker should raise an exception if
                    # it doesn't want this behavior.
                    if (not _resp_checker(resp, try_, _tries, *args, **kwds)
                            or try_ == _tries):
                        return resp
                except exc.HttpError as e:
                    if caller_test_func or e.response.status in _http_codes:
                        _test_retry(e)
                        _log_response_retry(try_, _tries, e.response.reqpath,
                                            e.response.status)
                    else:
                        _raise_exc()
                except _retry_except as e:
                    _test_retry(e)
                    _log_exception_retry(try_, _tries, e)
                # If we get here then we're going to retry
                delay_func(try_, _tries, *args, **kwds)
                # Adjust arguments if necessary
                args, kwds = _argmod_func(try_, _tries, *args, **kwds)
        return __retry
    return _retry
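
To make the docstring's contract concrete, here is a minimal usage sketch.
It assumes the decorator factory above is exposed as ``retry`` and accepts
``tries``, ``delay_func`` and ``argmod_func`` keyword parameters (matching
the names used in its body); the helper functions below are hypothetical.

import time

def linear_delay(try_, max_tries, *args, **kwds):
    # Wait a little longer before each successive attempt.
    time.sleep(try_)

def bump_attempt(try_, max_tries, *args, **kwds):
    # Return the (list, dict) of arguments for the next attempt.
    kwds['attempt'] = try_
    return list(args), kwds

@retry(tries=3, delay_func=linear_delay, argmod_func=bump_attempt)
def flaky_request(uri, attempt=0):
    ...  # issue the request; may raise exc.HttpError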

Example 49

Project: pyafipws Source File: utils.py
Function: inicializar_y_capturar_excepciones
def inicializar_y_capturar_excepciones(func):
    "Decorador para inicializar y capturar errores (version para webservices)"
    @functools.wraps(func)
    def capturar_errores_wrapper(self, *args, **kwargs):
        try:
            # inicializo (limpio variables)
            self.Errores = []           # listas de str para lenguajes legados
            self.Observaciones = []
            self.errores = []           # listas de dict para usar en python
            self.observaciones = []
            self.Eventos = []
            self.Traceback = self.Excepcion = ""
            self.ErrCode = self.ErrMsg = self.Obs = ""
            # limpio variables especificas del webservice:
            self.inicializar()
            # actualizo los parámetros
            kwargs.update(self.params_in)
            # limpio los parámetros
            self.params_in = {}
            self.params_out = {}
            # llamo a la función (con reintentos)
            retry = self.reintentos + 1
            while retry:
                try:
                    retry -= 1
                    return func(self, *args, **kwargs)
                except socket.error, e:
                    if e[0] not in (10054, 10053):
                        # solo reintentar si el error es de conexión
                        # (10054, 'Connection reset by peer')
                        # (10053, 'Software caused connection abort')
                        raise
                    else:
                        if DEBUG: print e, "Reintentando..."
                        self.log(exception_info().get("msg", ""))

        except SoapFault, e:
            # guardo destalle de la excepción SOAP
            self.ErrCode = unicode(e.faultcode)
            self.ErrMsg = unicode(e.faultstring)
            self.Excepcion = u"%s: %s" % (e.faultcode, e.faultstring, )
            if self.LanzarExcepciones:
                raise
        except Exception, e:
            ex = exception_info()
            self.Traceback = ex.get("tb", "")
            try:
                self.Excepcion = ex.get("msg", "")
            except:
                self.Excepcion = u"<no disponible>"
            if self.LanzarExcepciones:
                raise
            else:
                self.ErrMsg = self.Excepcion
        finally:
            # guardo datos de depuración
            if self.client:
                self.XmlRequest = self.client.xml_request
                self.XmlResponse = self.client.xml_response
    return capturar_errores_wrapper
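
A sketch of how a decorator like this is applied: each web service client
method is wrapped so that error state is reset on entry and captured on
failure.  The class and method names below are illustrative, not taken from
the project.

class WSDemo(object):  # hypothetical web service client class
    LanzarExcepciones = False

    @inicializar_y_capturar_excepciones
    def Consultar(self, nro):
        # By the time this body runs, the decorator has already reset
        # self.Errores, self.Traceback, etc.  A SoapFault raised here
        # lands in self.ErrCode / self.ErrMsg instead of propagating,
        # unless self.LanzarExcepciones is true.
        res = self.client.Consultar(nro=nro)
        self.params_out['resultado'] = res
        return True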

Example 50

Project: ipopo Source File: utilities.py
def SynchronizedClassMethod(*locks_attr_names, **kwargs):
    """
    A synchronizer decorator for class methods. An AttributeError can be raised
    at runtime if the given lock attribute doesn't exist or if it is None.

    Unless ``sorted=False`` is passed in ``kwargs``, the list of lock names
    will be sorted before locking.

    :param locks_attr_names: A list of the lock(s) attribute(s) name(s) to be
                             used for synchronization
    :return: The decorator that wraps the method with lock acquisition
    """
    # Filter the names (remove empty ones)
    locks_attr_names = [lock_name
                        for lock_name in locks_attr_names
                        if lock_name]

    if not locks_attr_names:
        raise ValueError("The lock names list can't be empty")

    if 'sorted' not in kwargs or kwargs['sorted']:
        # Sort the lock names (the default, unless sorted=False was given)
        # (locking always in the same order reduces the risk of deadlock)
        locks_attr_names = list(locks_attr_names)
        locks_attr_names.sort()

    def wrapped(method):
        """
        The wrapping method

        :param method: The wrapped method
        :return: The wrapped method
        :raise AttributeError: The given attribute name doesn't exist
        """
        @functools.wraps(method)
        def synchronized(self, *args, **kwargs):
            """
            Calls the wrapped method with a lock
            """
            # Raises an AttributeError if needed
            locks = [getattr(self, attr_name)
                     for attr_name in locks_attr_names]
            locked = collections.deque()
            i = 0

            try:
                # Lock
                for lock in locks:
                    if lock is None:
                        # No lock...
                        raise AttributeError(
                            "Lock '{0}' can't be None in class {1}"
                            .format(locks_attr_names[i], type(self).__name__))

                    # Get the lock
                    i += 1
                    lock.acquire()
                    locked.appendleft(lock)

                # Use the method
                return method(self, *args, **kwargs)

            finally:
                # Unlock what has been locked in all cases
                for lock in locked:
                    lock.release()

                locked.clear()
                del locks[:]

        return synchronized

    # Return the wrapped method
    return wrapped
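
For reference, a short usage sketch (the class below is hypothetical): the
decorator takes the *names* of instance attributes that hold locks, and by
default acquires them in sorted order.

import threading

class SharedCounter(object):
    def __init__(self):
        # The attributes named in the decorator must hold lock objects.
        self._lock_a = threading.Lock()
        self._lock_b = threading.Lock()
        self.value = 0

    @SynchronizedClassMethod('_lock_b', '_lock_a')
    def increment(self):
        # Both locks are held here; they were acquired in sorted order
        # ('_lock_a' first) regardless of the order given above.
        self.value += 1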