sys.modules

Here are examples of the Python API sys.modules, taken from open source projects.

125 Examples
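
sys.modules is the interpreter-maintained, dict-like mapping from module names to already-imported module objects. As a primer for the examples below, here is a minimal sketch of the recurring patterns ('some_plugin' is a placeholder name):

import sys

# The module object for the code that is currently executing.
this_module = sys.modules[__name__]

# Membership tells you whether a module has already been imported,
# without importing it as a side effect.
if 'json' in sys.modules:
    json_module = sys.modules['json']

# Removing an entry forces the next import statement to re-execute the
# module from disk instead of returning the cached object.
sys.modules.pop('some_plugin', None)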

Example 101

Project: luci-py Source File: isolateserver_load_test.py
def main():
  colorama.init()

  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-I', '--isolate-server',
      metavar='URL', default='',
      help='Isolate server to use')
  parser.add_option(
      '--namespace', default='temporary%d-gzip' % time.time(), metavar='XX',
      help='Namespace to use on the server, default: %default')
  parser.add_option(
      '--threads', type='int', default=16, metavar='N',
      help='Parallel worker threads to use, default:%default')

  data_group = optparse.OptionGroup(parser, 'Amount of data')
  graph.unit_option(
      data_group, '--items', default=0, help='Number of items to upload')
  graph.unit_option(
      data_group, '--max-size', default=0,
      help='Loop until this amount of data was transferred')
  graph.unit_option(
      data_group, '--mid-size', default=100*1024,
      help='Rough average size of each item, default:%default')
  parser.add_option_group(data_group)

  ui_group = optparse.OptionGroup(parser, 'Result histogram')
  ui_group.add_option(
      '--columns', type='int', default=graph.get_console_width(), metavar='N',
      help='Width of histogram, default:%default')
  ui_group.add_option(
      '--buckets', type='int', default=20, metavar='N',
      help='Number of histogram\'s buckets, default:%default')
  parser.add_option_group(ui_group)

  log_group = optparse.OptionGroup(parser, 'Logging')
  log_group.add_option(
      '--dump', metavar='FOO.JSON', help='Dumps to json file')
  log_group.add_option(
      '-v', '--verbose', action='store_true', help='Enable logging')
  parser.add_option_group(log_group)

  options, args = parser.parse_args()

  logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
  if args:
    parser.error('Unsupported args: %s' % args)
  if bool(options.max_size) == bool(options.items):
    parser.error(
        'Use one of --max-size or --items.\n'
        '  Use --max-size if you want to run it until NN bytes were '
        'transferred.\n'
        '  Otherwise use --items to run it for NN items.')
  options.isolate_server = options.isolate_server.rstrip('/')
  if not options.isolate_server:
    parser.error('--isolate-server is required.')

  print(
      ' - Using %d threads,  items=%d,  max-size=%d,  mid-size=%d' % (
      options.threads, options.items, options.max_size, options.mid_size))

  start = time.time()

  random_pool = Randomness()
  print(' - Generated pool after %.1fs' % (time.time() - start))

  columns = [('index', 0), ('data', 0), ('size', options.items)]
  progress = Progress(columns)
  storage = isolateserver.get_storage(options.isolate_server, options.namespace)
  do_item = functools.partial(
      send_and_receive,
      random_pool,
      storage,
      progress)

  # TODO(maruel): Handling Ctrl-C should:
  # - Stop adding tasks.
  # - Stop scheduling tasks in ThreadPool.
  # - Wait for the remaining ongoing tasks to complete.
  # - Still print details and write the json file.
  with threading_utils.ThreadPoolWithProgress(
      progress, options.threads, options.threads, 0) as pool:
    if options.items:
      for _ in xrange(options.items):
        pool.add_task(0, do_item, gen_size(options.mid_size))
        progress.print_update()
    elif options.max_size:
      # This one is approximate.
      total = 0
      while True:
        size = gen_size(options.mid_size)
        progress.update_item('', size=1)
        progress.print_update()
        pool.add_task(0, do_item, size)
        total += size
        if total >= options.max_size:
          break
    results = sorted(pool.join())

  print('')
  print(' - Took %.1fs.' % (time.time() - start))
  print('')
  print_results(results, options.columns, options.buckets)
  if options.dump:
    with open(options.dump, 'w') as f:
      json.dump(results, f, separators=(',',':'))
  return 0
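
The parser setup above relies on sys.modules[__name__].__doc__, which fetches the docstring of the running script to serve as the --help description. A minimal standalone sketch of the idiom, assuming the script has a module docstring:

"""Demo tool; this module docstring becomes the --help description."""

import optparse
import sys

parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
options, args = parser.parse_args()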

Example 102

Project: luci-py Source File: swarming_load_test_client.py
def main():
  colorama.init()
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-S', '--swarming',
      metavar='URL', default='',
      help='Swarming server to use')
  swarming.add_filter_options(parser)
  parser.set_defaults(dimensions=[('os', swarming_load_test_bot.OS_NAME)])

  group = optparse.OptionGroup(parser, 'Load generated')
  group.add_option(
      '-s', '--send-rate', type='float', default=16., metavar='RATE',
      help='Rate (item/s) of sending requests as a float, default: %default')
  group.add_option(
      '-D', '--duration', type='float', default=60., metavar='N',
      help='Duration (s) of the sending phase of the load test, '
           'default: %default')
  group.add_option(
      '-m', '--concurrent', type='int', default=200, metavar='N',
      help='Maximum concurrent on-going requests, default: %default')
  group.add_option(
      '-t', '--timeout', type='float', default=15*60., metavar='N',
      help='Task expiration and timeout to get results; the task itself will '
           'have %ds less than the value provided. Default: %%default' %
               TIMEOUT_OVERHEAD)
  group.add_option(
      '-o', '--output-size', type='int', default=100, metavar='N',
      help='Bytes sent to stdout, default: %default')
  group.add_option(
      '--sleep', type='int', default=60, metavar='N',
      help='Amount of time the bot should sleep, e.g. faking work, '
           'default: %default')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'Display options')
  group.add_option(
      '--columns', type='int', default=graph.get_console_width(), metavar='N',
      help='For histogram display, default:%default')
  group.add_option(
      '--buckets', type='int', default=20, metavar='N',
      help='Number of buckets for histogram display, default:%default')
  parser.add_option_group(group)

  parser.add_option(
      '--dump', metavar='FOO.JSON', help='Dumps to json file')
  parser.add_option(
      '-v', '--verbose', action='store_true', help='Enables logging')

  options, args = parser.parse_args()
  logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
  if args:
    parser.error('Unsupported args: %s' % args)
  options.swarming = options.swarming.rstrip('/')
  if not options.swarming:
    parser.error('--swarming is required.')
  if options.duration <= 0:
    parser.error('Needs --duration > 0. 0.01 is a valid value.')
  swarming.process_filter_options(parser, options)

  total = int(round(options.send_rate * options.duration))
  print(
      'Sending %.1f i/s for %ds with max %d parallel requests; timeout %.1fs; '
      'total %d' %
        (options.send_rate, options.duration, options.concurrent,
        options.timeout, total))
  print('[processing/processed/todo]')

  # This is used so there's no clash between runs and actual real usage.
  unique = ''.join(random.choice(string.ascii_letters) for _ in range(8))
  columns = [('processing', 0), ('processed', 0), ('todo', 0)]
  progress = threading_utils.Progress(columns)
  index = 0
  results = []
  with threading_utils.ThreadPoolWithProgress(
      progress, 1, options.concurrent, 0) as pool:
    try:
      start = time.time()
      while True:
        duration = time.time() - start
        if duration > options.duration:
          break
        should_have_triggered_so_far = int(round(duration * options.send_rate))
        while index < should_have_triggered_so_far:
          pool.add_task(
              0,
              trigger_task,
              options.swarming,
              options.dimensions,
              options.sleep,
              options.output_size,
              progress,
              unique,
              options.timeout,
              index)
          progress.update_item('', todo=1)
          index += 1
          progress.print_update()
        time.sleep(0.01)
      progress.update_item('Getting results for on-going tasks.', raw=True)
      for i in pool.iter_results():
        results.append(i)
        # This is a bit excessive but it's useful in the case where some tasks
        # hang, so at least partial data is available.
        if options.dump:
          results.sort()
          if os.path.exists(options.dump):
            os.rename(options.dump, options.dump + '.old')
          with open(options.dump, 'wb') as f:
            json.dump(results, f, separators=(',',':'))
      if not options.dump:
        results.sort()
    except KeyboardInterrupt:
      aborted = pool.abort()
      progress.update_item(
          'Got Ctrl-C. Aborted %d unsent tasks.' % aborted,
          raw=True,
          todo=-aborted)
      progress.print_update()
  progress.print_update()
  # At this point, progress is not used anymore.
  print('')
  print(' - Took %.1fs.' % (time.time() - start))
  print('')
  print_results(results, options.columns, options.buckets)
  return 0

Example 103

Project: AoikHotkey Source File: aoikimportutil.py
def import_module_by_name(
    mod_name,
    ns_dir=None,
    sys_use=True,
    sys_add=True,
):
    """Import a module by module name.

    @param mod_name: module name in Python namespace.

    @param ns_dir: load from which namespace dir.
    Namespace dir means the dir is considered as if it's in |sys.path|.
    If |ns_dir| is specified, only load from that dir.
    Otherwise load from any namespace dirs in |sys.path|.
    """
    #
    if ns_dir is None:
        #
        try:
            mod_obj_old = sys.modules[mod_name]
        except KeyError:
            mod_obj_old = None

        #
        if sys_use:
            #
            if mod_obj_old is not None:
                return mod_obj_old

        # 3pRKQd1
        # If we do not want to reuse the existing module in "sys.modules", we
        # need to re-import it by calling "__import__" at 2eys2rL. But
        # "__import__" returns the existing module from "sys.modules", so we
        # must delete the existing module before calling "__import__".
        else:
            #
            try:
                del sys.modules[mod_name]
            except KeyError:
                pass

        #
        try:
            # 2eys2rL
            __import__(mod_name)
            # Raises ImportError if the module does not exist.
            # Propagates any error raised by the imported module.
        except Exception:
            #
            if mod_obj_old is not None:
                # restore to "sys.modules" the old module deleted at 3pRKQd1
                sys.modules[mod_name] = mod_obj_old

            #
            raise

        #
        mod_obj = sys.modules[mod_name]

        #
        if not sys_add:
            #
            par_mod = None

            rdot_idx = mod_name.rfind('.')

            if rdot_idx != -1:
                #
                par_mod_name = mod_name[0:rdot_idx]

                mod_sname = mod_name[rdot_idx + 1:]

                # can be None
                par_mod = sys.modules.get(par_mod_name, None)

            #
            if mod_obj_old is not None:
                # restore to "sys.modules" the old module deleted at 3pRKQd1
                sys.modules[mod_name] = mod_obj_old

                # restore to parent module's attribute the old module deleted
                # at 3pRKQd1
                if par_mod is not None \
                        and getattr(par_mod, mod_sname, None) is mod_obj:
                    try:
                        setattr(par_mod, mod_sname, mod_obj_old)
                    except AttributeError:
                        pass
            #
            else:
                # delete from "sys.modules" the module newly loaded at 2eys2rL.
                try:
                    del sys.modules[mod_name]
                except KeyError:
                    pass

                #
                if par_mod is not None \
                        and getattr(par_mod, mod_sname, None) is mod_obj:
                    # delete from parent module's attribute the module
                    # newly loaded at 2eys2rL.
                    try:
                        delattr(par_mod, mod_sname)
                    except AttributeError:
                        pass

        #
        return mod_obj

    #
    # assert ns_dir is not None

    #
    # E.g. 'a.b.c' splits to ['a', 'b', 'c'].
    # |file_name| means the bare name, without extension.
    mod_file_name_s = mod_name.split('.')

    #
    parent_mod_name = ''  # updated in each iteration below

    mod_file_dir = ns_dir  # updated in each iteration below

    for mod_file_name in mod_file_name_s:
        #
        if parent_mod_name == '':
            parent_mod_obj = None

            mod_name = mod_file_name
        else:
            parent_mod_obj = sys.modules[parent_mod_name]

            mod_name = parent_mod_name + '.' + mod_file_name

        #
        if parent_mod_obj:
            __import__(mod_name)

            mod_obj = sys.modules[mod_name]
        else:
            file_handle = None

            try:
                #
                tup = imp.find_module(mod_file_name, [mod_file_dir])
                # raise ImportError

                #
                mod_obj = imp.load_module(mod_name, *tup)
                # raise any error from the imported module.

                #
                file_handle = tup[0]
            finally:
                if file_handle is not None:
                    file_handle.close()

        #
        parent_mod_name = mod_name

        mod_file_dir = os.path.join(mod_file_dir, mod_file_name)

    #
    return mod_obj
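
A hypothetical usage sketch for the function above, assuming the surrounding module's own imports (sys, imp, os); the 'foo.bar' name and 'plugins' dir are placeholders:

# Reuse the cached module from sys.modules if it is already imported.
mod = import_module_by_name('json')

# Force a fresh import, deleting (and on failure restoring) the cached copy.
mod = import_module_by_name('json', sys_use=False)

# Load 'plugins/foo/bar.py' as module 'foo.bar' without touching sys.path.
mod = import_module_by_name('foo.bar', ns_dir='plugins')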

Example 104

Project: bloodhound Source File: loader.py
Function: get_plugin_info
def get_plugin_info(env, include_core=False):
    """Return package information about Trac core and installed plugins."""
    path_sources = {}
    
    def find_distribution(module):
        name = module.__name__
        path = get_module_path(module)
        sources = path_sources.get(path)
        if sources is None:
            sources = path_sources[path] = get_sources(path)
        dist = sources.get(name.replace('.', '/') + '.py')
        if dist is None:
            dist = sources.get(name.replace('.', '/') + '/__init__.py')
        if dist is None:
            # This is a plain Python source file, not an egg
            dist = pkg_resources.Distribution(project_name=name,
                                              version='',
                                              location=module.__file__)
        return dist
        
    plugins_dir = get_plugins_dir(env)
    plugins = {}
    from trac.core import ComponentMeta
    for component in ComponentMeta._components:
        module = sys.modules[component.__module__]

        dist = find_distribution(module)
        plugin_filename = None
        if os.path.realpath(os.path.dirname(dist.location)) == plugins_dir:
            plugin_filename = os.path.basename(dist.location)

        if dist.project_name not in plugins:
            readonly = True
            if plugin_filename and os.access(dist.location,
                                             os.F_OK + os.W_OK):
                readonly = False
            # retrieve plugin metadata
            info = get_pkginfo(dist)
            if not info:
                info = {}
                for k in ('author', 'author_email', 'home_page', 'url',
                          'license', 'trac'):
                    v = getattr(module, k, '')
                    if v and isinstance(v, basestring):
                        if k == 'home_page' or k == 'url':
                            k = 'home_page'
                            v = v.replace('$', '').replace('URL: ', '') 
                        else:
                            v = to_unicode(v)
                        info[k] = v
            else:
                # Info found; clear every field whose value is "UNKNOWN", as
                # that is the placeholder for fields not specified in
                # "setup.py"
                for k in info:
                    if info[k] == 'UNKNOWN':
                        info[k] = ''
                    else:
                        # Must be converted to unicode, as otherwise Genshi
                        # may raise a "UnicodeDecodeError".
                        info[k] = to_unicode(info[k])

            # retrieve plugin version info
            version = dist.version
            if not version:
                version = (getattr(module, 'version', '') or
                           getattr(module, 'revision', ''))
                # special handling for "$Rev$" strings
                version = version.replace('$', '').replace('Rev: ', 'r') 
            plugins[dist.project_name] = {
                'name': dist.project_name, 'version': version,
                'path': dist.location, 'plugin_filename': plugin_filename,
                'readonly': readonly, 'info': info, 'modules': {},
            }
        modules = plugins[dist.project_name]['modules']
        if module.__name__ not in modules:
            summary, description = get_doc(module)
            plugins[dist.project_name]['modules'][module.__name__] = {
                'summary': summary, 'description': description,
                'components': {},
            }
        full_name = module.__name__ + '.' + component.__name__
        summary, description = get_doc(component)
        c = component
        if c in env and not issubclass(c, env.__class__):
            c = component(env)
        modules[module.__name__]['components'][component.__name__] = {
            'full_name': full_name,
            'summary': summary, 'description': description,
            'enabled': env.is_component_enabled(component),
            'required': getattr(c, 'required', False),
        }
    if not include_core:
        for name in plugins.keys():
            if name.lower() == 'trac':
                plugins.pop(name)
    return sorted(plugins.itervalues(),
                  key=lambda p: (p['name'].lower() != 'trac',
                                 p['name'].lower()))
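
The loop above maps each registered component class back to its defining module with sys.modules[component.__module__]. A minimal sketch of that lookup, using a locally defined class:

import sys

class Widget(object):
    pass

# __module__ is the dotted name of the module that defined the class;
# sys.modules resolves that name back to the live module object.
module = sys.modules[Widget.__module__]
print(module)  # e.g. <module '__main__' ...>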

Example 105

Project: geraldo Source File: base.py
    def __new__(cls, name, bases, attrs):
        super_new = super(ModelBase, cls).__new__
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            # If this isn't a subclass of Model, don't do anything special.
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        abstract = getattr(attr_meta, 'abstract', False)
        if not attr_meta:
            meta = getattr(new_class, 'Meta', None)
        else:
            meta = attr_meta
        base_meta = getattr(new_class, '_meta', None)

        if getattr(meta, 'app_label', None) is None:
            # Figure out the app_label by looking one level up.
            # For 'django.contrib.sites.models', this would be 'sites'.
            model_module = sys.modules[new_class.__module__]
            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
        else:
            kwargs = {}

        new_class.add_to_class('_meta', Options(meta, **kwargs))
        if not abstract:
            new_class.add_to_class('DoesNotExist',
                    subclass_exception('DoesNotExist', ObjectDoesNotExist, module))
            new_class.add_to_class('MultipleObjectsReturned',
                    subclass_exception('MultipleObjectsReturned', MultipleObjectsReturned, module))
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, 'ordering'):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, 'get_latest_by'):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        old_default_mgr = None
        if getattr(new_class, '_default_manager', None):
            # We have a parent who set the default manager.
            if new_class._default_manager.model._meta.abstract:
                old_default_mgr = new_class._default_manager
            new_class._default_manager = None

        # Bail out early if we have already created this class.
        m = get_model(new_class._meta.app_label, name, False)
        if m is not None:
            return m

        # Add all attributes to the class.
        for obj_name, obj in attrs.items():
            new_class.add_to_class(obj_name, obj)

        # Do the appropriate setup for any model parents.
        o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
                if isinstance(f, OneToOneField)])
        for base in parents:
            if not hasattr(base, '_meta'):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                continue
            if not base._meta.abstract:
                if base in o2o_map:
                    field = o2o_map[base]
                    field.primary_key = True
                    new_class._meta.setup_pk(field)
                else:
                    attr_name = '%s_ptr' % base._meta.module_name
                    field = OneToOneField(base, name=attr_name,
                            auto_created=True, parent_link=True)
                    new_class.add_to_class(attr_name, field)
                new_class._meta.parents[base] = field
            else:
                # The abstract base class case.
                names = set([f.name for f in new_class._meta.local_fields + new_class._meta.many_to_many])
                for field in base._meta.local_fields + base._meta.local_many_to_many:
                    if field.name in names:
                        raise FieldError('Local field %r in class %r clashes with field of similar name from abstract base class %r'
                                % (field.name, name, base.__name__))
                    new_class.add_to_class(field.name, copy.deepcopy(field))

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        if old_default_mgr and not new_class._default_manager:
            new_class._default_manager = old_default_mgr._copy_to_model(new_class)
        new_class._prepare()
        register_models(new_class._meta.app_label, new_class)

        # Because of the way imports happen (recursively), we may or may not be
        # the first time this model tries to register with the framework. There
        # should only be one class for each model, so we always return the
        # registered version.
        return get_model(new_class._meta.app_label, name, False)
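
The metaclass above derives the app_label from the model's module path: sys.modules[new_class.__module__].__name__.split('.')[-2] takes the second-to-last component of the dotted name ('sites' for 'django.contrib.sites.models'). A minimal sketch of that derivation, with a placeholder module name:

import sys
import types

# Stand-in for an imported models module; the name is a placeholder.
sys.modules['myproject.sites.models'] = types.ModuleType(
    'myproject.sites.models')

model_module = sys.modules['myproject.sites.models']
app_label = model_module.__name__.split('.')[-2]
print(app_label)  # -> 'sites'

del sys.modules['myproject.sites.models']  # clean up the placeholder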

Example 106

Project: backtrader Source File: plot.py
Function: plot
    def plot(self, strategy, figid=0, numfigs=1, iplot=True, useplotly=False):
        # pfillers={}):
        if not strategy.datas:
            return

        if not len(strategy):
            return

        if iplot:
            if 'ipykernel' in sys.modules:
                matplotlib.use('nbagg')

        # this import must not happen before matplotlib.use
        import matplotlib.pyplot as mpyplot
        self.mpyplot = mpyplot

        self.pinf = PInfo(self.p.scheme)
        self.sortdataindicators(strategy)
        self.calcrows(strategy)

        slen = len(strategy)
        d, m = divmod(slen, numfigs)
        pranges = list()
        for i in range(numfigs):
            a = d * i
            if i == (numfigs - 1):
                d += m  # add remainder to last stint
            b = a + d

            pranges.append([a, b, d])

        figs = []

        for numfig in range(numfigs):
            # prepare a figure
            fig = self.pinf.newfig(figid, numfig, self.mpyplot)
            figs.append(fig)

            self.pinf.pstart, self.pinf.pend, self.pinf.psize = pranges[numfig]
            self.pinf.xstart = self.pinf.pstart
            self.pinf.xend = self.pinf.pend

            self.pinf.clock = strategy
            self.pinf.xreal = self.pinf.clock.datetime.plot(
                self.pinf.pstart, self.pinf.psize)
            self.pinf.xlen = len(self.pinf.xreal)
            self.pinf.x = list(range(self.pinf.xlen))
            # self.pinf.pfillers = {None: []}
            # for key, val in pfillers.items():
            #     pfstart = bisect.bisect_left(val, self.pinf.pstart)
            #     pfend = bisect.bisect_right(val, self.pinf.pend)
            #     self.pinf.pfillers[key] = val[pfstart:pfend]

            # Do the plotting
            # Things that go always at the top (observers)
            self.pinf.xdata = self.pinf.x
            for ptop in self.dplotstop:
                self.plotind(None, ptop, subinds=self.dplotsover[ptop])

            # Create the rest on a per data basis
            dt0, dt1 = self.pinf.xreal[0], self.pinf.xreal[-1]
            for data in strategy.datas:
                self.pinf.xdata = self.pinf.x
                if len(data) < self.pinf.xlen:
                    self.pinf.xdata = xdata = []
                    xreal = self.pinf.xreal
                    dts = data.datetime.plot()
                    for dt in (x for x in dts if dt0 <= x <= dt1):
                        dtidx = bisect.bisect_left(xreal, dt)
                        xdata.append(dtidx)

                for ind in self.dplotsup[data]:
                    self.plotind(
                        data,
                        ind,
                        subinds=self.dplotsover[ind],
                        upinds=self.dplotsup[ind],
                        downinds=self.dplotsdown[ind])

                self.plotdata(data, self.dplotsover[data])

                for ind in self.dplotsdown[data]:
                    self.plotind(
                        data,
                        ind,
                        subinds=self.dplotsover[ind],
                        upinds=self.dplotsup[ind],
                        downinds=self.dplotsdown[ind])

            cursor = MultiCursor(
                fig.canvas, list(self.pinf.daxis.values()),
                useblit=True,
                horizOn=True, vertOn=True,
                horizMulti=False, vertMulti=True,
                horizShared=True, vertShared=False,
                color='black', lw=1, ls=':')

            self.pinf.cursors.append(cursor)

            # Put the subplots as indicated by hspace
            fig.subplots_adjust(hspace=self.pinf.sch.plotdist,
                                top=0.98, left=0.05, bottom=0.05, right=0.95)

            laxis = list(self.pinf.daxis.values())

            # Find last axis which is not a twinx (date locator fails there)
            i = -1
            while True:
                lastax = laxis[i]
                if lastax not in self.pinf.vaxis:
                    break

                i -= 1

            self.setlocators(lastax)  # place the locators/fmts

            # Applying fig.autofmt_xdate if the data axis is the last one
            # breaks the presentation of the date labels. Why?
            # Applying the manual rotation with setp cures the problem,
            # but the labels from all axes but the last have to be hidden.
            for ax in laxis:
                self.mpyplot.setp(ax.get_xticklabels(), visible=False)

            self.mpyplot.setp(lastax.get_xticklabels(), visible=True,
                              rotation=self.pinf.sch.tickrotation)

            # Things must be tight along the x axis (to fill both ends)
            axtight = 'x' if not self.pinf.sch.ytight else 'both'
            self.mpyplot.autoscale(enable=True, axis=axtight, tight=True)

        return figs
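
The 'ipykernel' in sys.modules test above adapts behavior to the host environment: membership means some earlier code (here, a Jupyter kernel) already imported the module, and the check itself never triggers an import. A minimal sketch:

import sys

# True only if ipykernel was already imported by whoever is hosting us.
running_under_jupyter = 'ipykernel' in sys.modules
if running_under_jupyter:
    print('configure for notebook display')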

Example 107

Project: META-SHARE Source File: views.py
def usagestats(request):
    """Get usage of LR fields."""
    expand_all = request.POST.get('expandall')    
    selected_class = request.POST.get('class', "")
    selected_field = request.POST.get('field', "")
    selected_model = request.POST.get('model', "")
    selected_filters = request.POST.getlist('filter')

    errors = None
    textvalues = []
    if selected_class != "" and selected_field != "":
        resultset = UsageStats.objects.values('text') \
            .filter(elparent=selected_class.replace("Type_model",""), \
                elname=selected_field.replace("Type_model","")) \
            .annotate(Count('elname'), Sum('count')) \
            .order_by('-elname__count')
        if len(resultset) > 0:
            for item in resultset:
                text = item["text"]
                if selected_field in NOACCESS_FIELDS:
                    text = "<HIDDEN VALUE>"   
                textvalues.append([text, item['elname__count'], item['count__sum']])

    # published resource counter
    lrset = resourceInfoType_model.objects.filter(
        storage_object__publication_status=PUBLISHED,
        storage_object__deleted=False)

    usage_filter = {"required": 0, "optional": 0, "recommended": 0, "never used": 0, "at least one": 0}
    usage_fields = {}
    usagedata = {}    
    usageset = UsageStats.objects.values('elparent','elname').annotate(Count('lrid', distinct=True), \
        Sum('count')).order_by('elparent', '-lrid__count', 'elname')        
    if (len(usageset) > 0):
        for item in usageset:
            usagedata[item['elparent'] +" "+ item['elname']] = [item['count__sum'], item['lrid__count']]                    
    
    _models = [x for x in dir(sys.modules['metashare.repository.models']) if x.endswith('_model')]
    mod = import_module("metashare.repository.models")
    _fields = {}
    _classes = {}
    for _model in _models:
        # get info about classes
        dbfields = getattr(mod, _model).__schema_classes__
        for _class in dbfields:
            _classes[_class] = dbfields[_class]
            
        # get info about fields
        dbfields = getattr(mod, _model).__schema_fields__
        for _component, _field, _required in dbfields:
            verbose_name = None
            model_name = _model
            if "/" in _component:
                items = _component.split("/")
                model_name = items[0]
                _component = items[1]
                _field = items[1]
                verbose_name = model_name + "/" + _component
                if _component in _classes:
                    verbose_name = model_name + "/" + eval(u'{0}._meta.verbose_name'.format(_classes[_component]))
                verbose_name = verbose_name.replace("string_model","")
           
            component_name = eval(u'{0}._meta.verbose_name'.format(_model))
            metaname = model_name +" "+ _component
            if _component in _classes and _field != "docuementUnstructured":
                style = "component"
                metadata_type = model_name
                if not metaname in _fields:
                    if not verbose_name:
                        if not "_set" in _field:
                            verbose_name = eval(u'{0}._meta.get_field("{1}").verbose_name'.format(_model, _field))
                            class_name = eval(u'{0}._meta.verbose_name'.format(_classes[_component]))
                            if verbose_name != class_name:
                                verbose_name = verbose_name + " [" + class_name +"]"
                                style = "instance"
                                metadata_type = _component
                        else:
                            verbose_name = eval(u'{0}._meta.verbose_name'.format(_classes[_component]))
                    added = _add_usage_meta(usage_fields, component_name, _classes[_component], verbose_name, \
                        _required, "component", metadata_type, usagedata.get(metaname.replace("Type_model",""), None), selected_filters, usage_filter)
            
                    # add the sub metadata fields
                    if added and style == "instance":
                        instance_dbfields = getattr(mod, _classes[_component]).__schema_fields__
                        for _icomponent, _ifield, _irequired in instance_dbfields:
                            if not _icomponent in _classes:
                                verbose_name = eval(u'{0}._meta.get_field("{1}").verbose_name'.format(_classes[_component], _ifield))
                                metaname = metadata_type +" "+ _ifield
                                _add_usage_meta(usage_fields, component_name, \
                                    _ifield, verbose_name, _irequired, \
                                    "ifield", metadata_type, \
                                    usagedata.get(metaname.replace("Type_model","").replace("String_model",""), None), 
                                    selected_filters, usage_filter)
                                if selected_class == metadata_type:
                                    selected_class = model_name
            else:
                if not verbose_name:
                    verbose_name = eval(u'{0}._meta.get_field("{1}").verbose_name'.format(_model, _field))
                _add_usage_meta(usage_fields, component_name, \
                    _field, verbose_name, _required, \
                    "field", model_name, \
                    usagedata.get(metaname.replace("Type_model","").replace("String_model",""), None), \
                    selected_filters, usage_filter)       
         
    fields_count = usage_filter["required"] + usage_filter["optional"] + usage_filter["recommended"]
             
    # update usage stats according to the published resources
    lr_usage = UsageStats.objects.values('lrid').distinct().count()
    if (len(lrset) != lr_usage):
        usagethread = updateUsageStats(lrset)
        if usagethread != None:
            errors = "Usage statistics updating is in progress... "+ str(usagethread.getProgress()) +"% completed"
        
    return render_to_response('stats/usagestats.html',
        {'usage_fields': sorted(usage_fields.iteritems()),
        'usage_filter': usage_filter,
        'fields_count': fields_count,
        'lr_count': lr_usage,
        'selected_filters': selected_filters,
        'selected_model': selected_model,
        'selected_class': selected_class,
        'selected_field': selected_field,
        'expand_all': expand_all,
        'textvalues': textvalues,
        'errors': errors,
        'myres': isOwner(request.user.username)},
        context_instance=RequestContext(request))
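
The notable sys.modules use above is dir(sys.modules['metashare.repository.models']): because the module is known to be imported already, it can be fetched by its dotted name and its attributes enumerated to discover every *_model class. A minimal sketch of the same discovery pattern on a stdlib module:

import sys
import json.decoder  # ensure the module is in sys.modules

# List names defined in an already-imported module, filtered by suffix.
mod = sys.modules['json.decoder']
error_names = [x for x in dir(mod) if x.endswith('Error')]
print(error_names)  # ['JSONDecodeError']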

Example 108

Project: Doger Source File: Commands.py
def admin(req, arg):
	"""
	admin"""
	if len(arg):
		command = arg[0]
		arg = arg[1:]
		if command == "reload":
			for mod in arg:
				reload(sys.modules[mod])
			req.reply("Reloaded")
		elif command == "exec" and Config.config.get("enable_exec", None):
			try:
				exec(" ".join(arg).replace("$", "\n"))
			except Exception as e:
				type, value, tb = sys.exc_info()
				Logger.log("ce", "ERROR in " + req.instance + " : " + req.text)
				Logger.log("ce", repr(e))
				Logger.log("ce", "".join(traceback.format_tb(tb)))
				req.reply(repr(e))
				req.reply("".join(traceback.format_tb(tb)).replace("\n", " || "))
				del tb
		elif command == "ignore":
			Irc.ignore(arg[0], int(arg[1]))
			req.reply("Ignored")
		elif command == "die":
			for instance in Global.instances:
				Global.manager_queue.put(("Disconnect", instance))
			Global.manager_queue.join()
			Blocknotify.stop()
			Global.manager_queue.put(("Die",))
		elif command == "restart":
			for instance in Global.instances:
				Global.manager_queue.put(("Disconnect", instance))
			Global.manager_queue.join()
			Blocknotify.stop()
			os.execv(sys.executable, [sys.executable] + sys.argv)
		elif command == "manager":
			for cmd in arg:
				Global.manager_queue.put(cmd.split("$"))
			req.reply("Sent")
		elif command == "raw":
			Irc.instance_send(req.instance, eval(" ".join(arg)))
		elif command == "config":
			if arg[0] == "save":
				os.rename("Config.py", "Config.py.bak")
				with open("Config.py", "w") as f:
					f.write("config = " + pprint.pformat(Config.config) + "\n")
				req.reply("Done")
			elif arg[0] == "del":
				exec("del Config.config " + " ".join(arg[1:]))
				req.reply("Done")
			else:
				try:
					req.reply(repr(eval("Config.config " + " ".join(arg))))
				except SyntaxError:
					exec("Config.config " + " ".join(arg))
					req.reply("Done")
		elif command == "join":
			Irc.instance_send(req.instance, ("JOIN", arg[0]), priority = 0.1)
		elif command == "part":
			Irc.instance_send(req.instance, ("PART", arg[0]), priority = 0.1)
		elif command == "caches":
			acsize = 0
			accached = 0
			with Global.account_lock:
				for channel in Global.account_cache:
					for user in Global.account_cache[channel]:
						acsize += 1
						if Global.account_cache[channel][user] != None:
							accached += 1
			acchannels = len(Global.account_cache)
			whois = " OK"
			whoisok = True
			for instance in Global.instances:
				tasks = Global.instances[instance].whois_queue.unfinished_tasks
				if tasks:
					if whoisok:
						whois = ""
						whoisok = False
					whois += " %s:%d!" % (instance, tasks)
			req.reply("Account caches: %d user-channels (%d cached) in %d channels; Whois queues:%s" % (acsize, accached, acchannels, whois))
		elif command == "channels":
			inss = ""
			for instance in Global.instances:
				chans = []
				with Global.account_lock:
					for channel in Global.account_cache:
						if instance in Global.account_cache[channel]:
							chans.append(channel)
				inss += " %s:%s" % (instance, ",".join(chans))
			req.reply("Instances:" + inss)
		elif command == "balances":
			database, dogecoind = Transactions.balances()
			req.reply("Dogecoind: %.8f; Database: %.8f" % (dogecoind, database))
		elif command == "blocks":
			info, hashd = Transactions.get_info()
			hashb = Transactions.lastblock.encode("ascii")
			req.reply("Best block: " + hashd + ", Last tx block: " + hashb + ", Blocks: " + str(info.blocks) + ", Testnet: " + str(info.testnet))
		elif command == "lock":
			if len(arg) > 1:
				if arg[1] == "on":
					Transactions.lock(arg[0], True)
				elif arg[1] == "off":
					Transactions.lock(arg[0], False)
				req.reply("Done")
			elif len(arg):
				req.reply("locked" if Transactions.lock(arg[0]) else "not locked")
		elif command == "ping":
			t = time.time()
			Irc.account_names(["."])
			pingtime = time.time() - t
			acc = Irc.account_names([req.nick])[0]
			t = time.time()
			Transactions.balance(acc)
			dbreadtime = time.time() - t
			t = time.time()
			Transactions.lock(acc, False)
			dbwritetime = time.time() - t
			t = time.time()
			Transactions.ping()
			rpctime = time.time() - t
			req.reply("Ping: %f, DB read: %f, DB write: %f, RPC: %f" % (pingtime, dbreadtime, dbwritetime, rpctime))

Example 109

Project: AWS-Lambda-ML-Microservice-Skeleton Source File: build_tools.py
def build_extension(module_path,compiler_name='',build_dir=None,
                    temp_dir=None, verbose=0, **kw):
    """ Build the file given by module_path into a Python extension module.

        build_extension uses distutils to build Python extension modules.
        kw arguments not used are passed on to the distutils extension
        module.  Directory settings can handle absolute paths, but don't
        currently expand '~' or environment variables.

        module_path   -- the full path name to the c file to compile.
                         Something like:  /full/path/name/module_name.c
                         The name of the c/c++ file should be the same as the
                         name of the module (i.e. the initmodule() routine)
        compiler_name -- The name of the compiler to use.  On Windows if it
                         isn't given, MSVC is used if it exists (is found).
                         gcc is used as a second choice. If neither are found,
                         the default distutils compiler is used. Acceptable
                         names are 'gcc', 'msvc' or any of the compiler names
                         shown by distutils.ccompiler.show_compilers()
        build_dir     -- The location where the resulting extension module
                         should be placed. This location must be writable.  If
                         it isn't, several default locations are tried.  If the
                         build_dir is not in the current python path, a warning
                         is emitted, and it is added to the end of the path.
                         build_dir defaults to the current directory.
        temp_dir      -- The location where temporary files (*.o or *.obj)
                         from the build are placed. This location must be
                         writable.  If it isn't, several default locations are
                         tried.  It defaults to tempfile.gettempdir()
        verbose       -- 0, 1, or 2.  0 is as quiet as possible. 1 prints
                         minimal information.  2 is noisy.
        **kw          -- keyword arguments. These are passed on to the
                         distutils extension module.  Most of the keywords
                         are listed below.

        Distutils keywords.  These are cut and pasted from Greg Ward's
        distutils.extension.Extension class for convenience:

        sources : [string]
          list of source filenames, relative to the distribution root
          (where the setup script lives), in Unix form (slash-separated)
          for portability.  Source files may be C, C++, SWIG (.i),
          platform-specific resource files, or whatever else is recognized
          by the "build_ext" command as source for a Python extension.
          Note: The module_path file is always appended to the front of this
                list
        include_dirs : [string]
          list of directories to search for C/C++ header files (in Unix
          form for portability)
        define_macros : [(name : string, value : string|None)]
          list of macros to define; each macro is defined using a 2-tuple,
          where 'value' is either the string to define it to or None to
          define it without a particular value (equivalent of "#define
          FOO" in source or -DFOO on Unix C compiler command line)
        undef_macros : [string]
          list of macros to undefine explicitly
        library_dirs : [string]
          list of directories to search for C/C++ libraries at link time
        libraries : [string]
          list of library names (not filenames or paths) to link against
        runtime_library_dirs : [string]
          list of directories to search for C/C++ libraries at run time
          (for shared extensions, this is when the extension is loaded)
        extra_objects : [string]
          list of extra files to link with (eg. object files not implied
          by 'sources', static library that must be explicitly specified,
          binary resource files, etc.)
        extra_compile_args : [string]
          any extra platform- and compiler-specific information to use
          when compiling the source files in 'sources'.  For platforms and
          compilers where "command line" makes sense, this is typically a
          list of command-line arguments, but for other platforms it could
          be anything.
        extra_link_args : [string]
          any extra platform- and compiler-specific information to use
          when linking object files together to create the extension (or
          to create a new static Python interpreter).  Similar
          interpretation as for 'extra_compile_args'.
        export_symbols : [string]
          list of symbols to be exported from a shared extension.  Not
          used on all platforms, and not generally necessary for Python
          extensions, which typically export exactly one symbol: "init" +
          extension_name.
    """
    success = 0
    from numpy.distutils.core import setup
    from numpy.distutils.log import set_verbosity
    set_verbosity(-1)

    # this is a screwy trick to get rid of a ton of warnings on Unix
    import distutils.sysconfig
    distutils.sysconfig.get_config_vars()
    if 'OPT' in distutils.sysconfig._config_vars:
        flags = distutils.sysconfig._config_vars['OPT']
        flags = flags.replace('-Wall','')
        distutils.sysconfig._config_vars['OPT'] = flags

    # get the name of the module and the extension directory it lives in.
    module_dir,cpp_name = os.path.split(os.path.abspath(module_path))
    module_name,ext = os.path.splitext(cpp_name)

    # configure temp and build directories
    temp_dir = configure_temp_dir(temp_dir)
    build_dir = configure_build_dir(build_dir or module_dir)

    # dag. We keep having to add directories to the path to keep
    # object files separated from each other.  gcc2.x and gcc3.x C++
    # object files are not compatible, so we'll stick them in a sub
    # dir based on their version. This will add a SHA-256 check sum
    # (truncated to 32 characters) of the compiler binary to the directory
    # name to keep objects from different compilers in different locations.

    compiler_dir = platform_info.get_compiler_dir(compiler_name)
    temp_dir = os.path.join(temp_dir,compiler_dir)
    distutils.dir_util.mkpath(temp_dir)

    compiler_name = choose_compiler(compiler_name)

    configure_sys_argv(compiler_name,temp_dir,build_dir)

    # the business end of the function
    try:
        if verbose == 1:
            print('Compiling code...')

        # set compiler verboseness 2 or more makes it output results
        if verbose > 1:
            verb = 1
        else:
            verb = 0

        t1 = time.time()
        ext = create_extension(module_path,**kw)
        # the switcheroo on SystemExit here is meant to keep command line
        # sessions from exiting when compiles fail.
        builtin = sys.modules['__builtin__']
        old_SysExit = builtin.__dict__['SystemExit']
        builtin.__dict__['SystemExit'] = CompileError

        # change current working directory to 'build_dir' so compiler won't
        # pick up anything by mistake
        oldcwd = os.path.abspath(os.getcwd())
        os.chdir(build_dir)

        # distutils for MSVC messes with the environment, so we save the
        # current state and restore them afterward.
        import copy
        environ = copy.deepcopy(os.environ)
        try:
            setup(name=module_name, ext_modules=[ext],verbose=verb)
        finally:
            # restore state
            os.environ = environ
            # restore SystemExit
            builtin.__dict__['SystemExit'] = old_SysExit
            # restore working directory to one before setup
            os.chdir(oldcwd)
        t2 = time.time()

        if verbose == 1:
            print('finished compiling (sec): ', t2 - t1)
        success = 1
        configure_python_path(build_dir)
    except SyntaxError:  # TypeError:
        success = 0

    # restore argv after our trick...
    restore_sys_argv()

    return success
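
The "switcheroo" above patches the SystemExit builtin through sys.modules['__builtin__'] (the Python 2 name; Python 3 calls the module 'builtins'), so that a bare `raise SystemExit(...)` inside distutils resolves to a catchable CompileError instead of ending the session. A minimal sketch of the same temporary patch in Python 3:

import sys

class CompileError(Exception):
    pass

builtins_mod = sys.modules['builtins']  # '__builtin__' on Python 2
old_system_exit = builtins_mod.SystemExit
builtins_mod.SystemExit = CompileError
try:
    pass  # code doing `raise SystemExit(...)` now raises CompileError
finally:
    builtins_mod.SystemExit = old_system_exit  # always restore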

Example 110

Project: mongo-connector Source File: connector.py
Function: run
    @log_fatal_exceptions
    def run(self):
        """Discovers the mongo cluster and creates a thread for each primary.
        """
        self.main_conn = self.create_authed_client()
        LOG.always('Source MongoDB version: %s',
                   self.main_conn.admin.command('buildInfo')['version'])

        for dm in self.doc_managers:
            name = dm.__class__.__module__
            module = sys.modules[name]
            version = 'unknown'
            if hasattr(module, '__version__'):
                version = module.__version__
            elif hasattr(module, 'version'):
                version = module.version
            LOG.always('Target DocManager: %s version: %s', name, version)

        self.read_oplog_progress()
        conn_type = None

        try:
            self.main_conn.admin.command("isdbgrid")
        except pymongo.errors.OperationFailure:
            conn_type = "REPLSET"

        if conn_type == "REPLSET":
            # Make sure we are connected to a replica set
            is_master = self.main_conn.admin.command("isMaster")
            if "setName" not in is_master:
                LOG.error(
                    'No replica set at "%s"! A replica set is required '
                    'to run mongo-connector. Shutting down...' % self.address
                )
                return

            # Establish a connection to the replica set as a whole
            self.main_conn.close()
            self.main_conn = self.create_authed_client(
                replicaSet=is_master['setName'])

            # non-sharded configuration
            oplog = OplogThread(
                self.main_conn, self.doc_managers, self.oplog_progress,
                self.dest_mapping, **self.kwargs)
            self.shard_set[0] = oplog
            LOG.info('MongoConnector: Starting connection thread %s' %
                     self.main_conn)
            oplog.start()

            while self.can_run:
                shard_thread = self.shard_set[0]
                if not (shard_thread.running and shard_thread.is_alive()):
                    LOG.error("MongoConnector: OplogThread"
                              " %s unexpectedly stopped! Shutting down" %
                              (str(self.shard_set[0])))
                    self.oplog_thread_join()
                    for dm in self.doc_managers:
                        dm.stop()
                    return

                self.write_oplog_progress()
                time.sleep(1)

        else:       # sharded cluster
            while self.can_run is True:

                for shard_doc in retry_until_ok(self.main_conn.admin.command,
                                                'listShards')['shards']:
                    shard_id = shard_doc['_id']
                    if shard_id in self.shard_set:
                        shard_thread = self.shard_set[shard_id]
                        if not (shard_thread.running and shard_thread.is_alive()):
                            LOG.error("MongoConnector: OplogThread "
                                      "%s unexpectedly stopped! Shutting "
                                      "down" %
                                      (str(self.shard_set[shard_id])))
                            self.oplog_thread_join()
                            for dm in self.doc_managers:
                                dm.stop()
                            return

                        self.write_oplog_progress()
                        time.sleep(1)
                        continue
                    try:
                        repl_set, hosts = shard_doc['host'].split('/')
                    except ValueError:
                        cause = "The system only uses replica sets!"
                        LOG.exception("MongoConnector: %s", cause)
                        self.oplog_thread_join()
                        for dm in self.doc_managers:
                            dm.stop()
                        return

                    shard_conn = self.create_authed_client(
                        hosts, replicaSet=repl_set)
                    oplog = OplogThread(
                        shard_conn, self.doc_managers, self.oplog_progress,
                        self.dest_mapping, mongos_client=self.main_conn,
                        **self.kwargs)
                    self.shard_set[shard_id] = oplog
                    msg = "Starting connection thread"
                    LOG.info("MongoConnector: %s %s" % (msg, shard_conn))
                    oplog.start()

        self.oplog_thread_join()
        self.write_oplog_progress()

Example 111

Project: osprey Source File: strategies.py
    def suggest(self, history, searchspace):
        """
        Suggest params to maximize an objective function based on the
        function evaluation history using a tree of Parzen estimators (TPE),
        as implemented in the hyperopt package.

        Use of this function requires that hyperopt be installed.
        """
        # This function is very odd, because as far as I can tell there's
        # no real documented API for any of the internals of hyperopt. Its
        # execution model is that hyperopt calls your objective function
        # (instead of merely providing you with suggested points, and then
        # you calling the function yourself), and it's very tricky (for me)
        # to use the internal hyperopt data structures to get these predictions
        # out directly.

        # So the path we take in this function is to construct a synthetic
        # hyperopt.Trials database from the `history`, and then call
        # hyperopt.fmin with a dummy objective function that logs the value
        # used, and then return that value to our client.

        # The form of the hyperopt.Trials database isn't really documented in
        # the code -- most of this comes from reverse engineering it, by
        # running fmin() on a simple function and then inspecting the form of
        # the resulting trials object.
        if 'hyperopt' not in sys.modules:
            raise ImportError('No module named hyperopt')

        random = check_random_state(self.seed)
        hp_searchspace = searchspace.to_hyperopt()

        trials = Trials()
        for i, (params, scores, status) in enumerate(history):
            if status == 'SUCCEEDED':
                # we're doing maximization, hyperopt.fmin() does minimization,
                # so we need to swap the sign
                result = {'loss': -np.mean(scores), 'status': STATUS_OK}
            elif status == 'PENDING':
                result = {'status': STATUS_RUNNING}
            elif status == 'FAILED':
                result = {'status': STATUS_FAIL}
            else:
                raise RuntimeError('unrecognized status: %s' % status)

            # the vals key in the trials dict is basically just the params
            # dict, but enum variables (hyperopt hp.choice() nodes) are
            # different, because the index of the parameter is specified
            # in vals, not the parameter itself.

            vals = {}
            for var in searchspace:
                if isinstance(var, EnumVariable):
                    # get the index in the choices of the parameter, and use
                    # that.
                    matches = [i for i, c in enumerate(var.choices)
                               if c == params[var.name]]
                    assert len(matches) == 1
                    vals[var.name] = matches
                else:
                    # the other big difference is that all of the param values
                    # are wrapped in length-1 lists.
                    vals[var.name] = [params[var.name]]

            trials.insert_trial_doc({
                'misc': {
                    'cmd': ('domain_attachment', 'FMinIter_Domain'),
                    'idxs': dict((k, [i]) for k in hp_searchspace.keys()),
                    'tid': i,
                    'vals': vals,
                    'workdir': None},
                'result': result,
                'tid': i,
                # bunch of fixed fields that hyperopt seems to require
                'owner': None, 'spec': None, 'state': 2, 'book_time': None,
                'exp_key': None, 'refresh_time': None, 'version': 0
                })

        trials.refresh()
        chosen_params_container = []

        def mock_fn(x):
            # http://stackoverflow.com/a/3190783/1079728
            # to get around there being no nonlocal keyword in python2
            chosen_params_container.append(x)
            return 0

        fmin(fn=mock_fn, algo=tpe.suggest, space=hp_searchspace, trials=trials,
             max_evals=len(trials.trials)+1,
             **self._hyperopt_fmin_random_kwarg(random))
        chosen_params = chosen_params_container[0]

        return chosen_params

Example 112

Project: mapit Source File: mapit_UK_import_boundary_line.py
    def handle_label(self, filename, **options):
        if not options['control']:
            raise Exception("You must specify a control file")
        __import__(options['control'])
        control = sys.modules[options['control']]

        code_version = CodeType.objects.get(code=control.code_version())
        name_type = NameType.objects.get(code='O')
        code_type_os = CodeType.objects.get(code='unit_id')

        print(filename)
        current_generation = Generation.objects.current()
        new_generation = Generation.objects.new()
        if not new_generation:
            raise Exception("No new generation to be used for import!")

        ds = DataSource(filename)
        layer = ds[0]
        for feat in layer:
            name = feat['NAME'].value
            if not isinstance(name, six.text_type):
                name = name.decode('iso-8859-1')

            name = re.sub(r'(?i)\s*\(DET( NO \d+|)\)\s*', '', name)
            name = re.sub(r'\s+', ' ', name)

            ons_code = feat['CODE'].value if feat['CODE'].value not in ('999999', '999999999') else None
            unit_id = str(feat['UNIT_ID'].value)
            area_code = feat['AREA_CODE'].value
            patch = self.patch_boundary_line(name, ons_code, unit_id, area_code)
            if 'ons-code' in patch:
                ons_code = patch['ons-code']
            elif 'unit-id' in patch:
                unit_id = patch['unit-id']

            if area_code == 'NCP':
                continue  # Ignore Non Parished Areas

            if ons_code in self.ons_code_to_shape:
                m, poly = self.ons_code_to_shape[ons_code]
                try:
                    m_name = m.names.get(type=name_type).name
                except Name.DoesNotExist:
                    m_name = m.name  # When running without commit (dry run), nothing is stored in the db
                if name != m_name:
                    raise Exception("ONS code %s is used for %s and %s" % (ons_code, name, m_name))
                # Otherwise, combine the two shapes for one area
                poly.append(feat.geom)
                continue

            if unit_id in self.unit_id_to_shape:
                m, poly = self.unit_id_to_shape[unit_id]
                try:
                    m_name = m.names.get(type=name_type).name
                except Name.DoesNotExist:
                    m_name = m.name  # When running without commit (dry run), nothing is stored in the db
                if name != m_name:
                    raise Exception("Unit ID code %s is used for %s and %s" % (unit_id, name, m_name))
                # Otherwise, combine the two shapes for one area
                poly.append(feat.geom)
                continue

            if code_version.code == 'gss' and ons_code:
                country = ons_code[0]  # Hooray!
            elif area_code in ('CED', 'CTY', 'DIW', 'DIS', 'MTW', 'MTD', 'LBW', 'LBO', 'LAC', 'GLA'):
                country = 'E'
            elif code_version.code == 'gss':
                raise Exception(area_code)
            elif (area_code == 'EUR' and 'Scotland' in name) or area_code in ('SPC', 'SPE') or (
                    ons_code and ons_code[0:3] in ('00Q', '00R')):
                country = 'S'
            elif (area_code == 'EUR' and 'Wales' in name) or area_code in ('WAC', 'WAE') or (
                    ons_code and ons_code[0:3] in ('00N', '00P')):
                country = 'W'
            elif area_code in ('EUR', 'UTA', 'UTE', 'UTW', 'CPC'):
                country = 'E'
            else:  # WMC
                # Make sure WMC are loaded after all wards...
                area_within = Area.objects.filter(
                    type__code__in=('UTW', 'UTE', 'MTW', 'COP', 'LBW', 'DIW'),
                    polygons__polygon__contains=feat.geom.geos.point_on_surface)[0]
                country = area_within.country.code
            # Can't do the above ons_code checks with new GSS codes, will have to do more PinP checks
            # Do parents in separate P-in-P code after this is done.

            try:
                check = control.check(name, area_code, country, feat.geom, ons_code=ons_code, commit=options['commit'])
                if check is True:
                    raise Area.DoesNotExist
                if isinstance(check, Area):
                    m = check
                    ons_code = m.codes.get(type=code_version).code
                elif ons_code:
                    m = Area.objects.get(codes__type=code_version, codes__code=ons_code)
                elif unit_id:
                    m = Area.objects.get(
                        codes__type=code_type_os, codes__code=unit_id, generation_high=current_generation)
                    m_name = m.names.get(type=name_type).name
                    if name != m_name:
                        raise Exception("Unit ID code %s is %s in DB but %s in SHP file" % (unit_id, m_name, name))
                else:
                    raise Exception('Area "%s" (%s) has neither ONS code nor unit ID' % (name, area_code))
                if int(options['verbosity']) > 1:
                    print("  Area matched, %s" % (m, ))
            except Area.DoesNotExist:
                print("  New area: %s %s %s %s" % (area_code, ons_code, unit_id, name))
                m = Area(
                    name=name,  # If committing, this will be overwritten by the m.names.update_or_create
                    type=Type.objects.get(code=area_code),
                    country=Country.objects.get(code=country),
                    generation_low=new_generation,
                    generation_high=new_generation,
                )

            if m.generation_high and current_generation and m.generation_high.id < current_generation.id:
                raise Exception("Area %s found, but not in current generation %s" % (m, current_generation))
            m.generation_high = new_generation
            if options['commit']:
                m.save()

            # Make a GEOS geometry only to check for validity:
            g = feat.geom
            geos_g = g.geos
            if not geos_g.valid:
                print("  Geometry of %s %s not valid" % (ons_code, m))
                geos_g = fix_invalid_geos_geometry(geos_g)
                if geos_g is None:
                    raise Exception("The geometry for area %s was invalid and couldn't be fixed" % name)
                else:
                    g = geos_g.ogr

            poly = [g]

            if options['commit']:
                m.names.update_or_create(type=name_type, defaults={'name': name})
            if ons_code:
                self.ons_code_to_shape[ons_code] = (m, poly)
                if options['commit']:
                    m.codes.update_or_create(type=code_version, defaults={'code': ons_code})
            if unit_id:
                self.unit_id_to_shape[unit_id] = (m, poly)
                if options['commit']:
                    m.codes.update_or_create(type=code_type_os, defaults={'code': unit_id})

        if options['commit']:
            save_polygons(self.unit_id_to_shape)
            save_polygons(self.ons_code_to_shape)
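
The __import__ / sys.modules pair at the top of this example is the standard idiom for loading a module whose dotted name is only known at runtime: __import__('pkg.sub') returns the top-level package 'pkg', but it registers 'pkg.sub' in sys.modules, which is where the fully-resolved module is then fetched. A minimal sketch (the module name in the usage line is hypothetical):

import sys

def load_by_name(dotted_name):
    # __import__ returns the top-level package for a dotted name,
    # so fetch the leaf module from sys.modules instead.
    __import__(dotted_name)
    return sys.modules[dotted_name]

# control = load_by_name('myproject.controls.boundary_line')

In modern code, importlib.import_module(dotted_name) performs the same resolution in a single call.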

Example 113

Project: pombola Source File: south_africa_import_constituency_offices.py
    def handle_label(self, input_filename, **options):

        if options['test']:
            import doctest
            failure_count, _ = doctest.testmod(sys.modules[__name__])
            sys.exit(0 if failure_count == 0 else 1)

        global VERBOSE
        VERBOSE = options['verbose']

        geocode_cache = get_geocode_cache()

        na_member_lookup = get_na_member_lookup()

        # Ensure that all the required kinds and other objects exist:

        ok_constituency_office, _ = OrganisationKind.objects.get_or_create(
            slug='constituency-office',
            name='Constituency Office')
        ok_constituency_area, _ = OrganisationKind.objects.get_or_create(
            slug='constituency-area',
            name='Constituency Area')

        pk_constituency_office, _ = PlaceKind.objects.get_or_create(
            slug='constituency-office',
            name='Constituency Office')
        pk_constituency_area, _ = PlaceKind.objects.get_or_create(
            slug='constituency-area',
            name='Constituency Area')

        ck_address, _ = ContactKind.objects.get_or_create(
            slug='address',
            name='Address')
        ck_email, _ = ContactKind.objects.get_or_create(
            slug='email',
            name='Email')
        ck_fax, _ = ContactKind.objects.get_or_create(
            slug='fax',
            name='Fax')
        ck_telephone, _ = ContactKind.objects.get_or_create(
            slug='voice',
            name='Voice')

        pt_constituency_contact, _ = PositionTitle.objects.get_or_create(
            slug='constituency-contact',
            name='Constituency Contact')
        pt_administrator, _ = PositionTitle.objects.get_or_create(
            slug='administrator',
            name='Administrator')

        ork_has_office, _ = OrganisationRelationshipKind.objects.get_or_create(
            name='has_office')

        organisation_content_type = ContentType.objects.get_for_model(Organisation)

        contact_source = "Data from the party via Geoffrey Kilpin"

        mapit_current_generation = Generation.objects.current()

        with_physical_addresses = 0
        geolocated = 0

        created_administrators = {}

        # There's at least one duplicate row, so detect and ignore any duplicates:
        rows_already_done = set()

        try:

            with open(input_filename) as fp:
                reader = csv.DictReader(fp)
                for row in reader:
                    # Make sure there's no leading or trailing
                    # whitespace, and we have unicode strings:
                    row = dict((k, row[k].decode('UTF-8').strip()) for k in row)
                    # Extract each column:
                    party_code = row['Party Code']
                    name = row['Name']
                    manual_lonlat = row['Manually Geocoded LonLat']
                    province = row['Province']
                    office_or_area = row['Type']
                    party = row['Party']
                    administrator = row['Administrator']
                    telephone = row['Tel']
                    fax = row['Fax']
                    physical_address = row['Physical Address']
                    email = row['E-mail']
                    municipality = row['Municipality']

                    abbreviated_party = party
                    m = re.search(r'\((?:|.*, )([A-Z\+]+)\)', party)
                    if m:
                        abbreviated_party = m.group(1)

                    unique_row_id = (party_code, name, party)

                    if unique_row_id in rows_already_done:
                        continue
                    else:
                        rows_already_done.add(unique_row_id)

                    # Collapse whitespace in the name to a single space:
                    name = re.sub(r'(?ms)\s+', ' ', name)

                    mz_party = Organisation.objects.get(name=party)

                    # At various points, constituency office or areas
                    # have been created with the wrong terminology, so
                    # look for any variant of the names:
                    title_data = {'party': abbreviated_party,
                                  'type': office_or_area,
                                  'party_code': party_code,
                                  'name': name}
                    possible_formats = [
                        u'{party} Constituency Area ({party_code}): {name}',
                        u'{party} Constituency Office ({party_code}): {name}',
                        u'{party} Constituency Area: {name}',
                        u'{party} Constituency Office: {name}']
                    org_slug_possibilities = [slugify(fmt.format(**title_data))
                                              for fmt in possible_formats]

                    if party_code:
                        organisation_name = u"{party} Constituency {type} ({party_code}): {name}".format(**title_data)
                    else:
                        organisation_name = u"{party} Constituency {type}: {name}".format(**title_data)

                    places_to_add = []
                    contacts_to_add = []
                    people_to_add = []
                    administrators_to_add = []

                    for contact_kind, value, in ((ck_email, email),
                                                 (ck_telephone, telephone),
                                                 (ck_fax, fax)):
                        if value:
                            contacts_to_add.append({
                                    'kind': contact_kind,
                                    'value': value,
                                    'source': contact_source})

                    if office_or_area == 'Office':
                        constituency_kind = ok_constituency_office

                        if physical_address:

                            # Sometimes there's lots of whitespace
                            # that splits the physical address from a
                            # P.O. Box address, so look for those cases:
                            pobox_address = None
                            m = re.search(r'(?ms)^(.*)\s{5,}(.*)$', physical_address)
                            if m:
                                physical_address = m.group(1).strip()
                                pobox_address = m.group(2).strip()

                            with_physical_addresses += 1
                            physical_address = physical_address.rstrip(',') + ", South Africa"
                            try:
                                verbose("physical_address: " + physical_address.encode('UTF-8'))
                                if manual_lonlat:
                                    verbose("using manually specified location: " + manual_lonlat)
                                    lon, lat = map(float, manual_lonlat.split(","))
                                else:
                                    lon, lat, geocode_cache = geocode(physical_address, geocode_cache, VERBOSE)
                                    verbose("maps to:")
                                    verbose("http://maps.google.com/maps?q=%f,%f" % (lat, lon))
                                geolocated += 1

                                place_name = u'Approximate position of ' + organisation_name
                                places_to_add.append({
                                    'name': place_name,
                                    'slug': slugify(place_name),
                                    'kind': pk_constituency_office,
                                    'location': Point(lon, lat)})

                                contacts_to_add.append({
                                        'kind': ck_address,
                                        'value': physical_address,
                                        'source': contact_source})

                            except LocationNotFound:
                                verbose("XXX no results found for: " + physical_address)

                            if pobox_address is not None:
                                contacts_to_add.append({
                                        'kind': ck_address,
                                        'value': pobox_address,
                                        'source': contact_source})

                            # Deal with the different formats of MP
                            # and MPL names for different parties:
                            for representative_type in ('MP', 'MPL'):
                                if party in ('African National Congress (ANC)',
                                             "African Peoples' Convention (APC)",
                                             "Azanian People's Organisation (AZAPO)",
                                             'Minority Front (MF)',
                                             'United Christian Democratic Party (UCDP)',
                                             'United Democratic Movement (UDM)',
                                             'African Christian Democratic Party (ACDP)'):
                                    name_strings = re.split(r'\s{4,}',row[representative_type])
                                    for name_string in name_strings:
                                        person = find_pombola_person(name_string, na_member_lookup, VERBOSE)
                                        if person:
                                            people_to_add.append(person)
                                elif party in ('Congress of the People (COPE)',
                                               'Freedom Front + (Vryheidsfront+, FF+)'):
                                    for contact in re.split(r'\s*;\s*', row[representative_type]):
                                        # Strip off the phone number
                                        # and email address before
                                        # resolving:
                                        person = find_pombola_person(
                                            re.sub(r'(?ms)\s*\d.*', '', contact),
                                            na_member_lookup,
                                            VERBOSE
                                        )
                                        if person:
                                            people_to_add.append(person)
                                else:
                                    raise Exception, "Unknown party '%s'" % (party,)

                        if municipality:
                            mapit_municipality = get_mapit_municipality(
                                municipality, province
                            )

                            if mapit_municipality:
                                place_name = u'Municipality associated with ' + organisation_name
                                places_to_add.append({
                                    'name': place_name,
                                    'slug': slugify(place_name),
                                    'kind': pk_constituency_office,
                                    'mapit_area': mapit_municipality})

                    elif office_or_area == 'Area':
                        # At the moment it's only for DA that these
                        # Constituency Areas exist, so check that assumption:
                        if party != 'Democratic Alliance (DA)':
                            raise Exception, "Unexpected party %s with Area" % (party)
                        constituency_kind = ok_constituency_area
                        province = fix_province_name(province)
                        mapit_province = Area.objects.get(
                            type__code='PRV',
                            generation_high__gte=mapit_current_generation,
                            generation_low__lte=mapit_current_generation,
                            name=province)
                        place_name = 'Unknown sub-area of %s known as %s' % (
                            province,
                            organisation_name)
                        places_to_add.append({
                                'name': place_name,
                                'slug': slugify(place_name),
                                'kind': pk_constituency_area,
                                'mapit_area': mapit_province})

                        for representative_type in ('MP', 'MPL'):
                            for contact in re.split(r'(?ms)\s*;\s*', row[representative_type]):
                                person = find_pombola_person(contact, na_member_lookup, VERBOSE)
                                if person:
                                    people_to_add.append(person)

                    else:
                        raise Exception, "Unknown type %s" % (office_or_area,)

                    # The Administrator column might have multiple
                    # administrator contacts, separated by
                    # semi-colons.  Each contact may have notes about
                    # them in brackets, and may be followed by more
                    # than one phone number, separated by slashes.
                    if administrator and administrator.lower() != 'vacant':
                        for administrator_contact in re.split(r'\s*;\s*', administrator):
                            # Strip out any bracketed notes:
                            administrator_contact = re.sub(r'\([^\)]*\)', '', administrator_contact)
                            # Extract any phone number at the end:
                            m = re.search(r'^([^0-9]*)([0-9\s/]*)$', administrator_contact)
                            phone_numbers = []
                            if m:
                                administrator_contact, phones = m.groups()
                                phone_numbers = [s.strip() for s in re.split(r'\s*/\s*', phones)]
                            administrator_contact = administrator_contact.strip()
                            # If there's no name after that, just skip this contact
                            if not administrator_contact:
                                continue
                            administrator_contact = re.sub(r'\s+', ' ', administrator_contact)
                            tuple_to_add = (administrator_contact,
                                            tuple(s for s in phone_numbers
                                                  if s and s != nonexistent_phone_number))
                            verbose("administrator name '%s', numbers '%s'" % tuple_to_add)
                            administrators_to_add.append(tuple_to_add)

                    organisation_kwargs = {
                        'name': organisation_name,
                        'slug': slugify(organisation_name),
                        'kind': constituency_kind}

                    # Check if this office appears to exist already:

                    identifier = None
                    identifier_scheme = "constituency-office/%s/" % (abbreviated_party,)

                    try:
                        if party_code:
                            # If there's something in the "Party Code"
                            # column, we can check for an identifier and
                            # get the existing object reliably through that.
                            identifier = Identifier.objects.get(identifier=party_code,
                                                                scheme=identifier_scheme)
                            org = identifier.content_object
                        else:
                            # Otherwise use the slug we intend to use, and
                            # look for an existing organisation:
                            org = Organisation.objects.get(slug__in=org_slug_possibilities,
                                                           kind=constituency_kind)
                    except ObjectDoesNotExist:
                        org = Organisation()
                        if party_code:
                            identifier = Identifier(identifier=party_code,
                                                    scheme=identifier_scheme,
                                                    content_type=organisation_content_type)

                    # Make sure we set the same attributes and save:
                    for k, v in organisation_kwargs.items():
                        setattr(org, k, v)

                    if options['commit']:
                        org.save()
                        if party_code:
                            identifier.object_id = org.id
                            identifier.save()

                        # Replace all places associated with this
                        # organisation and re-add them:
                        org.place_set.all().delete()
                        for place_dict in places_to_add:
                            org.place_set.create(**place_dict)

                        # Replace all contact details associated with this
                        # organisation, and re-add them:
                        org.contacts.all().delete()
                        for contact_dict in contacts_to_add:
                            org.contacts.create(**contact_dict)

                        # Remove previous has_office relationships,
                        # between this office and any party, then re-add
                        # this one:
                        OrganisationRelationship.objects.filter(
                            organisation_b=org).delete()
                        OrganisationRelationship.objects.create(
                            organisation_a=mz_party,
                            kind=ork_has_office,
                            organisation_b=org)

                        # Remove all Membership relationships between this
                        # organisation and other people, then recreate them:
                        org.position_set.filter(title=pt_constituency_contact).delete()
                        for person in people_to_add:
                            org.position_set.create(
                                person=person,
                                title=pt_constituency_contact,
                                category='political')

                        # Remove any administrators for this organisation:
                        for position in org.position_set.filter(title=pt_administrator):
                            for contact in position.person.contacts.all():
                                contact.delete()
                            position.person.delete()
                            position.delete()
                        # And create new administrators:
                        for administrator_tuple in administrators_to_add:
                            administrator_name, phone_numbers = administrator_tuple
                            if administrator_tuple in created_administrators:
                                person = created_administrators[administrator_tuple]
                            else:
                                person = Person.objects.create(legal_name=administrator_name,
                                                               slug=slugify(administrator_name))
                                created_administrators[administrator_tuple] = person
                                for phone_number in phone_numbers:
                                    person.contacts.create(kind=ck_telephone,
                                                           value=phone_number,
                                                           source=contact_source)
                            Position.objects.create(person=person,
                                                    organisation=org,
                                                    title=pt_administrator,
                                                    category='political')

        finally:
            write_geocode_cache(geocode_cache)

        verbose("Geolocated %d out of %d physical addresses" % (geolocated, with_physical_addresses))

Example 114

Project: theyworkforyou Source File: base.py
    def __new__(cls, name, bases, attrs):
        super_new = super(ModelBase, cls).__new__
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            # If this isn't a subclass of Model, don't do anything special.
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        abstract = getattr(attr_meta, 'abstract', False)
        if not attr_meta:
            meta = getattr(new_class, 'Meta', None)
        else:
            meta = attr_meta
        base_meta = getattr(new_class, '_meta', None)

        if getattr(meta, 'app_label', None) is None:
            # Figure out the app_label by looking one level up.
            # For 'django.contrib.sites.models', this would be 'sites'.
            model_module = sys.modules[new_class.__module__]
            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
        else:
            kwargs = {}

        new_class.add_to_class('_meta', Options(meta, **kwargs))
        if not abstract:
            new_class.add_to_class('DoesNotExist',
                    subclass_exception('DoesNotExist', ObjectDoesNotExist, module))
            new_class.add_to_class('MultipleObjectsReturned',
                    subclass_exception('MultipleObjectsReturned', MultipleObjectsReturned, module))
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, 'ordering'):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, 'get_latest_by'):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        is_proxy = new_class._meta.proxy

        if getattr(new_class, '_default_manager', None):
            if not is_proxy:
                # Multi-table inheritance doesn't inherit default manager from
                # parents.
                new_class._default_manager = None
                new_class._base_manager = None
            else:
                # Proxy classes do inherit parent's default manager, if none is
                # set explicitly.
                new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
                new_class._base_manager = new_class._base_manager._copy_to_model(new_class)

        # Bail out early if we have already created this class.
        m = get_model(new_class._meta.app_label, name, False)
        if m is not None:
            return m

        # Add all attributes to the class.
        for obj_name, obj in attrs.items():
            new_class.add_to_class(obj_name, obj)

        # All the fields of any type declared on this model
        new_fields = new_class._meta.local_fields + \
                     new_class._meta.local_many_to_many + \
                     new_class._meta.virtual_fields
        field_names = set([f.name for f in new_fields])

        # Basic setup for proxy models.
        if is_proxy:
            base = None
            for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
                if parent._meta.abstract:
                    if parent._meta.fields:
                        raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
                    else:
                        continue
                if base is not None:
                    raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
                else:
                    base = parent
            if base is None:
                    raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
            if (new_class._meta.local_fields or
                    new_class._meta.local_many_to_many):
                raise FieldError("Proxy model '%s' contains model fields."
                        % name)
            while base._meta.proxy:
                base = base._meta.proxy_for_model
            new_class._meta.setup_proxy(base)

        # Do the appropriate setup for any model parents.
        o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
                if isinstance(f, OneToOneField)])

        for base in parents:
            original_base = base
            if not hasattr(base, '_meta'):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                continue

            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            # Check for clashes between locally declared fields and those
            # on the base classes (we cannot handle shadowed fields at the
            # moment).
            for field in parent_fields:
                if field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '
                                     'with field of similar name from '
                                     'base class %r' %
                                        (field.name, name, base.__name__))
            if not base._meta.abstract:
                # Concrete classes...
                while base._meta.proxy:
                    # Skip over a proxy class to the "real" base it proxies.
                    base = base._meta.proxy_for_model
                if base in o2o_map:
                    field = o2o_map[base]
                elif not is_proxy:
                    attr_name = '%s_ptr' % base._meta.module_name
                    field = OneToOneField(base, name=attr_name,
                            auto_created=True, parent_link=True)
                    new_class.add_to_class(attr_name, field)
                else:
                    field = None
                new_class._meta.parents[base] = field
            else:
                # .. and abstract ones.
                for field in parent_fields:
                    new_class.add_to_class(field.name, copy.deepcopy(field))

                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base._meta.parents)

            # Inherit managers from the abstract base classes.
            new_class.copy_managers(base._meta.abstract_managers)

            # Proxy models inherit the non-abstract managers from their base,
            # unless they have redefined any of them.
            if is_proxy:
                new_class.copy_managers(original_base._meta.concrete_managers)

            # Inherit virtual fields (like GenericForeignKey) from the parent
            # class
            for field in base._meta.virtual_fields:
                if base._meta.abstract and field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '\
                                     'with field of similar name from '\
                                     'abstract base class %r' % \
                                        (field.name, name, base.__name__))
                new_class.add_to_class(field.name, copy.deepcopy(field))

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        new_class._prepare()
        register_models(new_class._meta.app_label, new_class)

        # Because of the way imports happen (recursively), we may or may not be
        # the first time this model tries to register with the framework. There
        # should only be one class for each model, so we always return the
        # registered version.
        return get_model(new_class._meta.app_label, name, False)
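
The sys.modules lookup in this metaclass resolves the module a model class was defined in, so the app_label can be read off the dotted path ('django.contrib.sites.models' yields 'sites'). A standalone sketch of just that lookup, using a hypothetical class:

import sys

class Widget(object):
    pass

model_module = sys.modules[Widget.__module__]
parts = model_module.__name__.split('.')
# 'pkg.app.models' -> 'app'; guard against dotless names like '__main__':
app_label = parts[-2] if len(parts) > 1 else parts[0]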

Example 115

Project: Beehive Source File: pprint2.py
def _safe_repr(object, context, maxlevels, level):
    typ = _type(object)
    if typ is str:
        string = object
        string = string.replace('\n', '\\n').replace('\r','\\r').replace('\t','\\t')
        if 'locale' not in _sys.modules:
            return repr(object), True, False
        if "'" in object and '"' not in object:
            closure = '"'
            quotes = {'"': '\\"'}
            string = string.replace('"','\\"')
        else:
            closure = "'"
            quotes = {"'": "\\'"}
            string = string.replace("'", "\\'")
        try:
            string.decode('utf8').encode('gbk')
            return ("%s%s%s" % (closure, string, closure)), True, False
        except:
            pass
        qget = quotes.get
        sio = _StringIO()
        write = sio.write
        for char in object:
            if char.isalpha():
                write(char)
            else:
                write(qget(char, repr(char)[1:-1]))
        return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False

    if typ is unicode:
        string = object.encode("utf8")
        string = string.replace('\n', '\\n').replace('\r','\\r').replace('\t','\\t')
        if "'" in object and '"' not in object:
            closure = '"'
            quotes = {'"': '\\"'}
            string = string.replace('"','\\"')
        else:
            closure = "'"
            quotes = {"'": "\\'"}
            string = string.replace("'", "\\'")
        return ("u%s%s%s" % (closure, string, closure)), True, False

    r = getattr(typ, "__repr__", None)
    if issubclass(typ, dict) and r is dict.__repr__:
        if not object:
            return "{}", True, False
        objid = _id(object)
        if maxlevels and level >= maxlevels:
            return "{...}", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        saferepr = _safe_repr
        for k, v in _sorted(object.items()):
            krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
            vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
            append("%s: %s" % (krepr, vrepr))
            readable = readable and kreadable and vreadable
            if krecur or vrecur:
                recursive = True
        del context[objid]
        return "{%s}" % _commajoin(components), readable, recursive

    if (issubclass(typ, list) and r is list.__repr__) or \
       (issubclass(typ, tuple) and r is tuple.__repr__):
        if issubclass(typ, list):
            if not object:
                return "[]", True, False
            format = "[%s]"
        elif _len(object) == 1:
            format = "(%s,)"
        else:
            if not object:
                return "()", True, False
            format = "(%s)"
        objid = _id(object)
        if maxlevels and level >= maxlevels:
            return format % "...", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        for o in object:
            orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
            append(orepr)
            if not oreadable:
                readable = False
            if orecur:
                recursive = True
        del context[objid]
        return format % _commajoin(components), readable, recursive

    rep = repr(object)
    return rep, (rep and not rep.startswith('<')), False
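
The only sys.modules use here is the early 'locale' not in _sys.modules test: if nothing has imported the locale module, the function returns a plain repr() instead of attempting the quoting logic below. A membership test on sys.modules asks "has anyone imported X?" without ever triggering an import; a minimal sketch:

import sys

def maybe_fancy_repr(obj):
    # No side effects: this never imports locale, it only checks
    # whether something else already has.
    if 'locale' not in sys.modules:
        return repr(obj)
    # ... locale-aware formatting would go here ...
    return repr(obj)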

Example 116

Project: BEurtle Source File: dispatch.py
def _Dispatch(ps, modules, SendResponse, SendFault, nsdict={}, typesmodule=None, 
              gettypecode=gettypecode, rpc=False, docstyle=False, **kw):
    '''Find a handler for the SOAP request in ps; search modules.
    Call SendResponse or SendFault to send the reply back, appropriately.

    Behaviors:
        default -- Call "handler" method with pyobj representation of body root, and return
            a self-describing request (w/typecode).  Parsing done via a typecode from 
            typesmodule, or Any.

        docstyle -- Call "handler" method with a ParsedSoap instance and parse the result with an
          XML typecode (DOM). Behavior: wrap the result in a message whose body root is the
          request name with "Response" appended.

        rpc -- Specify RPC wrapping of the result. Behavior: ignore the body root (RPC wrapper)
           of the request and parse all "parts" of the message via individual typecodes.  Expect
           the handler to return the parts of the message; whether it is a dict, a single instance,
           or a list, try to serialize it as a Struct, but if that is not possible put it in an
           Array.  Parsing is done via a typecode from typesmodule, or Any.

    '''
    global _client_binding
    try:
        what = str(ps.body_root.localName)

        # See what modules have the element name.
        if modules is None:
            modules = ( sys.modules['__main__'], )

        handlers = [ getattr(m, what) for m in modules if hasattr(m, what) ]
        if len(handlers) == 0:
            raise TypeError("Unknown method " + what)

        # Of those modules, see who's callable.
        handlers = [ h for h in handlers if callable(h) ]
        if len(handlers) == 0:
            raise TypeError("Unimplemented method " + what)
        if len(handlers) > 1:
            raise TypeError("Multiple implementations found: " + `handlers`)
        handler = handlers[0]

        _client_binding = ClientBinding(ps)
        if docstyle:
            result = handler(ps.body_root)
            tc = TC.XML(aslist=1, pname=what+'Response')
        elif not rpc:
            try:
                tc = gettypecode(typesmodule, ps.body_root)
            except Exception:
                tc = TC.Any()

            try:
                arg = tc.parse(ps.body_root, ps)
            except EvaluateException, ex:
                SendFault(FaultFromZSIException(ex), **kw)
                return

            try:
                result = handler(arg)
            except Exception,ex:
                SendFault(FaultFromZSIException(ex), **kw)
                return

            try:
                tc = result.typecode
            except AttributeError,ex:
                SendFault(FaultFromZSIException(ex), **kw)
                return

        elif typesmodule is not None:
            kwargs = {}
            for e in _child_elements(ps.body_root):
                try:
                    tc = gettypecode(typesmodule, e)
                except Exception:
                    tc = TC.Any()

                try:
                    kwargs[str(e.localName)] = tc.parse(e, ps)
                except EvaluateException, ex:
                    SendFault(FaultFromZSIException(ex), **kw)
                    return

            result = handler(**kwargs)
            aslist = False
            # make sure data is wrapped, try to make this a Struct
            if type(result) in _seqtypes:
                 for o in result:
                     aslist = hasattr(result, 'typecode')
                     if aslist: break
            elif type(result) is not dict:
                 aslist = not hasattr(result, 'typecode')
                 result = (result,)

            tc = TC.Any(pname=what+'Response', aslist=aslist)
        else:
            # if this is an Array, call handler with list
            # if this is a Struct, call handler with dict
            tp = _find_type(ps.body_root)
            isarray = ((type(tp) in (tuple,list) and tp[1] == 'Array') or _find_arraytype(ps.body_root))
            data = _child_elements(ps.body_root)
            tc = TC.Any()
            if isarray and len(data) == 0:
                result = handler()
            elif isarray:
                try: arg = [ tc.parse(e, ps) for e in data ]
                except EvaluateException, e:
                    #SendFault(FaultFromZSIException(e), **kw)
                    SendFault(RuntimeError("THIS IS AN ARRAY: %s" %isarray))
                    return

                result = handler(*arg)
            else:
                try: kwarg = dict([ (str(e.localName),tc.parse(e, ps)) for e in data ])
                except EvaluateException, e:
                    SendFault(FaultFromZSIException(e), **kw)
                    return

                result = handler(**kwarg)

            # response typecode
            #tc = getattr(result, 'typecode', TC.Any(pname=what+'Response'))
            tc = TC.Any(pname=what+'Response')

        sw = SoapWriter(nsdict=nsdict)
        sw.serialize(result, tc)
        return SendResponse(str(sw), **kw)
    except Fault, e:
        return SendFault(e, **kw)
    except Exception, e:
        # Something went wrong, send a fault.
        return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)
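
When called with modules=None, the dispatcher above falls back to sys.modules['__main__'], i.e. the executing script, and searches it for a callable whose name matches the SOAP body element. A minimal sketch of just that resolution step:

import sys

def find_handler(name, modules=None):
    # sys.modules['__main__'] is always present: it is the script
    # (or interactive session) that started the interpreter.
    if modules is None:
        modules = (sys.modules['__main__'],)
    handlers = [getattr(m, name) for m in modules if hasattr(m, name)]
    handlers = [h for h in handlers if callable(h)]
    if len(handlers) != 1:
        raise TypeError("Expected exactly one handler for %r, found %d"
                        % (name, len(handlers)))
    return handlers[0]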

Example 117

Project: coveragepy Source File: coveragetest.py
    def check_coverage(
        self, text, lines=None, missing="", report="",
        excludes=None, partials="",
        arcz=None, arcz_missing="", arcz_unpredicted="",
        arcs=None, arcs_missing=None, arcs_unpredicted=None,
    ):
        """Check the coverage measurement of `text`.

        The source `text` is run and measured.  `lines` are the line numbers
        that are executable, or a list of possible line numbers, any of which
        could match. `missing` are the lines not executed, `excludes` are
        regexes to match against for excluding lines, and `report` is the text
        of the measurement report.

        For arc measurement, `arcz` is a string that can be decoded into arcs
        in the code (see `arcz_to_arcs` for the encoding scheme).
        `arcz_missing` are the arcs that are not executed, and
        `arcz_unpredicted` are the arcs executed in the code, but not deducible
        from the code.  These last two default to "", meaning we explicitly
        check that there are no missing or unpredicted arcs.

        Returns the Coverage object, in case you want to poke at it some more.

        """
        # We write the code into a file so that we can import it.
        # Coverage.py wants to deal with things as modules with file names.
        modname = self.get_module_name()

        self.make_file(modname + ".py", text)

        if arcs is None and arcz is not None:
            arcs = self.arcz_to_arcs(arcz)
        if arcs_missing is None:
            arcs_missing = self.arcz_to_arcs(arcz_missing)
        if arcs_unpredicted is None:
            arcs_unpredicted = self.arcz_to_arcs(arcz_unpredicted)

        # Start up coverage.py.
        cov = coverage.Coverage(branch=True)
        cov.erase()
        for exc in excludes or []:
            cov.exclude(exc)
        for par in partials or []:
            cov.exclude(par, which='partial')

        mod = self.start_import_stop(cov, modname)

        # Clean up our side effects
        del sys.modules[modname]

        # Get the analysis results, and check that they are right.
        analysis = cov._analyze(mod)
        statements = sorted(analysis.statements)
        if lines is not None:
            if isinstance(lines[0], int):
                # lines is just a list of numbers, it must match the statements
                # found in the code.
                self.assertEqual(statements, lines)
            else:
                # lines is a list of possible line number lists, one of them
                # must match.
                for line_list in lines:
                    if statements == line_list:
                        break
                else:
                    self.fail("None of the lines choices matched %r" % statements)

            missing_formatted = analysis.missing_formatted()
            if isinstance(missing, string_class):
                self.assertEqual(missing_formatted, missing)
            else:
                for missing_list in missing:
                    if missing_formatted == missing_list:
                        break
                else:
                    self.fail("None of the missing choices matched %r" % missing_formatted)

        if arcs is not None:
            with self.delayed_assertions():
                self.assert_equal_args(
                    analysis.arc_possibilities(), arcs,
                    "Possible arcs differ",
                )

                self.assert_equal_args(
                    analysis.arcs_missing(), arcs_missing,
                    "Missing arcs differ"
                )

                self.assert_equal_args(
                    analysis.arcs_unpredicted(), arcs_unpredicted,
                    "Unpredicted arcs differ"
                )

        if report:
            frep = StringIO()
            cov.report(mod, file=frep, show_missing=True)
            rep = " ".join(frep.getvalue().split("\n")[2].split()[1:])
            self.assertEqual(report, rep)

        return cov
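
The cleanup step del sys.modules[modname] is what makes the helper reusable: once the cache entry is gone, the next import of the same name re-executes the source file instead of returning the stale cached module. A minimal sketch (note this does not purge submodules of a package):

import importlib
import sys

def fresh_import(modname):
    # Drop the cached entry (if any) so the import below re-reads the file.
    sys.modules.pop(modname, None)
    return importlib.import_module(modname)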

Example 118

Project: dreampie Source File: safearray.py
Function: make_safearray_type
def _make_safearray_type(itemtype):
    # Create and return a subclass of tagSAFEARRAY
    from comtypes.automation import _ctype_to_vartype, VT_RECORD, \
         VT_UNKNOWN, IDispatch, VT_DISPATCH

    meta = type(_safearray.tagSAFEARRAY)
    sa_type = meta.__new__(meta,
                           "SAFEARRAY_%s" % itemtype.__name__,
                           (_safearray.tagSAFEARRAY,), {})

    try:
        vartype = _ctype_to_vartype[itemtype]
        extra = None
    except KeyError:
        if issubclass(itemtype, Structure):
            try:
                guids = itemtype._recordinfo_
            except AttributeError:
                extra = None
            else:
                from comtypes.typeinfo import GetRecordInfoFromGuids
                extra = GetRecordInfoFromGuids(*guids)
            vartype = VT_RECORD
        elif issubclass(itemtype, POINTER(IDispatch)):
            vartype = VT_DISPATCH
            extra = pointer(itemtype._iid_)
        elif issubclass(itemtype, POINTER(IUnknown)):
            vartype = VT_UNKNOWN
            extra = pointer(itemtype._iid_)
        else:
            raise TypeError(itemtype)

    class _(partial, POINTER(sa_type)):
        # Should explain how SAFEARRAY is used in comtypes
        _itemtype_ = itemtype # a ctypes type
        _vartype_ = vartype # a VARTYPE value: VT_...
        _needsfree = False

##        @classmethod
        def create(cls, value, extra=None):
            """Create a POINTER(SAFEARRAY_...) instance of the correct
            type; value is an object containing the items to store.

            Python lists, tuples, and array.array instances containing
            compatible item types can be passed to create
            one-dimensional arrays.  To create multidimensional arrays,
            numpy arrays must be passed.
            """

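            # Checking sys.modules (rather than importing numpy) keeps numpy
            # an optional dependency: the ndarray fast path is taken only
            # when the caller has already imported numpy themselves.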
            if "numpy" in sys.modules:
                numpy = sys.modules["numpy"]
                if isinstance(value, numpy.ndarray):
                    return cls.create_from_ndarray(value, extra)

            # For VT_UNKNOWN or VT_DISPATCH, extra must be a pointer to
            # the GUID of the interface.
            #
            # For VT_RECORD, extra must be a pointer to an IRecordInfo
            # describing the record.

            # XXX How to specify the lbound (3rd parameter to CreateVectorEx)?
            # XXX How to write tests for lbound != 0?
            pa = _safearray.SafeArrayCreateVectorEx(cls._vartype_,
                                                    0,
                                                    len(value),
                                                    extra)
            if not pa:
                if cls._vartype_ == VT_RECORD and extra is None:
                    raise TypeError("Cannot create SAFEARRAY type VT_RECORD without IRecordInfo.")
                # Hm, there may be other reasons why the creation fails...
                raise MemoryError()
            # We now have a POINTER(tagSAFEARRAY) instance which we must cast
            # to the correct type:
            pa = cast(pa, cls)
            # Now, fill the data in:
            ptr = POINTER(cls._itemtype_)() # container for the values
            _safearray.SafeArrayAccessData(pa, byref(ptr))
            try:
                if isinstance(value, array.array):
                    addr, n = value.buffer_info()
                    nbytes = len(value) * sizeof(cls._itemtype_)
                    memmove(ptr, addr, nbytes)
                else:
                    for index, item in enumerate(value):
                        ptr[index] = item
            finally:
                _safearray.SafeArrayUnaccessData(pa)
            return pa
        create = classmethod(create)

##        @classmethod
        def create_from_ndarray(cls, value, extra, lBound=0):
            # __import__("numpy.ctypeslib") returns the top-level numpy
            # package, with numpy.ctypeslib imported as a submodule attribute
            numpy = __import__("numpy.ctypeslib")

            # SAFEARRAYs have Fortran order; convert the numpy array if needed
            if not value.flags.f_contiguous:
                value = numpy.array(value, order="F")

            ai = value.__array_interface__
            if ai["version"] != 3:
                raise TypeError("only __array_interface__ version 3 supported")
            if cls._itemtype_ != numpy.ctypeslib._typecodes[ai["typestr"]]:
                raise TypeError("Wrong array item type")

            # For VT_UNKNOWN or VT_DISPATCH, extra must be a pointer to
            # the GUID of the interface.
            #
            # For VT_RECORD, extra must be a pointer to an IRecordInfo
            # describing the record.
            rgsa = (_safearray.SAFEARRAYBOUND * value.ndim)()
            nitems = 1
            for i, d in enumerate(value.shape):
                nitems *= d
                rgsa[i].cElements = d
                rgsa[i].lBound = lBound
            pa = _safearray.SafeArrayCreateEx(cls._vartype_,
                                              value.ndim, # cDims
                                              rgsa, # rgsaBound
                                              extra) # pvExtra
            if not pa:
                if cls._vartype_ == VT_RECORD and extra is None:
                    raise TypeError("Cannot create SAFEARRAY type VT_RECORD without IRecordInfo.")
                # Hm, there may be other reasons why the creation fails...
                raise MemoryError()
            # We now have a POINTER(tagSAFEARRAY) instance which we must cast
            # to the correct type:
            pa = cast(pa, cls)
            # Now, fill the data in:
            ptr = POINTER(cls._itemtype_)() # pointer to the item values
            _safearray.SafeArrayAccessData(pa, byref(ptr))
            try:
                nbytes = nitems * sizeof(cls._itemtype_)
                memmove(ptr, value.ctypes.data, nbytes)
            finally:
                _safearray.SafeArrayUnaccessData(pa)
            return pa
        create_from_ndarray = classmethod(create_from_ndarray)

##        @classmethod
        def from_param(cls, value):
            if not isinstance(value, cls):
                value = cls.create(value, extra)
                value._needsfree = True
            return value
        from_param = classmethod(from_param)

        def __getitem__(self, index):
            # pparray[0] returns the whole array contents.
            if index != 0:
                raise IndexError("Only index 0 allowed")
            return self.unpack()

        def __setitem__(self, index, value):
            # XXX Need this to implement [in, out] safearrays in COM servers!
##            print "__setitem__", index, value
            raise TypeError("Setting items not allowed")

        def __ctypes_from_outparam__(self):
            self._needsfree = True
            return self[0]

        def __del__(self):
            if self._needsfree:
                _safearray.SafeArrayDestroy(self)

        def _get_size(self, dim):
            "Return the number of elements for dimension 'dim'"
            return _safearray.SafeArrayGetUBound(self, dim)+1 - _safearray.SafeArrayGetLBound(self, dim)

        def unpack(self):
            """Unpack a POINTER(SAFEARRAY_...) into a Python tuple."""
            dim = _safearray.SafeArrayGetDim(self)

            if dim == 1:
                num_elements = self._get_size(1)
                return tuple(self._get_elements_raw(num_elements))
            elif dim == 2:
                # get the number of elements in each dimension
                rows, cols = self._get_size(1), self._get_size(2)
                # get all elements
                result = self._get_elements_raw(rows * cols)
                # transpose the result, because it is in VB order
                result = [tuple(result[r::rows]) for r in range(rows)]
                return tuple(result)
            else:
                lowerbounds = [_safearray.SafeArrayGetLBound(self, d) for d in range(1, dim+1)]
                indexes = (c_long * dim)(*lowerbounds)
                upperbounds = [_safearray.SafeArrayGetUBound(self, d) for d in range(1, dim+1)]
                return self._get_row(0, indexes, lowerbounds, upperbounds)

        def _get_elements_raw(self, num_elements):
            """Returns a flat list containing ALL elements in the safearray."""
            from comtypes.automation import VARIANT
            # XXX Not sure this is true:
            # For VT_UNKNOWN and VT_DISPATCH, we should retrieve the
            # interface iid by SafeArrayGetIID().
            ptr = POINTER(self._itemtype_)() # container for the values
            _safearray.SafeArrayAccessData(self, byref(ptr))
            try:
                if self._itemtype_ == VARIANT:
                    return [i.value for i in ptr[:num_elements]]
                elif issubclass(self._itemtype_, POINTER(IUnknown)):
                    iid = _safearray.SafeArrayGetIID(self)
                    itf = com_interface_registry[str(iid)]
                    # COM interface pointers retrieved from array
                    # must be AddRef()'d if non-NULL.
                    elems = ptr[:num_elements]
                    result = []
                    for p in elems:
                        if bool(p):
                            p.AddRef()
                            result.append(p.QueryInterface(itf))
                        else:
                            # return a NULL-interface pointer.
                            result.append(POINTER(itf)())
                    return result
                else:
                    # If the safearray elements are NOT native python
                    # objects, the containing safearray must be kept
                    # alive until all the elements are destroyed.
                    if not issubclass(self._itemtype_, Structure):
                        # Creating and returning numpy arrays instead
                        # of Python tuples from a safearray is a lot
                        # faster, but only pays off for large arrays
                        # because of a fixed per-call overhead.
                        # Also, for backwards compatibility, some clients
                        # expect a Python tuple - so there should be a way
                        # to select what should be returned.  How could
                        # that work?
##                        # A hack which would return numpy arrays
##                        # instead of Python lists.  To be effective,
##                        # the result must not converted into a tuple
##                        # in the caller so there must be changes as
##                        # well!
##
##                        # Crude hack to create and attach an
##                        # __array_interface__ property to the
##                        # pointer instance
##                        array_type = ptr._type_ * num_elements
##                        if not hasattr(array_type, "__array_interface__"):
##                            import numpy.ctypeslib
##                            numpy.ctypeslib.prep_array(array_type)
##                        # use the array_type's __array_interface__, ...
##                        aif = array_type.__array_interface__.__get__(ptr)
##                        # overwrite the 'data' member so that it points to the
##                        # address we want to use
##                        aif["data"] = (cast(ptr, c_void_p).value, False)
##                        ptr.__array_interface__ = aif
##                        return numpy.array(ptr, copy=True)
                        return ptr[:num_elements]
                    def keep_safearray(v):
                        v.__keepref = self
                        return v
                    return [keep_safearray(x) for x in ptr[:num_elements]]
            finally:
                _safearray.SafeArrayUnaccessData(self)

        def _get_row(self, dim, indices, lowerbounds, upperbounds):
            # loop over the index of dimension 'dim'
            # we have to restore the index of the dimension we're looping over
            restore = indices[dim]

            result = []
            obj = self._itemtype_()
            pobj = byref(obj)
            if dim+1 == len(indices):
                # It should be faster to lock the array and get a whole row at once?
                # How to calculate the pointer offset?
                for i in range(indices[dim], upperbounds[dim]+1):
                    indices[dim] = i
                    _safearray.SafeArrayGetElement(self, indices, pobj)
                    result.append(obj.value)
            else:
                for i in range(indices[dim], upperbounds[dim]+1):
                    indices[dim] = i
                    result.append(self._get_row(dim+1, indices, lowerbounds, upperbounds))
            indices[dim] = restore
            return tuple(result) # for compatibility with pywin32.

    class _(partial, POINTER(POINTER(sa_type))):

##        @classmethod
        def from_param(cls, value):
            if isinstance(value, cls._type_):
                return byref(value)
            return byref(cls._type_.create(value, extra))
        from_param = classmethod(from_param)

        def __setitem__(self, index, value):
            # create an LP_SAFEARRAY_... instance
            pa = self._type_.create(value, extra)
            # XXX Must we destroy the currently contained data?
            # fill it into self
            super(POINTER(POINTER(sa_type)), self).__setitem__(index, pa)

    return sa_type

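A side note on the commented-out numpy hack inside _get_elements_raw above: recent numpy versions expose the same trick through numpy.ctypeslib.as_array, which builds an ndarray view over a ctypes pointer without patching __array_interface__ by hand. A minimal sketch under that assumption; buf, ptr and num_elements stand in for the locked safearray data:

import ctypes
import numpy

# A throwaway ctypes buffer standing in for the memory that
# SafeArrayAccessData would hand back.
buf = (ctypes.c_double * 4)(1.0, 2.0, 3.0, 4.0)
ptr = ctypes.cast(buf, ctypes.POINTER(ctypes.c_double))
num_elements = 4

# View the pointed-to memory as an ndarray, then copy it so the result
# stays valid after SafeArrayUnaccessData releases the memory.
view = numpy.ctypeslib.as_array(ptr, shape=(num_elements,))
data = view.copy()
print(data)   # [1. 2. 3. 4.]
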
Example 119

Project: r2lldb Source File: __init__.py
def rap(debugger, command, result, dict):
	def r2cmd(c):
		print ("_____(%s)___"%c)
		if c == "":
			return
		if c[0:2] == ". ":
			return run_script(c[2:])
		if c[0] == ":":
			return dbg.cmd(c[1:])
		if c == "q":
			print ("STOP")
			rs.stop()
			return "OK"
		elif c[0:7] == "setenv ":
			a = c[7:].strip().split(" ",1)
			return dbg.setenv(a[0],a[1])
		elif c == "env":
			return dbg.cmd("print $environ")
		elif c[0:7] == "dlopen ":
			return dbg.dlopen(a[7:].strip())
		elif c[0:2] == "o ":
			return dbg.cmd("target create %s"%c[2:])
		elif c[0] == "o":
			return "TODO: show target"
		elif c == "objc":
			return dbg.objcListClasses()
		elif c == "run":
			return dbg.cmd("run")
		elif c[0:5] == "call ":
			return dbg.cmd("call "+ c[4:])
		elif c[0:5] == "lldb ":
			return dbg.cmd(c[5:])
		elif c == "dc":
			return dbg.cont()
		elif c == "ds":
			return dbg.cmd("stepi")
		elif c == "dso":
			return dbg.cmd("next") # thread step-over
		elif c == "dbt":
			res = ''
			for a in dbg.frames():
				line = "%d %s %s %s\n"%(a['index'], a['addr'], a['file'], a['meth'])
				res = res + line
			return res
		elif c == "i":
			print "NAME ERR"
			#if dbg.isThumb():
			#	s = s + "e asm.bits=16 # thumb\n"
			# TODO 
			#(lldb) target list
			#Current targets:
			#* target #0: path-to-bin ( arch=i386-apple-ios, platform=ios-simulator, pid=21617, state=stopped )
			try:
				return cmd("target list")
			except:
				return "cmd(target list)\n"
		elif c == "dbc":
			return "TODO: dbc"
		elif c[0:3] == "dt ":
			try:
				args = c[3:].split(' ', 1)
				if len(args)>1:
					if trace.add (args[0], args[1]):
						return "Trace added"
				else:
					if not trace.add(args[0], "?e trace"):
						return "Trace add fail"
			except:
				return "Trace exception"
			return ""
		elif c == "dT":
			return loop.listTracePoints()
		elif c[0:3] == "dT ":
			return loop.setTracePoint(c[3:])
		elif c == "dT?":
			return """Usage:
 dT    list all debugloop traces
 dT-   remove all tracepoints
 dT A  add specific address for tracing
 dTc   run/continue into the debugloop
"""
		elif c == "dTc":
			return loop.runLoop()
		elif c == "quit":
			rs.disconnect()
			del sys.modules["r2lldb"]
			return "Disconnected. Please Quit\n"
		elif c[0:3] == "ls ":
			return dbg.system_ls(c[3:])
		elif c == "ls":
			return dbg.system_ls(".")
		elif c[0:4] == "cat ":
			return dbg.system_cat(c[4:])
		elif c == "cat":
			return "cat: Missing file"
		elif c[0:5] == "head ":
			return dbg.system_cat(c[5:], True)
		elif c == "head":
			return "head: Missing file"
		elif c == "dt":
			return trace.list()
		elif c == "dcta":
			print(s)
			return "Set 0 traces"
		elif c == "dct":
			while True:
				try:
					dbg.cmd("continue")
					pc = dbg.getRegister("pc")
					if pc == '0':
						break
					t = None
					try:
						t = trace.get(pc)
					except:
						pass
					if not t:
						print ("Address not traced",pc)
						break
					rs.system(t)
				except Exception as e:
					print(e)
					traceback.print_stack()
					return "Exception happens"
			print ("Trace Done")
			return "Trace Done"
		elif c == "dks":
			dbg.stop()
		elif c == "is":
			syms = dbg.symbols()
			symbols = ""
			for a in syms:
				name = a['name']
				# XXX: filter flag name
				name = name.replace("'",'_')
				name = name.replace(' ','_')
				name = name.replace(' ','_')
				name = name.replace('-','_')
				name = name.replace('~','_')
				name = name.replace('+','_')
				name = name.replace('$','_')
				name = name.replace('&','_')
				name = name.replace('@','_')
				name = name.replace('|','_')
				name = name.replace('%','_')
				name = name.replace(';','_')
				name = name.replace('!','_')
				name = name.replace('`','_')
				name = name.replace(',','_')
				name = name.replace('/','_')
				name = name.replace('*','_')
				name = name.replace('(','_')
				name = name.replace(')','_')
				name = name.replace('[','_')
				name = name.replace(']','_')
				name = name.replace('<','_')
				name = name.replace('>','_')
				# TODO: many symbols are defined multiple times
				if name[0:2]!='0x':
					line = "f sym.%s = %s\n"%(name,a['addr'])
					symbols = symbols + line
			return symbols
		elif c == "db-*":
			return dbg.bp_clear()
		elif c[0:5] == "db 0x":
			return dbg.bp_addr(c[3:])
		elif c[0:3] == "db ":
			return dbg.bp_symbol(c[3:])
		elif c[0:4] == "dbo ":
			a = c[4:].strip().split(' ')
			if len(a) != 2:
				return "Usage: dbo OBJCLASS OBJCMETHOD"
			return dbg.bp_obj(a[0], a[1])
		elif c == "db":
			bps = dbg.bp_list()
			n = 0
			out = ''
			for a in bps:
				line = ("%d  %s  %s\n"%(n, a['type'], a[a['type']]))
				n = n + 1
				out = out + line
			#print(dbg.bp_list())
			return out + "\nFound %d breakpoints"%n
			#dbg.cmd("break list")
		elif c == "dm?":
			return """Usage: dm"
			dm         list maps
			dm [addr]  show address information
			"""
		elif c == "dm":
			return dbg.cmd('image list')
		elif c[0:3] == "dm ":
			return dbg.cmd('image lookup --address %s'%c[3:])
		elif c == "dfv":
			return dbg.cmd("fr v") # -a
		elif c == "dcue":
			return dbg.run_to_entry()
		elif c == "dr=":
			try:
				s = "" + dbg.cmd("reg read")
				nl = s.find("\n")
				if nl != -1:
					s = s[nl+1:]
					res = ""
					col = 0
					while True:
						nl = s.find("\n")
						if nl == -1:
							break
						#s = s.replace("\n", "") + "\n"
						line = s[0:nl]
						col = col + 1
						res = res + line
						if col>1:
							col = 0
							res = res + "\n"
						s = s[nl+1:]
					s = res
				#s = s.split("\n").join(" ")
				return s #s.split("\n").join(" ") + "\n"
			except:
				print "ERRER"
				return "ERROR"
		elif c == "dr":
			return dbg.cmd('reg read')
		elif c == "dra":
			return dbg.cmd('reg read -a')
		elif c == "dr*":
			regs = dbg.cmd("reg read").strip().split("\n")
			res = ""
			for a in regs:
				a = a.strip()
				if a.find(" = ") == -1:
					next
				mo = re.match( r'(.*) = ([^ ]*)', a , re.M|re.I)
				if mo:
					line = "f %s = %s\n"%(mo.group(1), mo.group(2))
					line = line + "ar %s = %s\n"%(mo.group(1), mo.group(2))
					res = res + line
			#regs = dbg.getRegister("pc")
			return res
		elif c == "?":
			return """Usage: =![cmd] ...       # r2lldb integration
=!?                      # show r2lldb's help (this one)
=!help                   # show lldb's help
=!i                      # target information
=!is                     # list symbols
=!dfv                    # show frame variables (arguments + locals)
=!ls [path]              # list files from remote device
=!cat [path]             # show contents of file
=!call (int)getuid()     # inject and run code in target process
=!lldb ..command..       # run lldb command
=!up,down,list           # lldb commands to select frames and show source
=!dks                    # stop debugged process
=!dm                     # show maps (image list)
=!dr                     # show registers
=!dra                    # show all registers
=!dr*                    # show registers as r2 flag commands
=!db-*                   # remove all breakpoints
=!db                     # list breakpoints
=!db 0x12924             # set breakpoint at address
=!db objc_msgSend        # set breakpoint on symbol
=!dbo NSString init:     # set objc breakpoint
=!dbt                    # show backtrace
=!ds                     # step
=!dcue                   # continue until entrypoint
=!dso                    # step over
=!dt                     # list all trace points
=!dt 0x804040 =!dr       # add tracepoint for this address
=!dc                     # continue
=!dct                    # continue with tracing
=!env                    # show process environment
=!objc                   # list all objc classes
=!setenv k v             # set variable in target process
=!dlopen /path/to/lib    # dlopen lib (libr2.so, frida?)
=!quit                   # quit r2lldb server loop
"""
		return None
	port = int(command)
	rs = RapServer()
	def __read(sz):
		return dbg.read(rs.offset, sz)
	def __write(buf):
		return dbg.write(rs.offset, buf)
	def __seek(off,when):
		if when == 2:
			return 0xffffffffffffffff
		rs.offset = off
		return dbg.seek(off, when)
	def __cmd(c):
		c = c[0:len(c)-1].strip()
		res = r2cmd(c)
		if res:
			return res
		return dbg.cmd(c)
	rs.handle_system = __cmd
	rs.handle_cmd = __cmd
	rs.handle_read = __read
	rs.handle_write = __write
	rs.handle_seek = __seek
	rs.listen_tcp (port)

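The quit handler above uses a classic sys.modules idiom: deleting a module's cache entry means the next import statement re-executes the module from disk instead of returning the cached object. A small sketch of that reload-on-next-import pattern; forget() is a hypothetical helper, and json is just a stand-in for any already-imported module:

import sys

def forget(name):
    """Drop 'name' and its submodules from sys.modules so the next
    import re-runs the module code instead of hitting the cache."""
    for key in [k for k in sys.modules if k == name or k.startswith(name + '.')]:
        del sys.modules[key]

import json                 # first import: executes the module
forget('json')              # forget it, like del sys.modules["r2lldb"] above
import json                 # re-imported from scratch, not from the cache
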
Example 120

Project: solpy Source File: expedite.py
def string_notes(system, run=0.0, station_class=3):
    """page 5"""

    name, usaf = geo.closest_usaf(geo.zip_coordinates(system.zipcode), \
            station_class)
    mintemp = eere.minimum(usaf)
    twopercent_temp = eere.twopercent(usaf)
    ac_kva_rated = 0.0
    dc_rated = 0.0
    ac_kw = 0.0
    for i in system.shape:
        dc_rated += i.array.p_max
        try:
            if i.phase == 1:
                ac_kva_rated += i.current * i.ac_voltage
            else:
                ac_kva_rated += i.phase * i.current * i.ac_voltage / 3**.5
        except Exception:
            ac_kva_rated += i.p_aco
        ac_kw += i.p_aco
    notes = []
    notes.append("%s KVA AC RATED" % round(ac_kva_rated/1000.0, 2))
    notes.append("%s KW AC RATED" % round(ac_kw/1000.0, 2))
    notes.append("%s KW DC RATED" % round(dc_rated/1000.0, 2))
    #BUG: This doesn't work for unbalanced 3 phase
    if system.phase == 1:
        a_ac = round(ac_kva_rated/i.ac_voltage, 1)
    else:
        a_ac = round(ac_kva_rated/i.ac_voltage/3**.5, 1)
    notes.append("System AC Output Current: %s A" % a_ac)

    notes.append("Nominal AC Voltage: %s V" % i.ac_voltage)
    notes.append("")
    notes.append("Minimum Temperature: %s C" % mintemp)
    notes.append("2 Percent Max Temperature: %s C" % twopercent_temp)
    notes.append("Weather Source: %s %s" % (name, usaf))
    notes.append("")
    d_inverters, d_panels = system.describe()
    a_max = 0
    for i in system.shape:
        module_name = i.array.dump()['panel']
        if module_name in d_panels:
            module = modules.Module(module_name)
            notes.append("PV Module Ratings @ STC")
            notes.append("Module Make: %s" % module.make)
            notes.append("Module Model: %s" % module.model)
            notes.append("Quantity: %s" % d_panels[module_name])
            notes.append("Max Power-Point Current (Imp): %s A" % module.i_mpp)
            notes.append("Max Power-Point Voltage (Vmp): %s V" % module.v_mpp)
            notes.append("Open-Circuit Voltage (Voc): %s V" % module.v_oc)
            notes.append("Short-Circuit Current (Isc): %s A" % module.i_sc)
            notes.append("Maximum Power (Pmax): %s W" % round(module.p_max, 1))

            notes.append("")
            d_panels.pop(module_name)
        if i.model in d_inverters:
            notes.append("Inverter Make: %s" % i.make)
            notes.append("Inverter Model: %s" % i.model)
            notes.append("Quantity: %s" % d_inverters[i.model])
            notes.append("Max Power: %s KW" % round(i.p_aco/1000.0, 1))
            #this is a hack... This should be calculated based upon power cores
            if hasattr(i, 'current'):
                notes.append("Max AC Current: %s A" % round(i.current, 1))
            elif i.ac_voltage == 480:
                notes.append("Max AC Current: %s A" % \
                        round(i.p_aco*1.0/i.ac_voltage/3**.5, 1))
            else:
                notes.append("Max AC Current: %s A" % \
                        round(i.p_aco*1.0/i.ac_voltage, 1))
            #greater than 1 in parallel
            if i.array.mcount() > 1:
                notes.append("DC Operating Current: %s A" % \
                        round(i.array.i_mpp(), 1))
                notes.append("DC Short Circuit Current: %s A" % \
                        round(i.array.i_sc(), 1))
            #greater than 1 in series
            if i.array.mcount() > 1:
                notes.append("DC Operating Voltage: %s V" % \
                        round(i.array.v_dc(), 1))
                notes.append("System Max DC Voltage: %s V" % \
                        round(i.array.v_max(mintemp), 1))
                if i.array.v_max(mintemp) > 600:
                    logger.warning("WARNING: Array exceeds 600V DC")
                notes.append("Pnom Ratio: %s" % \
                        round((i.array.p_max/i.p_aco), 2))
                if (i.array.v_dc(twopercent_temp) * .9) < i.mppt_low:
                    logger.warning("WARNING: " \
                            "Array IV Knee drops out of Inverter range")
                if (i.array.p_max/i.p_aco) < 1.1:
                    logger.warning("WARNING: Array potentially undersized")
            notes.append("")
            d_inverters.pop(i.model)
        if i.array.v_max(mintemp) > a_max:
            a_max = i.array.v_max(mintemp)

    notes.append("Array Azimuth: %s Degrees" % system.azimuth)
    notes.append("Array Tilt: %s Degrees" % system.tilt)
    sols_9 = system.solstice(9)
    sols_15 = system.solstice(15)
    notes.append("December 21 9:00 AM Sun Azimuth: %s Degrees" % \
            (round(degrees(sols_9[1]), 1)))
    notes.append("December 21 9:00 AM Sun Altitude: %s Degrees" % \
            (round(degrees(sols_9[0]), 1)))
    notes.append("December 21 3:00 PM Sun Azimuth: %s Degrees" % \
            (round(degrees(sols_15[1]), 1)))
    notes.append("December 21 3:00 PM Sun Altitude: %s Degrees" % \
            (round(degrees(sols_9[0]), 1)))
    if 'geomag' in sys.modules:
        notes.append("Magnetic declination: %s Degrees" % \
                round(geomag.declination(dlat=system.place[0], \
                dlon=system.place[1])))
    notes.append("Minimum Row space ratio: %s" % \
            round(system.min_row_space(1.0), 2))
    if __name__ == '__main__':
        print "\n".join(notes)
    else:
        logger.info("Plant Details:\n" + "\n".join(notes))

    print ""
    print "Minimum Bundle"
    min_c = vd.vd(a_ac, 5, verbose=False)
    try:
        ee.assemble(min_c, a_ac, conduit='STEEL')
        if run > 0:
            print "Long Run"
            min_c = vd.vd(a_ac, run, v=i.ac_voltage, t_amb=15, pf=.95, \
                    material='AL', verbose=False)
            ee.assemble(min_c, a_ac, conduit='PVC')
    except Exception:
        print "Warning: Multiple sets of conductors"
    return notes

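string_notes only appends the magnetic-declination note when 'geomag' in sys.modules is true, i.e. when an import attempted elsewhere in the module succeeded. A minimal sketch of that optional-dependency guard; declination_note is a hypothetical wrapper, and the dlat/dlon keywords mirror the call in the example:

import sys

try:
    import geomag            # optional dependency; absence is fine
except ImportError:
    pass

def declination_note(lat, lon):
    # The guarded branch only runs if the import above succeeded, so the
    # geomag name is guaranteed to exist when it is evaluated.
    if 'geomag' in sys.modules:
        return "Magnetic declination: %s Degrees" % round(
            geomag.declination(dlat=lat, dlon=lon))
    return "Magnetic declination: unavailable (geomag not installed)"
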
Example 121

Project: OpenUpgrade Source File: loading.py
Function: load_module_graph
def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=None, report=None, upg_registry=None):
    """Migrates+Updates or Installs all module nodes from ``graph``
       :param graph: graph of module nodes to load
       :param status: deprecated parameter, unused, left to avoid changing signature in 8.0
       :param perform_checks: whether module descriptors should be checked for validity (prints warnings
                              for some cases)
       :param skip_modules: optional list of module names (packages) which have previously been loaded and can be skipped
       :return: list of modules that were installed or updated
    """
    def load_test(module_name, idref, mode):
        cr.commit()
        try:
            _load_data(cr, module_name, idref, mode, 'test')
            return True
        except Exception:
            _test_logger.exception(
                'module %s: an exception occurred in a test', module_name)
            return False
        finally:
            if tools.config.options['test_commit']:
                cr.commit()
            else:
                cr.rollback()
                # avoid keeping stale xml_id, etc. in cache 
                openerp.modules.registry.RegistryManager.clear_caches(cr.dbname)


    def _get_files_of_kind(kind):
        if kind == 'demo':
            kind = ['demo_xml', 'demo']
        elif kind == 'data':
            kind = ['init_xml', 'update_xml', 'data']
        if isinstance(kind, str):
            kind = [kind]
        files = []
        for k in kind:
            for f in package.data[k]:
                files.append(f)
                if k.endswith('_xml') and not (k == 'init_xml' and not f.endswith('.xml')):
                    # init_xml, update_xml and demo_xml are deprecated except
                    # for the case of init_xml with yaml, csv and sql files as
                    # we can't specify noupdate for those files.
                    correct_key = 'demo' if k.count('demo') else 'data'
                    _logger.warning(
                        "module %s: key '%s' is deprecated in favor of '%s' for file '%s'.",
                        package.name, k, correct_key, f
                    )
        return files

    def _load_data(cr, module_name, idref, mode, kind):
        """

        kind: data, demo, test, init_xml, update_xml, demo_xml.

        noupdate is False, unless it is demo data or it is csv data in
        init mode.

        """
        try:
            if kind in ('demo', 'test'):
                threading.currentThread().testing = True
            for filename in _get_files_of_kind(kind):
                _logger.info("loading %s/%s", module_name, filename)
                noupdate = False
                if kind in ('demo', 'demo_xml') or (filename.endswith('.csv') and kind in ('init', 'init_xml')):
                    noupdate = True
                tools.convert_file(cr, module_name, filename, idref, mode, noupdate, kind, report)
        finally:
            if kind in ('demo', 'test'):
                threading.currentThread().testing = False

    if status is None:
        status = {}

    if skip_modules is None:
        skip_modules = []

    processed_modules = []
    loaded_modules = []
    registry = openerp.registry(cr.dbname)
    migrations = openerp.modules.migration.MigrationManager(cr, graph)
    _logger.info('loading %d modules...', len(graph))

    registry.clear_manual_fields()

    # suppress commits to have the upgrade of one module in just one transaction
    cr.commit_org = cr.commit
    cr.commit = lambda *args: None
    cr.rollback_org = cr.rollback
    cr.rollback = lambda *args: None
    openerp.osv.fields.set_migration_cursor(cr)

    # register, instantiate and initialize models for each modules
    t0 = time.time()
    t0_sql = openerp.sql_db.sql_counter

    for index, package in enumerate(graph):
        module_name = package.name
        module_id = package.id

        if module_name in skip_modules or module_name in loaded_modules:
            continue

        migrations.migrate_module(package, 'pre')
        load_openerp_module(package.name)

        new_install = package.state == 'to install'
        if new_install:
            py_module = sys.modules['openerp.addons.%s' % (module_name,)]
            pre_init = package.info.get('pre_init_hook')
            if pre_init:
                getattr(py_module, pre_init)(cr)

        models = registry.load(cr, package)

        loaded_modules.append(package.name)
        if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'):
            registry.setup_models(cr, partial=True)
            # OpenUpgrade: add this module's models to the registry
            local_registry = {}
            for model in models:
                if not model._auto:
                    continue
                openupgrade_loading.log_model(model, local_registry)
            openupgrade_loading.compare_registries(
                cr, package.name, upg_registry, local_registry)
            init_module_models(cr, package.name, models)

        idref = {}

        mode = 'update'
        if hasattr(package, 'init') or package.state == 'to install':
            mode = 'init'

        if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'):
            # Can't put this line out of the loop: ir.module.module will be
            # registered by init_module_models() above.
            modobj = registry['ir.module.module']

            if perform_checks:
                modobj.check(cr, SUPERUSER_ID, [module_id])

            if package.state=='to upgrade':
                # upgrading the module information
                modobj.write(cr, SUPERUSER_ID, [module_id], modobj.get_values_from_terp(package.data))
            _load_data(cr, module_name, idref, mode, kind='data')
            has_demo = hasattr(package, 'demo') or (package.dbdemo and package.state != 'installed')
            if has_demo:
                _load_data(cr, module_name, idref, mode, kind='demo')
                cr.execute('update ir_module_module set demo=%s where id=%s', (True, module_id))
                modobj.invalidate_cache(cr, SUPERUSER_ID, ['demo'], [module_id])

            # OpenUpgrade: add 'try' block for logging exceptions
            # as errors in post scripts seem to be dropped
            try:
                migrations.migrate_module(package, 'post')
            except Exception as exc:
                _logger.error('Error executing post migration script for module %s: %s',
                              package, exc)
                raise

            # Update translations for all installed languages
            modobj.update_translations(cr, SUPERUSER_ID, [module_id], None, {'overwrite': openerp.tools.config["overwrite_existing_translations"]})

            registry._init_modules.add(package.name)

            if new_install:
                post_init = package.info.get('post_init_hook')
                if post_init:
                    getattr(py_module, post_init)(cr, registry)

            # validate all the views at a whole
            registry['ir.ui.view']._validate_module_views(cr, SUPERUSER_ID, module_name)

            if has_demo:
                # launch tests only in demo mode, allowing tests to use demo data.
                if tools.config.options['test_enable']:
                    # YAML test
                    report.record_result(load_test(module_name, idref, mode))
                    # Python tests
                    ir_http = registry['ir.http']
                    if hasattr(ir_http, '_routing_map'):
                        # Force routing map to be rebuilt between each module test suite
                        del(ir_http._routing_map)
                    report.record_result(openerp.modules.module.run_unit_tests(module_name, cr.dbname))

            processed_modules.append(package.name)

            ver = adapt_version(package.data['version'])
            # Set new modules and dependencies
            modobj.write(cr, SUPERUSER_ID, [module_id], {'state': 'installed', 'latest_version': ver})

            package.state = 'installed'
            for kind in ('init', 'demo', 'update'):
                if hasattr(package, kind):
                    delattr(package, kind)

        registry._init_modules.add(package.name)
        cr.commit_org()

        # OpenUpgrade edit start:
        # if there's a tests directory, run those if tests are enabled
        tests_dir = os.path.join(
            openerp.modules.module.get_module_path(package.name),
            'migrations',
            adapt_version(package.data['version']),
            'tests',
        )
        # check for an environment variable because we don't want to mess
        # with odoo's config.py, but we also don't want to run existing
        # tests
        if os.environ.get('OPENUPGRADE_TESTS') and os.path.exists(
            tests_dir
        ):
            import unittest
            threading.currentThread().testing = True
            tests = unittest.defaultTestLoader.discover(tests_dir, top_level_dir=tests_dir)
            report.record_result(
                unittest.TextTestRunner(
                    verbosity=2,
                    stream=openerp.modules.module.TestStream(package.name),
                ).run(tests)
                .wasSuccessful()
            )
            threading.currentThread().testing = False
        # OpenUpgrade edit end

    _logger.log(25, "%s modules loaded in %.2fs, %s queries", len(graph), time.time() - t0, openerp.sql_db.sql_counter - t0_sql)

    registry.clear_manual_fields()

    cr.commit = cr.commit_org
    cr.commit()
    openerp.osv.fields.set_migration_cursor()

    return loaded_modules, processed_modules

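load_module_graph resolves pre_init_hook and post_init_hook by fetching the already-imported addon from sys.modules and calling the named attribute on it. A stripped-down sketch of that lookup-and-dispatch pattern; run_hook and the module name in the usage comment are illustrative, not part of the original API:

import sys

def run_hook(module_name, hook_name, *args):
    """Call 'hook_name' on an already-imported module, if it defines one."""
    py_module = sys.modules[module_name]      # KeyError if never imported
    hook = getattr(py_module, hook_name, None)
    if hook is not None:
        return hook(*args)

# e.g. run_hook('openerp.addons.some_addon', 'post_init_hook', cr, registry)
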
Example 122

Project: OpenUpgrade Source File: loading.py
def load_modules(db, force_demo=False, status=None, update_module=False):
    initialize_sys_path()

    force = []
    if force_demo:
        force.append('demo')

    upg_registry = {}
    cr = db.cursor()
    try:
        if not openerp.modules.db.is_initialized(cr):
            _logger.info("init db")
            openerp.modules.db.initialize(cr)
            update_module = True # process auto-installed modules
            tools.config["init"]["all"] = 1
            tools.config['update']['all'] = 1
            if not tools.config['without_demo']:
                tools.config["demo"]['all'] = 1

        # This is a brand new registry, just created in
        # openerp.modules.registry.RegistryManager.new().
        registry = openerp.registry(cr.dbname)

        if 'base' in tools.config['update'] or 'all' in tools.config['update']:
            cr.execute("update ir_module_module set state=%s where name=%s and state=%s", ('to upgrade', 'base', 'installed'))

        # STEP 1: LOAD BASE (must be done before module dependencies can be computed for later steps) 
        graph = openerp.modules.graph.Graph()
        graph.add_module(cr, 'base', force)
        if not graph:
            _logger.critical('module base cannot be loaded! (hint: verify addons-path)')
            raise ImportError('Module `base` cannot be loaded! (hint: verify addons-path)')

        # processed_modules: for cleanup step after install
        # loaded_modules: to avoid double loading
        report = registry._assertion_report
        loaded_modules, processed_modules = load_module_graph(cr, graph, status, perform_checks=update_module, report=report, upg_registry=upg_registry)

        load_lang = tools.config.pop('load_language')
        if load_lang or update_module:
            # some base models are used below, so make sure they are set up
            registry.setup_models(cr, partial=True)

        if load_lang:
            for lang in load_lang.split(','):
                tools.load_language(cr, lang)

        # STEP 2: Mark other modules to be loaded/updated
        if update_module:
            modobj = registry['ir.module.module']
            if ('base' in tools.config['init']) or ('base' in tools.config['update']):
                _logger.info('updating modules list')
                modobj.update_list(cr, SUPERUSER_ID)

            _check_module_names(cr, itertools.chain(tools.config['init'].keys(), tools.config['update'].keys()))

            mods = [k for k in tools.config['init'] if tools.config['init'][k]]
            if mods:
                ids = modobj.search(cr, SUPERUSER_ID, ['&', ('state', '=', 'uninstalled'), ('name', 'in', mods)])
                if ids:
                    modobj.button_install(cr, SUPERUSER_ID, ids)

            mods = [k for k in tools.config['update'] if tools.config['update'][k]]
            if mods:
                # OpenUpgrade: in standard Odoo, '--update all' just means
                # '--update base' + upward (installed) dependencies. This breaks
                # the chain when new glue modules are encountered.
                # E.g. purchase in 8.0 depends on stock_account and report,
                # both of which are new. They may be installed, but purchase as
                # an upward dependency is not selected for upgrade.
                # Therefore, explicitly select all installed modules for
                # upgrading in OpenUpgrade.
                domain = [('state', '=', 'installed')]
                if 'all' not in mods:
                    domain.append(('name', 'in', mods))
                ids = modobj.search(cr, SUPERUSER_ID, domain)
                if ids:
                    modobj.button_upgrade(cr, SUPERUSER_ID, ids)

            cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base'))
            modobj.invalidate_cache(cr, SUPERUSER_ID, ['state'])


        # STEP 3: Load marked modules (skipping base which was done in STEP 1)
        # IMPORTANT: this is done in two parts, first loading all installed or
        #            partially installed modules (i.e. installed/to upgrade), to
        #            offer a consistent system to the second part: installing
        #            newly selected modules.
        #            We include the modules 'to remove' in the first step, because
        #            they are part of the "currently installed" modules. They will
        #            be dropped in STEP 5 later, before restarting the loading
        #            process.
        # IMPORTANT 2: We have to loop here until all relevant modules have been
        #              processed, because in some rare cases the dependencies have
        #              changed, and modules that depend on an uninstalled module
        #              will not be processed on the first pass.
        #              It's especially useful for migrations.
        previously_processed = -1
        while previously_processed < len(processed_modules):
            previously_processed = len(processed_modules)
            processed_modules += load_marked_modules(cr, graph,
                ['installed', 'to upgrade', 'to remove'],
                force, status, report, loaded_modules, update_module, upg_registry)
            if update_module:
                processed_modules += load_marked_modules(cr, graph,
                    ['to install'], force, status, report,
                    loaded_modules, update_module, upg_registry)

        registry.setup_models(cr)

        # STEP 4: Finish and cleanup installations
        if processed_modules:
            cr.execute("""select model,name from ir_model where id NOT IN (select distinct model_id from ir_model_access)""")
            for (model, name) in cr.fetchall():
                if model in registry and not registry[model].is_transient() and not isinstance(registry[model], openerp.osv.orm.AbstractModel):
                    _logger.warning('The model %s has no access rules, consider adding one. E.g. access_%s,access_%s,model_%s,,1,0,0,0',
                        model, model.replace('.', '_'), model.replace('.', '_'), model.replace('.', '_'))

            # Temporary warning while we remove access rights on osv_memory objects, as they have
            # been replaced by owner-only access rights
            cr.execute("""select distinct mod.model, mod.name from ir_model_access acc, ir_model mod where acc.model_id = mod.id""")
            for (model, name) in cr.fetchall():
                if model in registry and registry[model].is_transient():
                    _logger.warning('The transient model %s (%s) should not have explicit access rules!', model, name)

            cr.execute("SELECT model from ir_model")
            for (model,) in cr.fetchall():
                if model in registry:
                    registry[model]._check_removed_columns(cr, log=True)
                else:
                    _logger.warning("Model %s is declared but cannot be loaded! (Perhaps a module was partially removed or renamed)", model)

            # Cleanup orphan records
            registry['ir.model.data']._process_end(cr, SUPERUSER_ID, processed_modules)

            # OpenUpgrade: call deferred migration steps
            if update_module:
                deferred_90.migrate_deferred(cr, registry)

        for kind in ('init', 'demo', 'update'):
            tools.config[kind] = {}

        cr.commit()

        # STEP 5: Uninstall modules to remove
        if update_module:
            # Remove records referenced from ir_model_data for modules to be
            # removed (and removed the references from ir_model_data).
            cr.execute("SELECT name, id FROM ir_module_module WHERE state=%s", ('to remove',))
            modules_to_remove = dict(cr.fetchall())
            if modules_to_remove:
                pkgs = reversed([p for p in graph if p.name in modules_to_remove])
                for pkg in pkgs:
                    uninstall_hook = pkg.info.get('uninstall_hook')
                    if uninstall_hook:
                        py_module = sys.modules['openerp.addons.%s' % (pkg.name,)]
                        getattr(py_module, uninstall_hook)(cr, registry)

                registry['ir.module.module'].module_uninstall(cr, SUPERUSER_ID, modules_to_remove.values())
                # Recursive reload, should only happen once, because there should be no
                # modules to remove next time
                cr.commit()
                _logger.info('Reloading registry once more after uninstalling modules')
                openerp.api.Environment.reset()
                return openerp.modules.registry.RegistryManager.new(cr.dbname, force_demo, status, update_module)

        # STEP 6: verify custom views on every model
        if update_module:
            Views = registry['ir.ui.view']
            custom_view_test = True
            for model in registry.models.keys():
                if not Views._validate_custom_views(cr, SUPERUSER_ID, model):
                    custom_view_test = False
                    _logger.error('invalid custom view(s) for model %s', model)
            report.record_result(custom_view_test)

        if report.failures:
            _logger.error('At least one test failed when loading the modules.')
        else:
            _logger.info('Modules loaded.')

        # STEP 8: call _register_hook on every model
        for model in registry.models.values():
            model._register_hook(cr)

        # STEP 9: Run the post-install tests
        cr.commit()

        t0 = time.time()
        t0_sql = openerp.sql_db.sql_counter
        if openerp.tools.config['test_enable']:
            if update_module:
                cr.execute("SELECT name FROM ir_module_module WHERE state='installed' and name = ANY(%s)", (processed_modules,))
            else:
                cr.execute("SELECT name FROM ir_module_module WHERE state='installed'")
            for module_name in cr.fetchall():
                report.record_result(openerp.modules.module.run_unit_tests(module_name[0], cr.dbname, position=runs_post_install))
            _logger.log(25, "All post-tested in %.2fs, %s queries", time.time() - t0, openerp.sql_db.sql_counter - t0_sql)
    finally:
        cr.close()

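The uninstall step above can index sys.modules directly because every addon was imported earlier in the same process by load_openerp_module. When that guarantee is weaker, pairing sys.modules.get with an importlib fallback avoids the KeyError; a small sketch, where get_module is a hypothetical helper:

import sys
import importlib

def get_module(name):
    """Return the module object for 'name', importing it on first use."""
    mod = sys.modules.get(name)
    if mod is None:
        mod = importlib.import_module(name)   # also caches it in sys.modules
    return mod
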
Example 123

Project: networking-calico Source File: lib.py
Function: setup_eventlet
    def setUp_eventlet(self):
        """setUp_eventlet

        Setup to intercept sleep calls made by the code under test, and hence
        to (i) control when those expire, and (ii) allow time to appear to pass
        (to the code under test) without actually having to wait for that time.
        """
        # Reset the simulated time (in seconds) that has passed since the
        # beginning of the test.
        self.current_time = 0

        # Make time.time() return current_time.
        self.old_time = sys.modules['time'].time
        sys.modules['time'].time = lambda: self.current_time

        # Reset the dict of current sleepers.  In each dict entry, the key is
        # an eventlet.Queue object and the value is the time at which the sleep
        # should complete.
        self.sleepers = {}

        # Reset the list of spawned eventlet threads.
        self.threads = []

        # Replacement for eventlet.sleep: sleep for some simulated passage of
        # time (as directed by simulated_time_advance), instead of for real
        # elapsed time.
        def simulated_time_sleep(secs=None):
            if secs is None:
                # Thread just wants to yield to any other waiting thread.
                self.give_way()
                return
            # Create a new queue.
            queue = eventlet.Queue(1)
            queue.stack = inspect.stack()[1][3]

            # Add it to the dict of sleepers, together with the waking up time.
            self.sleepers[queue] = self.current_time + secs

            _log.info("T=%s: %s: Start sleep for %ss until T=%s",
                      self.current_time,
                      queue.stack,
                      secs,
                      self.sleepers[queue])

            # Do a zero time real sleep, to allow other threads to run.
            self.real_eventlet_sleep(REAL_EVENTLET_SLEEP_TIME)

            # Block until something is posted to the queue.
            queue.get(True)

            # Wake up.
            return None

        # Replacement for eventlet.spawn: track spawned threads so that we can
        # kill them all when a test case ends.
        def simulated_spawn(*args):

            # Do the real spawn.
            thread = self.real_eventlet_spawn(*args)

            # Remember this thread.
            self.threads.append(thread)

            # Also return it.
            return thread

        def simulated_spawn_after(secs, fn, *args):
            def sleep_then_run():
                simulated_time_sleep(secs)
                fn(*args)

            return simulated_spawn(sleep_then_run)

        # Hook sleeping.
        self.real_eventlet_sleep = eventlet.sleep
        eventlet.sleep = simulated_time_sleep

        # Similarly hook spawning.
        self.real_eventlet_spawn = eventlet.spawn
        eventlet.spawn = simulated_spawn

        self.real_eventlet_spawn_after = eventlet.spawn_after
        eventlet.spawn_after = simulated_spawn_after

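setUp_eventlet patches the live time module through its sys.modules entry and keeps the original function around so it can be restored. A compact sketch of the same save/patch/restore cycle in a plain unittest.TestCase, covering only the simulated clock (the eventlet hooks are omitted):

import sys
import time
import unittest

class FakeClockTest(unittest.TestCase):
    def setUp(self):
        self.current_time = 0
        # Save the real function, then make time.time() report our counter.
        self.old_time = sys.modules['time'].time
        sys.modules['time'].time = lambda: self.current_time

    def tearDown(self):
        # Always restore the real clock, even if the test failed.
        sys.modules['time'].time = self.old_time

    def test_advance(self):
        self.current_time = 42
        self.assertEqual(time.time(), 42)
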
Example 124

Project: ryu Source File: test_ofctl.py
def _add_tests():
    _ofp_vers = {
        'of10': 0x01,
        'of12': 0x03,
        'of13': 0x04,
        'of14': 0x05,
        'of15': 0x06,
    }

    _test_cases = {
        'of10': [
            {
                'method': ofctl_v1_0.mod_flow_entry,
                'request': '1-2-ofp_flow_mod.packet.json',
                'reply': None
            },
        ],
        'of12': [
            {
                'method': ofctl_v1_2.get_desc_stats,
                'request': '3-24-ofp_desc_stats_request.packet.json',
                'reply': '3-0-ofp_desc_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_queue_stats,
                'request': '3-37-ofp_queue_stats_request.packet.json',
                'reply': '3-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet1.json',
                'reply': '3-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet2.json',
                'reply': '3-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet3.json',
                'reply': '3-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_queue_config,
                'request': '3-35-ofp_queue_get_config_request.packet.json',
                'reply': '3-36-ofp_queue_get_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_queue_config,
                'request': 'lib-ofctl-ofp_queue_get_config_request.packet.json',
                'reply': '3-36-ofp_queue_get_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_flow_stats,
                'request': '3-11-ofp_flow_stats_request.packet.json',
                'reply': '3-12-ofp_flow_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_aggregate_flow_stats,
                'request': '3-25-ofp_aggregate_stats_request.packet.json',
                'reply': '3-26-ofp_aggregate_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_table_stats,
                'request': '3-27-ofp_table_stats_request.packet.json',
                'reply': '3-28-ofp_table_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_port_stats,
                'request': '3-29-ofp_port_stats_request.packet.json',
                'reply': '3-30-ofp_port_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_port_stats,
                'request': 'lib-ofctl-ofp_port_stats_request.packet.json',
                'reply': '3-30-ofp_port_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_group_stats,
                'request': '3-61-ofp_group_stats_request.packet.json',
                'reply': '3-62-ofp_group_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_group_stats,
                'request': 'lib-ofctl-ofp_group_stats_request.packet.json',
                'reply': '3-62-ofp_group_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_group_features,
                'request': '3-31-ofp_group_features_stats_request.packet.json',
                'reply': '3-32-ofp_group_features_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.get_group_desc,
                'request': '3-33-ofp_group_desc_stats_request.packet.json',
                'reply': '3-34-ofp_group_desc_stats_reply.packet.json'
            },
            # In OpenFlow 1.2, ofp_port_desc is not defined.
            # We use ofp_features_request to get ports description instead.
            {
                'method': ofctl_v1_2.get_port_desc,
                'request': '3-5-ofp_features_request.packet.json',
                'reply': '3-6-ofp_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_2.mod_flow_entry,
                'request': '3-2-ofp_flow_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_2.mod_group_entry,
                'request': '3-21-ofp_group_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_2.mod_port_behavior,
                'request': '3-22-ofp_port_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_2.send_experimenter,
                'request': '3-16-ofp_experimenter.packet.json',
                'reply': None
            },
        ],
        'of13': [
            {
                'method': ofctl_v1_3.get_desc_stats,
                'request': '4-24-ofp_desc_request.packet.json',
                'reply': '4-0-ofp_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_queue_stats,
                'request': '4-37-ofp_queue_stats_request.packet.json',
                'reply': '4-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet1.json',
                'reply': '4-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet2.json',
                'reply': '4-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet3.json',
                'reply': '4-38-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_queue_config,
                'request': '4-35-ofp_queue_get_config_request.packet.json',
                'reply': '4-36-ofp_queue_get_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_queue_config,
                'request': 'lib-ofctl-ofp_queue_get_config_request.packet.json',
                'reply': '4-36-ofp_queue_get_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_flow_stats,
                'request': '4-11-ofp_flow_stats_request.packet.json',
                'reply': '4-12-ofp_flow_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_aggregate_flow_stats,
                'request': '4-25-ofp_aggregate_stats_request.packet.json',
                'reply': '4-26-ofp_aggregate_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_table_stats,
                'request': '4-27-ofp_table_stats_request.packet.json',
                'reply': '4-28-ofp_table_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_table_features,
                'request': 'lib-ofctl-ofp_table_features_request.packet.json',
                'reply': '4-56-ofp_table_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_port_stats,
                'request': '4-29-ofp_port_stats_request.packet.json',
                'reply': '4-30-ofp_port_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_port_stats,
                'request': 'lib-ofctl-ofp_port_stats_request.packet.json',
                'reply': '4-30-ofp_port_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_meter_stats,
                'request': '4-49-ofp_meter_stats_request.packet.json',
                'reply': '4-50-ofp_meter_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_meter_stats,
                'request': 'lib-ofctl-ofp_meter_stats_request.packet.json',
                'reply': '4-50-ofp_meter_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_meter_features,
                'request': '4-51-ofp_meter_features_request.packet.json',
                'reply': '4-52-ofp_meter_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_meter_config,
                'request': '4-47-ofp_meter_config_request.packet.json',
                'reply': '4-48-ofp_meter_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_meter_config,
                'request': 'lib-ofctl-ofp_meter_config_request.packet.json',
                'reply': '4-48-ofp_meter_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_group_stats,
                'request': '4-57-ofp_group_stats_request.packet.json',
                'reply': '4-58-ofp_group_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_group_stats,
                'request': 'lib-ofctl-ofp_group_stats_request.packet.json',
                'reply': '4-58-ofp_group_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_group_features,
                'request': '4-31-ofp_group_features_request.packet.json',
                'reply': '4-32-ofp_group_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_group_desc,
                'request': '4-33-ofp_group_desc_request.packet.json',
                'reply': '4-34-ofp_group_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.get_port_desc,
                'request': '4-53-ofp_port_desc_request.packet.json',
                'reply': '4-54-ofp_port_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_3.mod_flow_entry,
                'request': '4-2-ofp_flow_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_3.mod_meter_entry,
                'request': '4-45-ofp_meter_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_3.mod_group_entry,
                'request': '4-21-ofp_group_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_3.mod_port_behavior,
                'request': '4-22-ofp_port_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_3.send_experimenter,
                'request': '4-16-ofp_experimenter.packet.json',
                'reply': None
            },
        ],
        'of14': [
            {
                'method': ofctl_v1_4.get_desc_stats,
                'request': '5-24-ofp_desc_request.packet.json',
                'reply': '5-0-ofp_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_queue_stats,
                'request': '5-35-ofp_queue_stats_request.packet.json',
                'reply': '5-36-ofp_queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_queue_desc,
                'request': '5-63-ofp_queue_desc_request.packet.json',
                'reply': '5-64-ofp_queue_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_flow_stats,
                'request': '5-11-ofp_flow_stats_request.packet.json',
                'reply': '5-12-ofp_flow_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_aggregate_flow_stats,
                'request': '5-25-ofp_aggregate_stats_request.packet.json',
                'reply': '5-26-ofp_aggregate_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_table_stats,
                'request': '5-27-ofp_table_stats_request.packet.json',
                'reply': '5-28-ofp_table_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_table_features,
                'request': 'lib-ofctl-ofp_table_features_request.packet.json',
                'reply': '5-54-ofp_table_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_port_stats,
                'request': '5-29-ofp_port_stats_request.packet.json',
                'reply': '5-30-ofp_port_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_meter_stats,
                'request': '5-47-ofp_meter_stats_request.packet.json',
                'reply': '5-48-ofp_meter_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_meter_features,
                'request': '5-49-ofp_meter_features_request.packet.json',
                'reply': '5-50-ofp_meter_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_meter_config,
                'request': '5-45-ofp_meter_config_request.packet.json',
                'reply': '5-46-ofp_meter_config_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_group_stats,
                'request': '5-55-ofp_group_stats_request.packet.json',
                'reply': '5-56-ofp_group_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_group_features,
                'request': '5-31-ofp_group_features_request.packet.json',
                'reply': '5-32-ofp_group_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_group_desc,
                'request': '5-33-ofp_group_desc_request.packet.json',
                'reply': '5-34-ofp_group_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.get_port_desc,
                'request': '5-51-ofp_port_desc_request.packet.json',
                'reply': '5-52-ofp_port_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_4.mod_flow_entry,
                'request': '5-2-ofp_flow_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_4.mod_meter_entry,
                'request': '5-43-ofp_meter_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_4.mod_group_entry,
                'request': '5-21-ofp_group_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_4.mod_port_behavior,
                'request': '5-22-ofp_port_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_4.send_experimenter,
                'request': '5-16-ofp_experimenter.packet.json',
                'reply': None
            },
        ],
        'of15': [
            {
                'method': ofctl_v1_5.get_desc_stats,
                'request': 'libofproto-OFP15-desc_request.packet.json',
                'reply': 'libofproto-OFP15-desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_queue_stats,
                'request': 'lib-ofctl-ofp_queue_stats_request.packet.json',
                'reply': 'libofproto-OFP15-queue_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_queue_desc,
                'request': 'libofproto-OFP15-queue_desc_request.packet.json',
                'reply': 'libofproto-OFP15-queue_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_flow_stats,
                'request': 'libofproto-OFP15-flow_stats_request.packet.json',
                'reply': 'libofproto-OFP15-flow_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_flow_desc_stats,
                'request': 'libofproto-OFP15-flow_desc_request.packet.json',
                'reply': 'libofproto-OFP15-flow_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_flow_desc_stats,
                'request': 'lib-ofctl-OFP15-flow_desc_request.packet.json',
                'reply': 'lib-ofctl-OFP15-flow_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_aggregate_flow_stats,
                'request': 'libofproto-OFP15-aggregate_stats_request.packet.json',
                'reply': 'libofproto-OFP15-aggregate_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_table_stats,
                'request': 'libofproto-OFP15-table_stats_request.packet.json',
                'reply': 'libofproto-OFP15-table_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_table_features,
                'request': 'lib-ofctl-ofp_table_features_request.packet.json',
                'reply': 'libofproto-OFP15-table_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_port_stats,
                'request': 'libofproto-OFP15-port_stats_request.packet.json',
                'reply': 'libofproto-OFP15-port_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_meter_stats,
                'request': 'libofproto-OFP15-meter_stats_request.packet.json',
                'reply': 'libofproto-OFP15-meter_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_meter_features,
                'request': 'libofproto-OFP15-meter_features_request.packet.json',
                'reply': 'libofproto-OFP15-meter_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_meter_desc,
                'request': 'libofproto-OFP15-meter_desc_request.packet.json',
                'reply': 'libofproto-OFP15-meter_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_group_stats,
                'request': 'libofproto-OFP15-group_stats_request.packet.json',
                'reply': 'libofproto-OFP15-group_stats_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_group_features,
                'request': 'libofproto-OFP15-group_features_request.packet.json',
                'reply': 'libofproto-OFP15-group_features_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_group_desc,
                'request': 'libofproto-OFP15-group_desc_request.packet.json',
                'reply': 'libofproto-OFP15-group_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.get_port_desc,
                'request': 'libofproto-OFP15-port_desc_request.packet.json',
                'reply': 'libofproto-OFP15-port_desc_reply.packet.json'
            },
            {
                'method': ofctl_v1_5.mod_flow_entry,
                'request': 'libofproto-OFP15-flow_mod_no_nx.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_5.mod_flow_entry,
                'request': 'lib-ofctl-OFP15-flow_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_5.mod_meter_entry,
                'request': 'libofproto-OFP15-meter_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_5.mod_group_entry,
                'request': 'libofproto-OFP15-group_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_5.mod_port_behavior,
                'request': 'libofproto-OFP15-port_mod.packet.json',
                'reply': None
            },
            {
                'method': ofctl_v1_5.send_experimenter,
                'request': 'libofproto-OFP15-experimenter.packet.json',
                'reply': None
            }
        ],
    }

    def _jsonfile_to_msg(datapath, jsonfile):
        # Rebuild an OFP message object from its captured JSON form.
        with open(jsonfile) as f:
            return ofproto_parser.ofp_msg_from_jsondict(
                datapath, json.load(f))

    this_dir = os.path.dirname(sys.modules[__name__].__file__)
    parser_json_root = os.path.join(this_dir, '../ofproto/json/')
    ofctl_json_root = os.path.join(this_dir, 'ofctl_json/')

    for ofp_ver, tests in _test_cases.items():
        dp = DummyDatapath(_ofp_vers[ofp_ver])
        parser_json_dir = os.path.join(parser_json_root, ofp_ver)
        ofctl_json_dir = os.path.join(ofctl_json_root, ofp_ver)
        for test in tests:
            name = 'test_ofctl_' + ofp_ver + '_' + test['request']
            print('adding %s ...' % name)
            args = {}
            args_json_path = os.path.join(ofctl_json_dir, test['request'])
            if os.path.exists(args_json_path):
                with open(args_json_path) as f:
                    args = json.load(f)
            request = _jsonfile_to_msg(
                dp, os.path.join(parser_json_dir, test['request']))
            reply = None
            expected = None
            if test['reply']:
                reply = _jsonfile_to_msg(
                    dp, os.path.join(parser_json_dir, test['reply']))
                with open(os.path.join(ofctl_json_dir, test['reply'])) as f:
                    expected = json.load(f)
            # Bind the fixtures into a callable and register it as a test method.
            f = functools.partial(
                Test_ofctl._test, name=name, dp=dp, method=test['method'],
                args=args, request=request, reply=reply, expected=expected)
            test_lib.add_method(Test_ofctl, name, f)
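
A minimal sketch of the pattern this example relies on: locate fixture files relative to the test module via sys.modules[__name__].__file__, then attach one generated test method per data entry to a TestCase class. The _CASES table and _make_test helper below are illustrative stand-ins, not part of the project:

import os
import sys
import unittest

# The sys.modules idiom under discussion: find this module's own directory,
# so fixture paths can be resolved relative to the test file.
THIS_DIR = os.path.dirname(sys.modules[__name__].__file__)

_CASES = [  # stand-in for the JSON-driven test matrix above
    {'name': 'add', 'value': 1 + 1, 'expected': 2},
    {'name': 'mul', 'value': 2 * 3, 'expected': 6},
]


class TestGenerated(unittest.TestCase):
    pass


def _make_test(value, expected):
    # Return a closure usable as an ordinary TestCase method.
    def test(self):
        self.assertEqual(expected, value)
    return test


for _case in _CASES:
    # Equivalent in spirit to test_lib.add_method(Test_ofctl, name, f) above.
    setattr(TestGenerated, 'test_' + _case['name'],
            _make_test(_case['value'], _case['expected']))

if __name__ == '__main__':
    unittest.main()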

Example 125

Project: pyxb Source File: setup.py
Function: run
    def run (self):
        # Make sure log messages are supported
        logging.basicConfig()

        # Walk the tests hierarchy looking for tests
        dirs = self.testdirs.split(':')
        tests = [ ]
        while dirs:
            dir = dirs.pop(0)
            if self.trace_tests:
                print('Searching for tests in %s' % (dir,))
            for f in os.listdir(dir):
                fn = os.path.join(dir, f)
                statb = os.stat(fn)
                if stat.S_ISDIR(statb[0]):
                    dirs.append(fn)
                elif self.__TestFile_re.match(f):
                    tests.append(fn)

        number = 0
        import sys
        import traceback
        import unittest
        import types

        # Import each test into its own module, then add the test
        # cases in it to a complete suite.
        loader = unittest.defaultTestLoader
        suite = unittest.TestSuite()
        used_names = set()
        for fn in tests:
            stage = 'compile'
            try:
                # Assign a unique name for this test
                test_name = os.path.basename(fn).split('.')[0]
                test_name = test_name.replace('-', '_')
                number = 2
                base_name = test_name
                while test_name in used_names:
                    test_name = '%s%d' % (base_name, number)
                    number += 1

                # Read the test source in and compile it
                with open(fn) as src:
                    rv = compile(src.read(), test_name, 'exec')
                stage = 'evaluate'

                # Make a copy of the globals array so we don't
                # contaminate this environment.
                g = globals().copy()

                # The test cases use __file__ to determine the path to
                # the schemas
                g['__file__'] = fn

                # Create a module into which the test will be evaluated.
                module = types.ModuleType(test_name)

                # The generated code uses __name__ to look up the
                # containing module in sys.modules.
                g['__name__'] = test_name
                sys.modules[test_name] = module

                # Import the test into the module, making sure the created globals look like they're in the module.
                eval(rv, g)
                module.__dict__.update(g)

                # Find all subclasses of unittest.TestCase that were
                # in the test source and add them to the suite.
                for (nm, obj) in g.items():
                    if isinstance(obj, type) and issubclass(obj, unittest.TestCase):
                        suite.addTest(loader.loadTestsFromTestCase(obj))
                if self.trace_tests:
                    print('%s imported' % (fn,))
            except Exception as e:
                print('%s failed in %s: %s' % (fn, stage, e))
                raise

        # Run everything
        verbosity = 1
        if self.trace_tests:
            verbosity = 2
        elif self.inhibit_output:
            # Don't know how to do this for real
            verbosity = 0
        runner = unittest.TextTestRunner(verbosity=verbosity)
        runner.run(suite)
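
The pivotal sys.modules step above is sys.modules[test_name] = module: the generated test code looks up its containing module by __name__, so the freshly created module object must be registered before the source is evaluated. A condensed sketch of the same load-and-register dance using importlib (the path and module name below are illustrative):

import importlib.util
import sys

def load_as_module(path, name):
    # Build a module object from a source file and register it so that
    # code consulting sys.modules[__name__] during execution can find it.
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module          # register *before* executing
    spec.loader.exec_module(module)     # run the file's top-level code
    return module

# mod = load_as_module('tests/test_example.py', 'test_example')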