django.core.management.call_command

Here are examples of the Python API `django.core.management.call_command`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.

200 Examples

Example 1

Project: talk.org
Source File: syncdb.py
View license
    def handle_noargs(self, **options):
        from django.db import connection, transaction, models
        from django.conf import settings
        from django.core.management.sql import table_list, installed_models, sql_model_create, sql_for_pending_references, many_to_many_sql_for_model, custom_sql_for_model, sql_indexes_for_model, emit_post_sync_signal

        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                __import__(app_name + '.management', {}, {}, [''])
            except ImportError, exc:
                if not exc.args[0].startswith('No module named management'):
                    raise

        cursor = connection.cursor()

        if connection.features.uses_case_insensitive_names:
            table_name_converter = lambda x: x.upper()
        else:
            table_name_converter = lambda x: x
        # Get a list of all existing database tables, so we know what needs to
        # be added.
        tables = [table_name_converter(name) for name in table_list()]

        # Get a list of already installed *models* so that references work right.
        seen_models = installed_models(tables)
        created_models = set()
        pending_references = {}

        # Create the tables for each model
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            model_list = models.get_models(app)
            for model in model_list:
                # Create the model's database table, if it doesn't already exist.
                if verbosity >= 2:
                    print "Processing %s.%s model" % (app_name, model._meta.object_name)
                if table_name_converter(model._meta.db_table) in tables:
                    continue
                sql, references = sql_model_create(model, self.style, seen_models)
                seen_models.add(model)
                created_models.add(model)
                for refto, refs in references.items():
                    pending_references.setdefault(refto, []).extend(refs)
                    if refto in seen_models:
                        sql.extend(sql_for_pending_references(refto, self.style, pending_references))
                sql.extend(sql_for_pending_references(model, self.style, pending_references))
                if verbosity >= 1:
                    print "Creating table %s" % model._meta.db_table
                for statement in sql:
                    cursor.execute(statement)
                tables.append(table_name_converter(model._meta.db_table))

        # Create the m2m tables. This must be done after all tables have been created
        # to ensure that all referred tables will exist.
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            model_list = models.get_models(app)
            for model in model_list:
                if model in created_models:
                    sql = many_to_many_sql_for_model(model, self.style)
                    if sql:
                        if verbosity >= 2:
                            print "Creating many-to-many tables for %s.%s model" % (app_name, model._meta.object_name)
                        for statement in sql:
                            cursor.execute(statement)

        transaction.commit_unless_managed()

        # Send the post_syncdb signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_sync_signal(created_models, verbosity, interactive)

        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            for model in models.get_models(app):
                if model in created_models:
                    custom_sql = custom_sql_for_model(model)
                    if custom_sql:
                        if verbosity >= 1:
                            print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
                        try:
                            for sql in custom_sql:
                                cursor.execute(sql)
                        except Exception, e:
                            sys.stderr.write("Failed to install custom SQL for %s.%s model: %s" % \
                                                (app_name, model._meta.object_name, e))
                            transaction.rollback_unless_managed()
                        else:
                            transaction.commit_unless_managed()

        # Install SQL indicies for all newly created models
        for app in models.get_apps():
            app_name = app.__name__.split('.')[-2]
            for model in models.get_models(app):
                if model in created_models:
                    index_sql = sql_indexes_for_model(model, self.style)
                    if index_sql:
                        if verbosity >= 1:
                            print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
                        try:
                            for sql in index_sql:
                                cursor.execute(sql)
                        except Exception, e:
                            sys.stderr.write("Failed to install index for %s.%s model: %s" % \
                                                (app_name, model._meta.object_name, e))
                            transaction.rollback_unless_managed()
                        else:
                            transaction.commit_unless_managed()

        # Install the 'initial_data' fixture, using format discovery
        from django.core.management import call_command
        call_command('loaddata', 'initial_data', verbosity=verbosity)

Example 2

Project: talk.org
Source File: creation.py
View license
def create_test_db(settings, connection, verbosity=1, autoclobber=False):
    """
    Create the test database and test user, then sync the schema into it.

    Derives test database/user/password/tablespace names from ``settings``
    (the tablespace parameters suggest an Oracle backend -- TODO confirm).
    If the database or user already exists, prompts to destroy and recreate
    it unless ``autoclobber`` is true. Finally repoints ``settings`` at the
    test credentials and runs ``syncdb`` non-interactively.

    Exits the process on failure: status 1 on user cancellation, status 2
    on a create/recreate error.
    """
    TEST_DATABASE_NAME = _test_database_name(settings)
    TEST_DATABASE_USER = _test_database_user(settings)
    TEST_DATABASE_PASSWD = _test_database_passwd(settings)
    TEST_DATABASE_TBLSPACE = _test_database_tblspace(settings)
    TEST_DATABASE_TBLSPACE_TMP = _test_database_tblspace_tmp(settings)

    # Substitution parameters for the raw SQL templates used by the
    # _create/_destroy helpers.
    parameters = {
        'dbname': TEST_DATABASE_NAME,
        'user': TEST_DATABASE_USER,
        'password': TEST_DATABASE_PASSWD,
        'tblspace': TEST_DATABASE_TBLSPACE,
        'tblspace_temp': TEST_DATABASE_TBLSPACE_TMP,
 	}

    # NOTE(review): REMEMBER is presumably a module-level dict used later to
    # restore the real credentials when the test DB is destroyed -- confirm.
    REMEMBER['user'] = settings.DATABASE_USER
    REMEMBER['passwd'] = settings.DATABASE_PASSWORD

    cursor = connection.cursor()
    if _test_database_create(settings):
        if verbosity >= 1:
            print 'Creating test database...'
        try:
            _create_test_db(cursor, parameters, verbosity)
        except Exception, e:
            sys.stderr.write("Got an error creating the test database: %s\n" % e)
            # When autoclobber is set, 'confirm' is never assigned; the
            # short-circuit below means it is also never read.
            if not autoclobber:
                confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_NAME)
            if autoclobber or confirm == 'yes':
                try:
                    if verbosity >= 1:
                        print "Destroying old test database..."
                    _destroy_test_db(cursor, parameters, verbosity)
                    if verbosity >= 1:
                        print "Creating test database..."
                    _create_test_db(cursor, parameters, verbosity)
                except Exception, e:
                    sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                    sys.exit(2)
            else:
                print "Tests cancelled."
                sys.exit(1)

    if _test_user_create(settings):
        if verbosity >= 1:
            print "Creating test user..."
        try:
            _create_test_user(cursor, parameters, verbosity)
        except Exception, e:
            sys.stderr.write("Got an error creating the test user: %s\n" % e)
            # Same autoclobber/confirm pattern as the database branch above.
            if not autoclobber:
                confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_USER)
            if autoclobber or confirm == 'yes':
                try:
                    if verbosity >= 1:
                        print "Destroying old test user..."
                    _destroy_test_user(cursor, parameters, verbosity)
                    if verbosity >= 1:
                        print "Creating test user..."
                    _create_test_user(cursor, parameters, verbosity)
                except Exception, e:
                    sys.stderr.write("Got an error recreating the test user: %s\n" % e)
                    sys.exit(2)
            else:
                print "Tests cancelled."
                sys.exit(1)

    # Reconnect as the test user so syncdb runs against the test database.
    connection.close()
    settings.DATABASE_USER = TEST_DATABASE_USER
    settings.DATABASE_PASSWORD = TEST_DATABASE_PASSWD

    management.call_command('syncdb', verbosity=verbosity, interactive=False)

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = connection.cursor()

Example 3

View license
def main():
    """
    The entry point for the script. This script is fairly basic. Here is a
    quick example of how to use it::
    
        app_test_runner.py [path-to-app]
    
    You must have Django on the PYTHONPATH prior to running this script. This
    script basically will bootstrap a Django environment for you.
    
    By default this script with use SQLite and an in-memory database. If you
    are using Python 2.5 it will just work out of the box for you.
    
    TODO: show more options here.
    """
    parser = OptionParser()
    parser.add_option("--DATABASE_ENGINE", dest="DATABASE_ENGINE", default="sqlite3")
    parser.add_option("--DATABASE_NAME", dest="DATABASE_NAME", default="")
    parser.add_option("--DATABASE_USER", dest="DATABASE_USER", default="")
    parser.add_option("--DATABASE_PASSWORD", dest="DATABASE_PASSWORD", default="")
    parser.add_option("--SITE_ID", dest="SITE_ID", type="int", default=1)
    
    options, args = parser.parse_args()
    
    # check for app in args
    try:
        app_path = args[0]
    except IndexError:
        print "You did not provide an app path."
        raise SystemExit
    else:
        if app_path.endswith("/"):
            app_path = app_path[:-1]
        parent_dir, app_name = os.path.split(app_path)
        sys.path.insert(0, parent_dir)
    
    settings.configure(**{
        "DATABASE_ENGINE": options.DATABASE_ENGINE,
        "DATABASE_NAME": options.DATABASE_NAME,
        "DATABASE_USER": options.DATABASE_USER,
        "DATABASE_PASSWORD": options.DATABASE_PASSWORD,
        "SITE_ID": options.SITE_ID,
        "ROOT_URLCONF": "",
        "TEMPLATE_LOADERS": (
            "django.template.loaders.filesystem.load_template_source",
            "django.template.loaders.app_directories.load_template_source",
        ),
        "TEMPLATE_DIRS": (
            os.path.join(os.path.dirname(__file__), "templates"),
        ),
        "INSTALLED_APPS": (
            # HACK: the admin app should *not* be required. Need to spend some
            # time looking into this. Django #8523 has a patch for this issue,
            # but was wrongly attached to that ticket. It should have its own
            # ticket.
            "django.contrib.admin",
            "django.contrib.auth",
            "django.contrib.contenttypes",
            "django.contrib.sessions",
            "django.contrib.sites",
            app_name,
        ),
    })
    call_command("test")

Example 4

Project: GAE-Bulk-Mailer
Source File: flush.py
View license
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # 'reset_sequences' is a stealth option
        reset_sequences = options.get('reset_sequences', True)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True, reset_sequences=reset_sequences)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
            except Exception as e:
                transaction.rollback_unless_managed(using=db)
                raise CommandError("""Database %s couldn't be flushed. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: %s""" % (connection.settings_dict['NAME'], e))
            transaction.commit_unless_managed(using=db)

            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend([
                    m for m in models.get_models(app, include_auto_created=True)
                    if router.allow_syncdb(db, m)
                ])
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            kwargs = options.copy()
            kwargs['database'] = db
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture.
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")

Example 5

Project: GAE-Bulk-Mailer
Source File: syncdb.py
View license
    def handle_noargs(self, **options):
        """
        Create the tables for all models not yet installed on the selected
        database alias, honoring database routers, then install custom SQL,
        indexes, and (unless disabled) the 'initial_data' fixture.

        Options used: 'verbosity', 'interactive', 'traceback',
        'load_initial_data', and 'database' (alias into ``connections``).
        """

        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        show_traceback = options.get('traceback')
        load_initial_data = options.get('load_initial_data')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError as exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise

        db = options.get('database')
        connection = connections[db]
        cursor = connection.cursor()

        # Get a list of already installed *models* so that references work right.
        tables = connection.introspection.table_names()
        seen_models = connection.introspection.installed_models(tables)
        created_models = set()
        # Maps a referenced model -> foreign-key references deferred until
        # that model's table exists.
        pending_references = {}

        # Build the manifest of apps and models that are to be synchronized
        # (only models the router allows onto this database).
        all_models = [
            (app.__name__.split('.')[-2],
                [m for m in models.get_models(app, include_auto_created=True)
                if router.allow_syncdb(db, m)])
            for app in models.get_apps()
        ]

        def model_installed(model):
            # True when neither the model's table nor its auto-created
            # (m2m through) table already exists.
            opts = model._meta
            converter = connection.introspection.table_name_converter
            return not ((converter(opts.db_table) in tables) or
                (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

        manifest = SortedDict(
            (app_name, list(filter(model_installed, model_list)))
            for app_name, model_list in all_models
        )

        # Create the tables for each model
        if verbosity >= 1:
            self.stdout.write("Creating tables ...\n")
        for app_name, model_list in manifest.items():
            for model in model_list:
                # Create the model's database table, if it doesn't already exist.
                if verbosity >= 3:
                    self.stdout.write("Processing %s.%s model\n" % (app_name, model._meta.object_name))
                sql, references = connection.creation.sql_create_model(model, self.style, seen_models)
                seen_models.add(model)
                created_models.add(model)
                # Queue references to not-yet-created models; flush any that
                # can now be satisfied.
                for refto, refs in references.items():
                    pending_references.setdefault(refto, []).extend(refs)
                    if refto in seen_models:
                        sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references))
                sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references))
                if verbosity >= 1 and sql:
                    self.stdout.write("Creating table %s\n" % model._meta.db_table)
                for statement in sql:
                    cursor.execute(statement)
                tables.append(connection.introspection.table_name_converter(model._meta.db_table))

        transaction.commit_unless_managed(using=db)

        # Send the post_syncdb signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_sync_signal(created_models, verbosity, interactive, db)

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()

        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        if verbosity >= 1:
            self.stdout.write("Installing custom SQL ...\n")
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    custom_sql = custom_sql_for_model(model, self.style, connection)
                    if custom_sql:
                        if verbosity >= 2:
                            self.stdout.write("Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
                        try:
                            for sql in custom_sql:
                                cursor.execute(sql)
                        except Exception as e:
                            # Best-effort: report and roll back this model's
                            # SQL, then continue with the remaining models.
                            self.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % \
                                                (app_name, model._meta.object_name, e))
                            if show_traceback:
                                traceback.print_exc()
                            transaction.rollback_unless_managed(using=db)
                        else:
                            transaction.commit_unless_managed(using=db)
                    else:
                        if verbosity >= 3:
                            self.stdout.write("No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))

        if verbosity >= 1:
            self.stdout.write("Installing indexes ...\n")
        # Install SQL indices for all newly created models
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    index_sql = connection.creation.sql_indexes_for_model(model, self.style)
                    if index_sql:
                        if verbosity >= 2:
                            self.stdout.write("Installing index for %s.%s model\n" % (app_name, model._meta.object_name))
                        try:
                            for sql in index_sql:
                                cursor.execute(sql)
                        except Exception as e:
                            # Same best-effort policy as custom SQL above.
                            self.stderr.write("Failed to install index for %s.%s model: %s\n" % \
                                                (app_name, model._meta.object_name, e))
                            transaction.rollback_unless_managed(using=db)
                        else:
                            transaction.commit_unless_managed(using=db)

        # Load initial_data fixtures (unless that has been disabled)
        if load_initial_data:
            call_command('loaddata', 'initial_data', verbosity=verbosity,
                         database=db, skip_validation=True)
Example 6

Project: DistrictBuilder
Source File: setup.py
View license
    def handle(self, *args, **options):
        """
        Run the DistrictBuilder setup: parse and validate the configuration
        file, import configuration objects, then perform each optional step
        selected in ``options`` (geolevel import, view creation, geoserver
        configuration, templates, static files, languages, adjacency data,
        BARD reports).

        Exits with status 1 on configuration errors; otherwise exits with 0
        when every selected step succeeded and nonzero when any step failed.
        """
        self.setup_logging(int(options.get('verbosity')))

        translation.activate('en')

        # A configuration file is mandatory.
        if options.get('config') is None:
            logger.warning("""
ERROR:

    This management command requires the -c or --config option. This option
    specifies the main configuration file.
""")
            sys.exit(1)


        try:
            store = redistricting.StoredConfig( options.get('config') )
        except Exception, ex:
            logger.info("""
ERROR:

The configuration file specified could not be parsed. Please check the
contents of the file and try again.
""")
            # Indicate that an error has occurred
            sys.exit(1)

        if not store.validate():
            logger.info("""
ERROR:

The configuration file was not valid. Please check the contents of the
file and try again.
""")
            # Indicate that an error has occurred
            sys.exit(1)

        # Create an importer for use in importing the config objects
        config = ConfigImporter(store)

        # When the setup script is run, it re-computes the secret key
        # used to secure session data. Blow away any old sessions that
        # were in the DB.
        success = Utils.purge_sessions()

        if not success:
            sys.exit(1)

        force = options.get('force')

        # When configuring, we want to keep track of any failures so we can
        # return the correct exit code.  Since False evaluates to a 0, we can
        # multiply values by all_ok so that a single False value means all_ok
        # will remain false
        all_ok = True

        success = config.import_superuser(force)

        all_ok = success
        try:
            all_ok = all_ok * self.import_prereq(config, force)
        except:
            all_ok = False
            logger.info('Error importing configuration.')
            logger.info(traceback.format_exc())

        # Create the utilities for spatial operations (renesting and geoserver)
        geoutil = SpatialUtils(store)

        try:
            # Geolevel indices to import / renest; None means the step was
            # not requested, an empty list means "all geolevels".
            optlevels = options.get("geolevels")
            nestlevels = options.get("nesting")

            if (not optlevels is None) or (not nestlevels is None):
                # Begin the import process
                geolevels = store.filter_geolevels()

                for i,geolevel in enumerate(geolevels):
                    if not optlevels is None:
                        importme = len(optlevels) == 0
                        importme = importme or (i in optlevels)
                        if importme:
                            self.import_geolevel(store, geolevel)

                    if not nestlevels is None:
                        nestme = len(nestlevels) == 0
                        nestme = nestme or (i in nestlevels)
                        if nestme:
                            geoutil.renest_geolevel(geolevel)
        except:
            all_ok = False
            logger.info('ERROR importing geolevels.')
            logger.debug(traceback.format_exc())


        # Do this once after processing the geolevels
        config.import_contiguity_overrides()

        # Save any changes to the config locale files
        config.save()

        if options.get("views"):
            # Create views based on the subjects and geolevels
            try:
                configure_views()
            except:
                logger.info(traceback.format_exc())
                all_ok = False

        if options.get("geoserver"):
            try:
                all_ok = all_ok * geoutil.purge_geoserver()
                if all_ok:
                    logger.info("Geoserver configuration cleaned.")
                    all_ok = all_ok * geoutil.configure_geoserver()
                else:
                    logger.info("Geoserver configuration could not be cleaned.")
            except:
                logger.info('ERROR configuring geoserver.')
                logger.info(traceback.format_exc())
                all_ok = False

        if options.get("templates"):
            try:
                self.create_template(store.data)
            except:
                logger.info('ERROR creating templates.')
                logger.debug(traceback.format_exc())
                all_ok = False

        if options.get("static"):
            # Collect and compress static assets via other management commands.
            call_command('collectstatic', interactive=False, verbosity=options.get('verbosity'))
            call_command('compress', interactive=False, verbosity=options.get('verbosity'), force=True)

        if options.get("languages"):
            call_command('makelanguagefiles', interactive=False, verbosity=options.get('verbosity'))

        if options.get("adjacency"):
            self.import_adjacency(store.data)

        if options.get("bard_templates"):
            try:
                self.create_report_templates(store.data)
            except:
                logger.info('ERROR creating BARD template files.')
                logger.debug(traceback.format_exc())
                all_ok = False

        if options.get("bard"):
            all_ok = all_ok * self.build_bardmap(store.data)

        # For our return value, a 0 (False) means OK, any nonzero (i.e., True or 1)
        # means that  an error occurred - the opposite of the meaning of all_ok's bool
        sys.exit(not all_ok)

Example 7

Project: DistrictBuilder
Source File: setup.py
View license
def main():
    """
    Main method to start the setup of DistrictBuilder.

    Usage: %prog [options] SCHEMA CONFIG.  Parses the setup option
    flags, validates the stored configuration against the supplied
    schema, merges the generated django settings modules, and then
    hands each requested setup phase off to the 'setup' management
    command.  Exits with status 0 on success and 1 on any validation
    or settings-merge failure.
    """
    usage = "usage: %prog [options] SCHEMA CONFIG"
    parser = OptionParser(usage=usage)
    # Each flag enables a single setup phase; when no phase flags are
    # given at all, 'allops' (computed below) turns on the default set.
    parser.add_option('-d', '--database', dest="database",
            help="Generate the database schema", default=False,
            action='store_true')
    parser.add_option('-g', '--geolevel', dest="geolevels",
            help="Import the geography from the Nth GeoLevel.", 
            action="append", type="int")
    parser.add_option('-V', '--views', dest="views",
            help="Create database views based on all geographies.",
            action='store_true', default=False)
    parser.add_option('-G', '--geoserver', dest="geoserver",
            help="Create spatial data layers in Geoserver.",
            default=False, action='store_true')
    parser.add_option('-t', '--templates', dest="templates",
            help="Create the system-wide templates.",
            default=False, action='store_true')
    parser.add_option('-n', '--nesting', dest="nesting",
            help="Enforce nested geometries.",
            action='append', type="int")
    parser.add_option('-s', '--static', dest="static",
            help="Collect the static javascript and css files.",
            action='store_true', default=False),
    parser.add_option('-l', '--languages', dest="languages",
            help="Create and compile a message file for each Language defined.",
            action='store_true', default=False),
    parser.add_option('-a', '--adjacency', dest="adjacency",
            help="Load adjacency data", default=False, action='store_true')
    parser.add_option('-b', '--bard', dest="bard",
            help="Create a BARD map based on the imported spatial data.", 
            default=False, action='store_true'),
    parser.add_option('-B', '--bardtemplates', dest="bard_templates",
            help="Create the BARD reporting templates.",
            action='store_true', default=False),
    parser.add_option('-v', '--verbosity', dest="verbosity",
            help="Verbosity level; 0=minimal output, 1=normal output, 2=all output",
            default=1, type="int")
    parser.add_option('-f', '--force', dest="force",
            help="Force changes if config differs from database",
            default=False, action='store_true')

    (options, args) = parser.parse_args()

    # True when the user supplied no phase flags at all.  Note that
    # verbosity and force are deliberately excluded from this check.
    allops = (not options.database) and (not options.geolevels) and (not options.views) and (not options.geoserver) and (not options.templates) and (not options.nesting) and (not options.bard) and (not options.static) and (not options.languages) and (not options.bard_templates) and (not options.adjacency)

    setup_logging(options.verbosity)

    # Exactly two positional arguments are required: SCHEMA and CONFIG.
    if len(args) != 2:
        logger.warning("""
ERROR:

    This script requires a configuration file and a schema. Please check
    the command line arguments and try again.
""")
        sys.exit(1)

    # args[0] is the schema, args[1] the configuration file.
    try:
        config = StoredConfig(args[1], schema=args[0])
    except Exception, e:
        # NOTE(review): the caught exception is never reported anywhere;
        # the script simply exits non-zero.
        sys.exit(1)

    if not config.validate():
        logger.info("Configuration could not be validated.")

        sys.exit(1)

    logger.info("Validated config.")

    # Write out the two generated django settings modules; abort on
    # either failure.
    merge_status = config.merge_settings('settings.py')
    if merge_status:
        logger.info("Generated django settings for publicmapping.")
    else:
        sys.exit(1)

    merge_status = config.merge_settings('reporting_settings.py')
    if merge_status:
        logger.info("Generated django settings for reporting.")
    else:
        sys.exit(1)

    # Django must know its settings module before django.core is
    # imported below; the path additions make the local packages
    # importable.
    os.environ['DJANGO_SETTINGS_MODULE'] = 'publicmapping.settings'
    
    sys.path += ['.', '..']

    from django.core import management

    if allops or options.database:
        management.call_command('syncdb', verbosity=options.verbosity, interactive=False)

    # With no explicit flags, enable the standard phases (BARD and
    # adjacency remain opt-in); otherwise honor exactly what was asked.
    if allops:
        database = True
        geolevels = []
        views = True
        geoserver = True
        templates = True
        nesting = []
        static = True
        languages = True
        bard = False
        bard_templates = False
        adjacency = False
    else:
        database = options.database
        geolevels = options.geolevels
        views = options.views
        geoserver = options.geoserver
        templates = options.templates
        nesting = options.nesting
        static = options.static
        languages = options.languages
        bard = options.bard
        bard_templates = options.bard_templates
        adjacency = options.adjacency

    # Delegate the real work to the 'setup' management command with the
    # resolved phase flags.
    management.call_command('setup', config=args[1], verbosity=options.verbosity, database=database, geolevels=geolevels, views=views, geoserver=geoserver, templates=templates, nesting=nesting, static=static, languages=languages, bard=bard, bard_templates=bard_templates, force=options.force, adjacency=adjacency)
    
    # Success! Exit-code 0
    sys.exit(0)

Example 8

Project: PyClassLessons
Source File: flush.py
View license
    def handle_noargs(self, **options):
        database = options.get('database')
        connection = connections[database]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to an empty state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                with transaction.atomic(using=database,
                                        savepoint=connection.features.can_rollback_ddl):
                    with connection.cursor() as cursor:
                        for sql in sql_list:
                            cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            if not inhibit_post_migrate:
                self.emit_post_migrate(verbosity, interactive, database)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture for apps without migrations.
                from django.db.migrations.executor import MigrationExecutor
                executor = MigrationExecutor(connection)
                app_options = options.copy()
                for app_label in executor.loader.unmigrated_apps:
                    app_options['app_label'] = app_label
                    call_command('loaddata', 'initial_data', **app_options)
        else:
            self.stdout.write("Flush cancelled.\n")

Example 9

Project: PyClassLessons
Source File: migrate.py
View license
    def handle(self, *args, **options):
        """
        Entry point for the 'migrate' management command.

        Resolves the positional arguments into a set of migration
        targets (all leaf nodes, one app's leaves, or a single named
        migration / 'zero'), runs the legacy syncdb phase for apps
        without migrations, then computes and applies the migration
        plan.  Raises CommandError on conflicting migrations, unknown
        apps/migrations, or too many arguments.
        """

        self.verbosity = int(options.get('verbosity'))
        self.interactive = options.get('interactive')
        self.show_traceback = options.get('traceback')
        self.load_initial_data = options.get('load_initial_data')
        self.test_database = options.get('test_database', False)

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            if module_has_submodule(app_config.module, "management"):
                import_module('.management', app_config.name)

        # Get the database we're operating from
        db = options.get('database')
        connection = connections[db]

        # If they asked for a migration listing, quit main execution flow and show it
        if options.get("list", False):
            return self.show_migration_list(connection, args)

        # Work out which apps have migrations and which do not
        executor = MigrationExecutor(connection, self.migration_progress_callback)

        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any
        conflicts = executor.loader.detect_conflicts()
        if conflicts:
            name_str = "; ".join(
                "%s in %s" % (", ".join(names), app)
                for app, names in conflicts.items()
            )
            raise CommandError("Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str)

        # If they supplied command line arguments, work out what they mean.
        run_syncdb = False
        target_app_labels_only = True
        if len(args) > 2:
            raise CommandError("Too many command-line arguments (expecting 'app_label' or 'app_label migrationname')")
        elif len(args) == 2:
            # 'app_label migration_name': target one specific migration.
            app_label, migration_name = args
            if app_label not in executor.loader.migrated_apps:
                raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label)
            if migration_name == "zero":
                # (app_label, None) means "unapply everything" for the app.
                targets = [(app_label, None)]
            else:
                try:
                    migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
                except AmbiguityError:
                    raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (
                        migration_name, app_label))
                except KeyError:
                    raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
                        migration_name, app_label))
                targets = [(app_label, migration.name)]
            target_app_labels_only = False
        elif len(args) == 1:
            # 'app_label' alone: migrate that app to its latest leaves.
            app_label = args[0]
            if app_label not in executor.loader.migrated_apps:
                raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label)
            targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
        else:
            # No args: migrate everything, and also sync unmigrated apps.
            targets = executor.loader.graph.leaf_nodes()
            run_syncdb = True

        plan = executor.migration_plan(targets)

        # Print some useful info
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
            if run_syncdb and executor.loader.unmigrated_apps:
                self.stdout.write(self.style.MIGRATE_LABEL("  Synchronize unmigrated apps: ") + (", ".join(executor.loader.unmigrated_apps)))
            if target_app_labels_only:
                self.stdout.write(self.style.MIGRATE_LABEL("  Apply all migrations: ") + (", ".join(set(a for a, n in targets)) or "(none)"))
            else:
                if targets[0][1] is None:
                    self.stdout.write(self.style.MIGRATE_LABEL("  Unapply all migrations: ") + "%s" % (targets[0][0], ))
                else:
                    self.stdout.write(self.style.MIGRATE_LABEL("  Target specific migration: ") + "%s, from %s" % (targets[0][1], targets[0][0]))

        # Run the syncdb phase.
        # If you ever manage to get rid of this, I owe you many, many drinks.
        # Note that pre_migrate is called from inside here, as it needs
        # the list of models about to be installed.
        if run_syncdb and executor.loader.unmigrated_apps:
            if self.verbosity >= 1:
                self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:"))
            created_models = self.sync_apps(connection, executor.loader.unmigrated_apps)
        else:
            created_models = []
            emit_pre_migrate_signal([], self.verbosity, self.interactive, connection.alias)

        # The test runner requires us to flush after a syncdb but before migrations,
        # so do that here.
        if options.get("test_flush", False):
            call_command(
                'flush',
                verbosity=max(self.verbosity - 1, 0),
                interactive=False,
                database=db,
                reset_sequences=False,
                inhibit_post_migrate=True,
            )

        # Migrate!
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
        if not plan:
            if self.verbosity >= 1:
                self.stdout.write("  No migrations to apply.")
                # If there's changes that aren't in migrations yet, tell them how to fix it.
                autodetector = MigrationAutodetector(
                    executor.loader.project_state(),
                    ProjectState.from_apps(apps),
                )
                changes = autodetector.changes(graph=executor.loader.graph)
                if changes:
                    self.stdout.write(self.style.NOTICE("  Your models have changes that are not yet reflected in a migration, and so won't be applied."))
                    self.stdout.write(self.style.NOTICE("  Run 'manage.py makemigrations' to make new migrations, and then re-run 'manage.py migrate' to apply them."))
        else:
            executor.migrate(targets, plan, fake=options.get("fake", False))

        # Send the post_migrate signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_migrate_signal(created_models, self.verbosity, self.interactive, connection.alias)
Example 10

Project: PyClassLessons
Source File: migrate.py
View license
    def sync_apps(self, connection, app_labels):
        """
        Runs the old syncdb-style operation on a list of app_labels.

        For every app in ``app_labels`` that has no migrations, creates
        the database tables for models not already installed, then
        installs each new model's custom SQL and indexes, and finally
        (unless disabled) loads the ``initial_data`` fixture per app.
        Returns the set of model classes whose tables were created.
        """
        cursor = connection.cursor()

        try:
            # Get a list of already installed *models* so that references work right.
            tables = connection.introspection.table_names(cursor)
            seen_models = connection.introspection.installed_models(tables)
            created_models = set()
            pending_references = {}

            # Build the manifest of apps and models that are to be synchronized
            all_models = [
                (app_config.label,
                    router.get_migratable_models(app_config, connection.alias, include_auto_created=True))
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config.label in app_labels
            ]

            def model_installed(model):
                opts = model._meta
                converter = connection.introspection.table_name_converter
                # Note that if a model is unmanaged we short-circuit and never try to install it
                return not ((converter(opts.db_table) in tables) or
                    (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

            # Keep only models whose tables do not exist yet.
            manifest = OrderedDict(
                (app_name, list(filter(model_installed, model_list)))
                for app_name, model_list in all_models
            )

            create_models = set(itertools.chain(*manifest.values()))
            emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias)

            # Create the tables for each model
            if self.verbosity >= 1:
                self.stdout.write("  Creating tables...\n")
            with transaction.atomic(using=connection.alias, savepoint=False):
                for app_name, model_list in manifest.items():
                    for model in model_list:
                        # Create the model's database table, if it doesn't already exist.
                        if self.verbosity >= 3:
                            self.stdout.write("    Processing %s.%s model\n" % (app_name, model._meta.object_name))
                        sql, references = connection.creation.sql_create_model(model, no_style(), seen_models)
                        seen_models.add(model)
                        created_models.add(model)
                        # Accumulate FK references to models not created yet;
                        # emit the deferred SQL once the referenced model is seen.
                        for refto, refs in references.items():
                            pending_references.setdefault(refto, []).extend(refs)
                            if refto in seen_models:
                                sql.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
                        sql.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
                        if self.verbosity >= 1 and sql:
                            self.stdout.write("    Creating table %s\n" % model._meta.db_table)
                        for statement in sql:
                            cursor.execute(statement)
                        tables.append(connection.introspection.table_name_converter(model._meta.db_table))

            # We force a commit here, as that was the previous behavior.
            # If you can prove we don't need this, remove it.
            transaction.set_dirty(using=connection.alias)
        finally:
            cursor.close()

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()
        try:
            # Install custom SQL for the app (but only if this
            # is a model we've just created)
            if self.verbosity >= 1:
                self.stdout.write("  Installing custom SQL...\n")
            for app_name, model_list in manifest.items():
                for model in model_list:
                    if model in created_models:
                        custom_sql = custom_sql_for_model(model, no_style(), connection)
                        if custom_sql:
                            if self.verbosity >= 2:
                                self.stdout.write("    Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
                            try:
                                with transaction.commit_on_success_unless_managed(using=connection.alias):
                                    for sql in custom_sql:
                                        cursor.execute(sql)
                            except Exception as e:
                                # Best-effort: report the failure and carry on.
                                self.stderr.write("    Failed to install custom SQL for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
                                if self.show_traceback:
                                    traceback.print_exc()
                        else:
                            if self.verbosity >= 3:
                                self.stdout.write("    No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))

            if self.verbosity >= 1:
                self.stdout.write("  Installing indexes...\n")

            # Install SQL indices for all newly created models
            for app_name, model_list in manifest.items():
                for model in model_list:
                    if model in created_models:
                        index_sql = connection.creation.sql_indexes_for_model(model, no_style())
                        if index_sql:
                            if self.verbosity >= 2:
                                self.stdout.write("    Installing index for %s.%s model\n" % (app_name, model._meta.object_name))
                            try:
                                with transaction.commit_on_success_unless_managed(using=connection.alias):
                                    for sql in index_sql:
                                        cursor.execute(sql)
                            except Exception as e:
                                # Best-effort: report the failure and carry on.
                                self.stderr.write("    Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
        finally:
            cursor.close()

        # Load initial_data fixtures (unless that has been disabled)
        if self.load_initial_data:
            for app_label in app_labels:
                call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=connection.alias, skip_validation=True, app_label=app_label, hide_empty=True)

        return created_models

Example 11

Project: django-cassandra-engine
Source File: creation.py
View license
    def create_test_db(self, verbosity=1, autoclobber=False, **kwargs):
        """
        Build the test keyspace and point the connection at it.

        Creates the keyspace (using the configured replication factor),
        rewrites the connection settings to the test name, reconnects,
        runs 'sync_cassandra', and returns the test keyspace name.
        """
        # Deferred import: only pull in django.core.management when needed.
        from django.core.management import call_command
        from django.conf import settings

        self.connection.connect()

        # django-nose's runner may already have switched the keyspace to
        # test_*; restore the live name first so all models for the live
        # keyspace can still be found.
        self.connection.connection.keyspace = \
            self.connection.settings_dict['NAME']
        test_db_name = self._get_test_db_name()

        # Point every model at the test keyspace.
        self.set_models_keyspace(test_db_name)

        if verbosity >= 1:
            detail = " ('%s')" % test_db_name if verbosity >= 2 else ''
            print("Creating test database for alias '%s'%s..." % (
                self.connection.alias, detail))

        conn_opts = self.connection.settings_dict.get('OPTIONS', {})
        replication = conn_opts.get('replication', {})
        factor = replication.pop('replication_factor', 1)

        create_keyspace_simple(self.connection.settings_dict['NAME'],
                               factor)

        settings.DATABASES[self.connection.alias]["NAME"] = test_db_name
        self.connection.settings_dict["NAME"] = test_db_name

        self.connection.reconnect()

        # Report syncdb messages at one level lower than that requested,
        # so testing isn't flooded with output unless explicitly asked.
        call_command(
            'sync_cassandra',
            verbosity=max(verbosity - 1, 0),
            interactive=False,
            database=self.connection.alias,
            load_initial_data=False
        )

        return test_db_name

Example 12

Project: django-geonames
Source File: load_geonames.py
View license
    def handle_noargs(self, **options):
        """
        Bulk-load the Geonames data dumps into PostgreSQL.

        Pipes the gzipped dump files straight into psql with COPY,
        dropping and recreating indexes around each load; optionally
        reports the total elapsed time.
        """
        if options['time']:
            start_time = datetime.datetime.now()

        # Making sure the db tables exist.
        call_command('syncdb')

        db_opts = get_cmd_options()
        fromfile_cmd = 'psql %(db_opts)s -f %(sql_file)s'

        def run_sql_script(filename):
            # Execute one bundled SQL script through psql, echoing the
            # command line before running it.
            cmd = fromfile_cmd % {
                'db_opts': db_opts,
                'sql_file': os.path.join(GEONAMES_SQL, filename),
            }
            print(cmd)
            os.system(cmd)

        ### COPY'ing into the Geonames table ###

        # Pipe the unzipped data to PostgreSQL's `COPY` directive.  This
        # builds the table directly from the file produced by the
        # `compress_geonames` command, avoiding ORM overhead; copying
        # from a gzipped file also reduces disk I/O.
        copy_sql = "COPY %s (geonameid,name,alternates,fclass,fcode,country,cc2,admin1,admin2,admin3,admin4,population,elevation,topo,timezone,moddate,point) FROM STDIN;" % Geoname._meta.db_table
        copy_cmd = 'gunzip -c %(gz_file)s | psql %(db_opts)s -c "%(copy_sql)s"'
        copy_args = {
            'gz_file': os.path.join(GEONAMES_DATA, 'allCountries.gz'),
            'db_opts': db_opts,
            'copy_sql': copy_sql,
        }

        if not options['no_geonames']:
            run_sql_script('drop_geoname_indexes.sql')
            print(copy_cmd % copy_args)
            os.system(copy_cmd % copy_args)
            run_sql_script('create_geoname_indexes.sql')
            print('Finished PostgreSQL `COPY` from Geonames all countries data file.')

        ### COPY'ing into the Geonames alternate table ###

        copy_sql = "COPY %s (alternateid,geoname_id,isolanguage,variant,preferred,short) FROM STDIN;" % Alternate._meta.db_table
        copy_cmd = 'zcat %(gz_file)s | psql %(db_opts)s -c "%(copy_sql)s"'
        copy_args = {
            'gz_file': os.path.join(GEONAMES_DATA, 'alternateNames.gz'),
            'db_opts': get_cmd_options(),
            'copy_sql': copy_sql,
        }

        if not options['no_alternates']:
            run_sql_script('drop_alternate_indexes.sql')
            print(copy_cmd % copy_args)
            os.system(copy_cmd % copy_args)
            print('Finished PostgreSQL `COPY` from Geonames alternate names data file.')
            run_sql_script('create_alternate_indexes.sql')

        # Done
        if options['time']:
            print('\nCompleted in %s' % (datetime.datetime.now() - start_time))

Example 13

Project: reviewboard
Source File: evolutions.py
View license
def init_evolutions(app, created_models, **kwargs):
    """Attempt to initialize the Django Evolution schema signatures.

    This attempts to initialize the evolution signatures to sane values. This
    works around the issue where a first syncdb with Django Evolution (even on
    existing databases) will cause Django Evolution to assume the database is
    the most up to date, even if it's not. This will break the database. Our
    workarounds prevent this by starting off with sane values and doing some
    smart checks.

    Intended to be connected to the post-syncdb signal: ``app`` and
    ``created_models`` come from that signal's arguments.
    """
    if FileDiff in created_models:
        # This is a new install. Let it continue through. The database will
        # be created with an up-to-date schema.
        return

    try:
        latest_version = django_evolution.Version.objects.latest('when')
    except django_evolution.Version.DoesNotExist:
        # This install didn't previously have Django Evolution. We might need
        # to prefill it with the schema from before the first db mutation.
        # However, we only want to do this if this is an existing database,
        # or users will have to evolve after the first install, which is
        # bad.
        latest_version = None

    if latest_version:
        # There's an existing Django Evolution install. Check to see if it's
        # broken, as it may be from the time just after the addition of
        # Django Evolution where it wouldn't migrate databases and instead
        # marked the schemas as being up to date in the stored signature.
        try:
            # If this succeeds, we're good.
            # NOTE(review): QuerySets are lazy, so this filter() by itself
            # may not hit the database; it will, however, raise immediately
            # if the model field is unknown. Confirm this probe actually
            # detects the broken-column case it documents below.
            FileDiff.objects.filter(parent_diff64="")

            return
        # Narrowed from a bare `except:` so that KeyboardInterrupt /
        # SystemExit are not swallowed; any field or database error from
        # the probe above still lands here.
        except Exception:
            # If that failed, then most likely it's due to the
            # parent_diff_base64 column not existing in the database, which
            # means that Django Evolution's view of the database and the
            # database itself are out of match from an early install during
            # the breakage period.
            #
            # We can feel free to nuke the Django Evolution tables so that
            # we can apply our own schema in order to kickstart a proper
            # evolution.
            django_evolution.Version.objects.all().delete()
            django_evolution.Evolution.objects.all().delete()

    # Load the Django Evolution fixture describing the database at the time
    # of the Django Evolution addition.
    call_command('loaddata', 'admin/fixtures/initial_evolution_schema.json',
                 verbosity=0)

Example 14

View license
    def test_loadworkflowsampledata(self):
        """Exercise the ``loadworkflowsampledata`` command end to end.

        Covers: malformed version arguments, a successful load, and the
        various failure modes (unknown workflow, unknown version, broken
        load function, and a missing load function in the manifest).
        """
        # Load the test workflow so the command has something to act on.
        load_workflow(TEST_WORKFLOW, VERSION_1)
        assert_test_dir_workflow_loaded(self)
        assert_test_dir_v1_loaded(self)

        # Each malformed argument should produce a format error on stderr.
        format_error = 'Please specify workflow versions in the format'
        bad_args = (
            VERSION_1,  # Workflow slug omitted
            TEST_WORKFLOW,  # Version slug omitted
            '{}/{}/'.format(TEST_WORKFLOW, VERSION_1),  # Too many slashes
            '{}.{}'.format(TEST_WORKFLOW, VERSION_1),  # Wrong separator
        )
        for bad_arg in bad_args:
            err = StringIO()
            call_command('loadworkflowsampledata', bad_arg, stderr=err)
            self.assertIn(format_error, err.getvalue())

        # A well-formed workflow/version pair should load cleanly.
        v1_str = '{}/{}'.format(TEST_WORKFLOW, VERSION_1)
        out = StringIO()
        call_command('loadworkflowsampledata', v1_str, stdout=out)
        self.assertIn('Successfully loaded sample data', out.getvalue())

        # clean up for subsequent commands
        del sys.modules['orchestra.tests.workflows.test_dir.load_sample_data']

        # An unknown workflow slug should be reported on stderr.
        err_workflow = StringIO()
        call_command(
            'loadworkflowsampledata',
            '{}/{}'.format(NONEXISTENT_WORKFLOW_SLUG, VERSION_1),
            stderr=err_workflow)
        self.assertIn(
            'Workflow {} has not been loaded into the '
            'database'.format(NONEXISTENT_WORKFLOW_SLUG),
            err_workflow.getvalue())

        # An unknown version slug should be reported on stderr.
        err_version = StringIO()
        call_command(
            'loadworkflowsampledata',
            '{}/{}'.format(TEST_WORKFLOW, NONEXISTENT_VERSION),
            stderr=err_version)
        self.assertIn(
            'Version {} does not exist'.format(NONEXISTENT_VERSION),
            err_version.getvalue())

        # A workflow pointing at a nonexistent loading script should fail.
        # Simulate this by rewriting the load function path.
        workflow = Workflow.objects.get(slug=TEST_WORKFLOW)
        workflow.sample_data_load_function = 'invalid_load_function'
        workflow.save()
        err_module = StringIO()
        call_command('loadworkflowsampledata', v1_str, stderr=err_module)
        self.assertIn('An error occurred while loading sample data',
                      err_module.getvalue())

        # A workflow whose JSON manifest declares no load function at all
        # should also fail.
        workflow = Workflow.objects.get(slug=TEST_WORKFLOW)
        workflow.sample_data_load_function = None
        workflow.save()
        err_missing = StringIO()
        call_command('loadworkflowsampledata', v1_str, stderr=err_missing)
        self.assertIn(
            'Workflow {} does not provide sample data'.format(TEST_WORKFLOW),
            err_missing.getvalue())

Example 15

Project: django-autoadmin
Source File: models.py
View license
    def create_autoadmin(self):
        """Create the automatic admin superuser if it does not exist yet.

        Resolves a password (from the ``PASSWORD`` setting or a random one),
        creates the superuser via the ``createsuperuser`` management command,
        then stores the account and password on the ``AutoAdminSingleton``
        so the first-login message can display the credentials.
        """
        UserModel = get_user_model()

        # Get an usable password before creating the superuser
        if PASSWORD():
            try:
                # Let's try to see if it is a callable
                # NOTE(review): PASSWORD() is invoked here a second time and
                # then called again — presumably PASSWORD() returns either a
                # plain password or a callable producing one; confirm the
                # double evaluation has no side effects.
                password = PASSWORD()()
            except TypeError:
                password = PASSWORD()
        else:
            password = UserModel.objects.make_random_password()

        try:
            # Existence check: only create the account if no user with the
            # configured username already exists.
            UserModel.objects.get(**{UserModel.USERNAME_FIELD: USERNAME()})
        except UserModel.DoesNotExist:
            # NOTE(review): this logs the plaintext password at INFO level —
            # confirm that is acceptable for this deployment's log handling.
            logger.info(
                'Creating superuser -- login: %s, email: %s, password: %s',
                USERNAME(), EMAIL(), password
            )
            management.call_command(
                'createsuperuser',
                **{
                    UserModel.USERNAME_FIELD: USERNAME(),
                    'email': EMAIL(),
                    'interactive': False
                }
            )

            # Re-fetch the account createsuperuser just made so we can set
            # the resolved password on it (createsuperuser in
            # non-interactive mode does not accept one).
            account = UserModel.objects.get(
                **{UserModel.USERNAME_FIELD: USERNAME()}
            )
            account.set_password(raw_password=password)
            account.save()
            # Store the auto admin password properties to display the
            # first login message
            auto_admin_properties, created = AutoAdminSingleton.objects.get_or_create()  # NOQA
            auto_admin_properties.account = account
            auto_admin_properties.password = password
            auto_admin_properties.password_hash = account.password
            auto_admin_properties.save()
        else:
            logger.error(
                'Super admin user already exists. -- login: %s', USERNAME()
            )

Example 16

View license
    def handle(self, *args, **options):
        """Import one or more projects from the readthedocs.org v1 API.

        For each slug passed on the command line: fetch the project's
        metadata, copy a fixed whitelist of attributes onto a local
        ``Project`` (creating it when missing), assign the first user in
        the database as owner, and trigger ``update_repos`` for all
        versions.
        """
        api = slumber.API(base_url='http://readthedocs.org/api/v1/')
        # Imported projects are owned by the first (lowest-pk) user.
        user1 = User.objects.filter(pk__gt=0).order_by('pk').first()

        for slug in options['project_slug']:
            self.stdout.write('Importing {slug} ...'.format(slug=slug))

            project_data = api.project.get(slug=slug)
            try:
                project_data = project_data['objects'][0]
            except (KeyError, IndexError):
                self.stderr.write(
                    'Cannot find {slug} in API. Response was:\n{response}'
                    .format(
                        slug=slug,
                        response=json.dumps(project_data)))
                # Bug fix: skip this slug. Previously execution fell
                # through with the raw API response still bound to
                # project_data, and the attribute-copy loop below raised
                # KeyError on the first attribute.
                continue

            try:
                project = Project.objects.get(slug=slug)
            except Project.DoesNotExist:
                project = Project(slug=slug)

            # Attributes copied verbatim from the API payload.
            copy_attributes = (
                'pub_date',
                'modified_date',
                'name',
                'description',
                'repo',
                'repo_type',
                'project_url',
                'canonical_url',
                'version',
                'copyright',
                'theme',
                'suffix',
                'single_version',
                'default_version',
                'default_branch',
                'requirements_file',
                'documentation_type',
                'allow_comments',
                'comment_moderation',
                # 'analytics_code' is left out on purpose.
                'enable_epub_build',
                'enable_pdf_build',
                'conf_py_file',
                'skip',
                'mirror',
                'install_project',
                'python_interpreter',
                'use_system_packages',
                'django_packages_url',
                'privacy_level',
                'version_privacy_level',
                'language',
                'num_major',
                'num_minor',
                'num_point',
            )

            for attribute in copy_attributes:
                setattr(project, attribute, project_data[attribute])
            project.user = user1
            project.save()
            if user1:
                project.users.add(user1)

            call_command('update_repos', project.slug, version='all')

Example 17

Project: Django--an-app-at-a-time
Source File: flush.py
View license
    def handle(self, **options):
        """Flush (empty) all Django-managed tables in the selected database.

        Prompts for confirmation when interactive, executes the SQL from
        ``sql_flush`` inside a transaction, emits the post-migrate signal,
        and optionally reloads ``initial_data`` fixtures for apps without
        migrations.
        """
        database = options.get('database')
        connection = connections[database]
        verbosity = options.get('verbosity')
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                pass

        # only_django=True restricts the flush to tables Django created.
        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to an empty state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                # Wrap the whole flush in one transaction; use a savepoint
                # only when the backend can roll DDL back.
                with transaction.atomic(using=database,
                                        savepoint=connection.features.can_rollback_ddl):
                    with connection.cursor() as cursor:
                        for sql in sql_list:
                            cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin sqlflush'. "
                    "That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                # Re-raise as CommandError while preserving the original
                # traceback (Python 2/3 compatible via six).
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            if not inhibit_post_migrate:
                self.emit_post_migrate(verbosity, interactive, database)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture for apps without migrations.
                from django.db.migrations.executor import MigrationExecutor
                executor = MigrationExecutor(connection)
                app_options = options.copy()
                for app_label in executor.loader.unmigrated_apps:
                    app_options['app_label'] = app_label
                    call_command('loaddata', 'initial_data', **app_options)
        else:
            self.stdout.write("Flush cancelled.\n")

Example 18

Project: Django--an-app-at-a-time
Source File: migrate.py
View license
    def handle(self, *args, **options):
        """Entry point for the ``migrate`` management command.

        Works out the migration targets from the command-line options,
        synchronizes apps without migrations (the legacy syncdb phase),
        computes and runs the migration plan, and fires the pre/post
        migrate signals around the work.
        """

        self.verbosity = options.get('verbosity')
        self.interactive = options.get('interactive')
        self.show_traceback = options.get('traceback')
        self.load_initial_data = options.get('load_initial_data')

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            if module_has_submodule(app_config.module, "management"):
                import_module('.management', app_config.name)

        # Get the database we're operating from
        db = options.get('database')
        connection = connections[db]

        # If they asked for a migration listing, quit main execution flow and show it
        if options.get("list", False):
            warnings.warn(
                "The 'migrate --list' command is deprecated. Use 'showmigrations' instead.",
                RemovedInDjango20Warning, stacklevel=2)
            self.stdout.ending = None  # Remove when #21429 is fixed
            return call_command(
                'showmigrations',
                '--list',
                app_labels=[options['app_label']] if options['app_label'] else None,
                database=db,
                no_color=options.get('no_color'),
                settings=options.get('settings'),
                stdout=self.stdout,
                traceback=self.show_traceback,
                verbosity=self.verbosity,
            )

        # Hook for backends needing any database preparation
        connection.prepare_database()
        # Work out which apps have migrations and which do not
        executor = MigrationExecutor(connection, self.migration_progress_callback)

        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any
        conflicts = executor.loader.detect_conflicts()
        if conflicts:
            name_str = "; ".join(
                "%s in %s" % (", ".join(names), app)
                for app, names in conflicts.items()
            )
            raise CommandError(
                "Conflicting migrations detected (%s).\nTo fix them run "
                "'python manage.py makemigrations --merge'" % name_str
            )

        # If they supplied command line arguments, work out what they mean.
        # Three cases: app_label + migration_name, app_label alone, neither.
        run_syncdb = False
        target_app_labels_only = True
        if options['app_label'] and options['migration_name']:
            app_label, migration_name = options['app_label'], options['migration_name']
            if app_label not in executor.loader.migrated_apps:
                raise CommandError(
                    "App '%s' does not have migrations (you cannot selectively "
                    "sync unmigrated apps)" % app_label
                )
            if migration_name == "zero":
                # "zero" means unapply everything for the app.
                targets = [(app_label, None)]
            else:
                try:
                    migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
                except AmbiguityError:
                    raise CommandError(
                        "More than one migration matches '%s' in app '%s'. "
                        "Please be more specific." %
                        (migration_name, app_label)
                    )
                except KeyError:
                    raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
                        migration_name, app_label))
                targets = [(app_label, migration.name)]
            target_app_labels_only = False
        elif options['app_label']:
            app_label = options['app_label']
            if app_label not in executor.loader.migrated_apps:
                raise CommandError(
                    "App '%s' does not have migrations (you cannot selectively "
                    "sync unmigrated apps)" % app_label
                )
            targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
        else:
            targets = executor.loader.graph.leaf_nodes()
            run_syncdb = True

        # Ordered list of (migration, backwards) steps needed to reach targets.
        plan = executor.migration_plan(targets)

        # Print some useful info
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
            if run_syncdb and executor.loader.unmigrated_apps:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Synchronize unmigrated apps: ") +
                    (", ".join(executor.loader.unmigrated_apps))
                )
            if target_app_labels_only:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Apply all migrations: ") +
                    (", ".join(set(a for a, n in targets)) or "(none)")
                )
            else:
                if targets[0][1] is None:
                    self.stdout.write(self.style.MIGRATE_LABEL(
                        "  Unapply all migrations: ") + "%s" % (targets[0][0], )
                    )
                else:
                    self.stdout.write(self.style.MIGRATE_LABEL(
                        "  Target specific migration: ") + "%s, from %s"
                        % (targets[0][1], targets[0][0])
                    )

        # Run the syncdb phase.
        # If you ever manage to get rid of this, I owe you many, many drinks.
        # Note that pre_migrate is called from inside here, as it needs
        # the list of models about to be installed.
        if run_syncdb and executor.loader.unmigrated_apps:
            if self.verbosity >= 1:
                self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:"))
            created_models = self.sync_apps(connection, executor.loader.unmigrated_apps)
        else:
            created_models = []
            emit_pre_migrate_signal([], self.verbosity, self.interactive, connection.alias)

        # The test runner requires us to flush after a syncdb but before migrations,
        # so do that here.
        if options.get("test_flush", False):
            call_command(
                'flush',
                verbosity=max(self.verbosity - 1, 0),
                interactive=False,
                database=db,
                reset_sequences=False,
                inhibit_post_migrate=True,
            )

        # Migrate!
        if self.verbosity >= 1:
            self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
        if not plan:
            if self.verbosity >= 1:
                self.stdout.write("  No migrations to apply.")
                # If there's changes that aren't in migrations yet, tell them how to fix it.
                autodetector = MigrationAutodetector(
                    executor.loader.project_state(),
                    ProjectState.from_apps(apps),
                )
                changes = autodetector.changes(graph=executor.loader.graph)
                if changes:
                    self.stdout.write(self.style.NOTICE(
                        "  Your models have changes that are not yet reflected "
                        "in a migration, and so won't be applied."
                    ))
                    self.stdout.write(self.style.NOTICE(
                        "  Run 'manage.py makemigrations' to make new "
                        "migrations, and then re-run 'manage.py migrate' to "
                        "apply them."
                    ))
        else:
            fake = options.get("fake")
            fake_initial = options.get("fake_initial")
            executor.migrate(targets, plan, fake=fake, fake_initial=fake_initial)

        # Send the post_migrate signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_migrate_signal(created_models, self.verbosity, self.interactive, connection.alias)
Example 19

Project: Django--an-app-at-a-time
Source File: migrate.py
View license
    def sync_apps(self, connection, app_labels):
        "Runs the old syncdb-style operation on a list of app_labels."
        cursor = connection.cursor()

        try:
            # Get a list of already installed *models* so that references work right.
            tables = connection.introspection.table_names(cursor)
            created_models = set()

            # Build the manifest of apps and models that are to be synchronized
            all_models = [
                (app_config.label,
                    router.get_migratable_models(app_config, connection.alias, include_auto_created=False))
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config.label in app_labels
            ]

            def model_installed(model):
                opts = model._meta
                converter = connection.introspection.table_name_converter
                # Note that if a model is unmanaged we short-circuit and never try to install it
                return not ((converter(opts.db_table) in tables) or
                    (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

            manifest = OrderedDict(
                (app_name, list(filter(model_installed, model_list)))
                for app_name, model_list in all_models
            )

            create_models = set(itertools.chain(*manifest.values()))
            emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias)

            # Create the tables for each model
            if self.verbosity >= 1:
                self.stdout.write("  Creating tables...\n")
            with transaction.atomic(using=connection.alias, savepoint=connection.features.can_rollback_ddl):
                deferred_sql = []
                for app_name, model_list in manifest.items():
                    for model in model_list:
                        if model._meta.proxy or not model._meta.managed:
                            continue
                        if self.verbosity >= 3:
                            self.stdout.write(
                                "    Processing %s.%s model\n" % (app_name, model._meta.object_name)
                            )
                        with connection.schema_editor() as editor:
                            if self.verbosity >= 1:
                                self.stdout.write("    Creating table %s\n" % model._meta.db_table)
                            editor.create_model(model)
                            deferred_sql.extend(editor.deferred_sql)
                            editor.deferred_sql = []
                        created_models.add(model)

                if self.verbosity >= 1:
                    self.stdout.write("    Running deferred SQL...\n")
                for statement in deferred_sql:
                    cursor.execute(statement)
        finally:
            cursor.close()

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()
        try:
            # Install custom SQL for the app (but only if this
            # is a model we've just created)
            if self.verbosity >= 1:
                self.stdout.write("  Installing custom SQL...\n")
            for app_name, model_list in manifest.items():
                for model in model_list:
                    if model in created_models:
                        custom_sql = custom_sql_for_model(model, no_style(), connection)
                        if custom_sql:
                            if self.verbosity >= 2:
                                self.stdout.write(
                                    "    Installing custom SQL for %s.%s model\n" %
                                    (app_name, model._meta.object_name)
                                )
                            try:
                                with transaction.atomic(using=connection.alias):
                                    for sql in custom_sql:
                                        cursor.execute(sql)
                            except Exception as e:
                                self.stderr.write(
                                    "    Failed to install custom SQL for %s.%s model: %s\n"
                                    % (app_name, model._meta.object_name, e)
                                )
                                if self.show_traceback:
                                    traceback.print_exc()
                        else:
                            if self.verbosity >= 3:
                                self.stdout.write(
                                    "    No custom SQL for %s.%s model\n" %
                                    (app_name, model._meta.object_name)
                                )
        finally:
            cursor.close()

        # Load initial_data fixtures (unless that has been disabled)
        if self.load_initial_data:
            for app_label in app_labels:
                call_command(
                    'loaddata', 'initial_data', verbosity=self.verbosity,
                    database=connection.alias, app_label=app_label,
                    hide_empty=True,
                )

        return created_models

Example 20

Project: django-json-rpc
Source File: test.py
View license
def start_json_server_thread():
  """Start a throwaway JSON-RPC test server on port 8999 in a background thread.

  Returns a started ``JSONServer`` whose ``stop()`` method ends the serving
  loop. The server thread prepares the test database (syncdb/migrate based
  on the Django version) and best-effort creates a test user before serving.
  """
  class JSONServer(object):
    def _thread_body(self):
      # Runs in the background thread: set up Django, then serve requests
      # one at a time until stop() clears continue_serving.
      try:
        from wsgiref.simple_server import make_server
        from django.core.handlers.wsgi import WSGIHandler
        import django
        ver = django.VERSION[:2]
        if ver >= (1, 7):
          django.setup() # populate app registry for django >= 1.8

        # Older Django has no migrations framework; fall back to syncdb.
        if ver <= (1, 7):
          management.call_command('syncdb', interactive=False)
        else:
          management.call_command('migrate', interactive=False)
        try:
          User.objects.create_user(username='sammeh', email='[email protected]', password='password').save()
        # Narrowed from a bare `except:`; still best-effort — the user may
        # already exist from a previous run.
        except Exception:
          pass

        http = make_server('', 8999, WSGIHandler())
        print('Server made. continue={0}'.format(self.continue_serving))
        self.event.set() # notify parent thread that the server is ready to serve requests
        while self.continue_serving:
          print('Waiting for request!')
          http.handle_request()
          self.n_requests += 1
          print('Handled {0} requests!'.format(self.n_requests))
        print('Got server stop! requests={0}'.format(self.n_requests))
        http.server_close()
        print('Server closed!')
      except Exception as e:
        import traceback
        traceback.print_exc()
        # Typo fix: message previously read 'Error startign server'.
        print('Error starting server: {0}'.format(e))
      finally:
        # Always unblock the parent thread, even if startup failed.
        if not self.event.is_set():
          self.event.set()

    def start(self):
      """Spawn the server thread and block until it is ready (or has failed)."""
      print('Got server start')
      self.continue_serving = True
      self.n_requests = 0
      self.event = threading.Event()
      self.t = threading.Thread(target=self._thread_body)
      self.t.start()
      self.event.wait()
      return self

    def stop(self):
      """Signal the serving loop to exit and join the thread."""
      print('Got stop call')
      self.continue_serving = False
      try:
        # Fire one dummy request so the blocking handle_request() returns
        # and the loop re-checks continue_serving.
        proxy = ServiceProxy('http://127.0.0.1:8999/json/', version=2.0)
        proxy.jsonrpc.test(string='Hello')['result']
      except Exception: # doesnt matter if this fails
        pass
      self.t.join(2.0)
      return self

  return JSONServer().start()

Example 21

Project: snowy
Source File: migration.py
View license
def migrate_app(app, target_name=None, resolve_mode=None, fake=False, db_dry_run=False, yes=False, silent=False, load_inital_data=False, skip=False):
    """Migrate ``app`` forwards or backwards to ``target_name``.

    ``target_name`` may be a full migration name, a unique prefix of one,
    "zero" (unapply everything), or None (migrate to the latest).  The
    direction is decided by comparing ``target_name`` against the applied
    entries in ``MigrationHistory``.  Returns False if a forwards migration
    errored, otherwise None.

    Note: the ``load_inital_data`` parameter name is misspelled upstream and
    is kept as-is for caller compatibility.
    """
    
    app_name = get_app_name(app)
    
    db.debug = not silent
    
    # If any of their app names in the DB contain a ., they're 0.2 or below, so migrate em
    longuns = MigrationHistory.objects.filter(app_name__contains=".")
    if longuns:
        for mh in longuns:
            mh.app_name = short_from_long(mh.app_name)
            mh.save()
        if not silent:
            print "- Updated your South 0.2 database."
    
    # Find out what delightful migrations we have
    tree = dependency_tree()
    migrations = get_migration_names(app)
    
    # If there aren't any, quit quizically
    if not migrations:
        if not silent:
            print "? You have no migrations for the '%s' app. You might want some." % app_name
        return
    
    # Allow a unique prefix of a migration name to select that migration.
    if target_name not in migrations and target_name not in ["zero", None]:
        matches = [x for x in migrations if x.startswith(target_name)]
        if len(matches) == 1:
            # NOTE(review): 'target' is assigned here but never used below.
            target = migrations.index(matches[0]) + 1
            if not silent:
                print " - Soft matched migration %s to %s." % (
                    target_name,
                    matches[0]
                )
            target_name = matches[0]
        elif len(matches) > 1:
            if not silent:
                print " - Prefix %s matches more than one migration:" % target_name
                print "     " + "\n     ".join(matches)
            return
        else:
            if not silent:
                print " ! '%s' is not a migration." % target_name
            return
    
    # Check there's no strange ones in the database
    ghost_migrations = []
    for m in MigrationHistory.objects.filter(applied__isnull = False):
        try:
            if get_app(m.app_name) not in tree or m.migration not in tree[get_app(m.app_name)]:
                ghost_migrations.append(m)
        except ImproperlyConfigured:
            pass
    
    # Refuse to run against a history that references unknown migrations.
    if ghost_migrations:
        if not silent:
            print " ! These migrations are in the database but not on disk:"
            print "   - " + "\n   - ".join(["%s: %s" % (x.app_name, x.migration) for x in ghost_migrations])
            print " ! I'm not trusting myself; fix this yourself by fiddling"
            print " ! with the south_migrationhistory table."
        return
    
    # Say what we're doing
    if not silent:
        print "Running migrations for %s:" % app_name
    
    # Get the forwards and reverse dependencies for this target
    if target_name == None:
        target_name = migrations[-1]
    if target_name == "zero":
        forwards = []
        backwards = needed_before_backwards(tree, app, migrations[0]) + [(app, migrations[0])]
    else:
        forwards = needed_before_forwards(tree, app, target_name) + [(app, target_name)]
        # When migrating backwards we want to remove up to and including
        # the next migration up in this app (not the next one, that includes other apps)
        try:
            migration_before_here = migrations[migrations.index(target_name)+1]
            backwards = needed_before_backwards(tree, app, migration_before_here) + [(app, migration_before_here)]
        except IndexError:
            # target_name is the last migration, so there is nothing above it
            # to unapply.
            backwards = []
    
    # Get the list of currently applied migrations from the db
    current_migrations = []
    for m in MigrationHistory.objects.filter(applied__isnull = False):
        try:
            current_migrations.append((get_app(m.app_name), m.migration))
        except ImproperlyConfigured:
            pass
    
    direction = None
    bad = False
    
    # Work out the direction
    applied_for_this_app = list(MigrationHistory.objects.filter(app_name=app_name, applied__isnull=False).order_by("migration"))
    if target_name == "zero":
        direction = -1
    elif not applied_for_this_app:
        direction = 1
    elif migrations.index(target_name) > migrations.index(applied_for_this_app[-1].migration):
        direction = 1
    elif migrations.index(target_name) < migrations.index(applied_for_this_app[-1].migration):
        direction = -1
    else:
        # Target is exactly the latest applied migration: nothing to do.
        direction = None
    
    # Is the whole forward branch applied?
    missing = [step for step in forwards if step not in current_migrations]
    # If they're all applied, we only know it's not backwards
    if not missing:
        direction = None
    # If the remaining migrations are strictly a right segment of the forwards
    # trace, we just need to go forwards to our target (and check for badness)
    else:
        problems = forwards_problems(tree, forwards, current_migrations, silent=silent)
        if problems:
            bad = True
        direction = 1
    
    # What about the whole backward trace then?
    if not bad:
        missing = [step for step in backwards if step not in current_migrations]
        # If they're all missing, stick with the forwards decision
        if missing == backwards:
            pass
        # If what's missing is a strict left segment of backwards (i.e.
        # all the higher migrations) then we need to go backwards
        else:
            problems = backwards_problems(tree, backwards, current_migrations, silent=silent)
            if problems:
                bad = True
            direction = -1
    
    # Inconsistent history aborts unless the caller opted into merge/skip.
    if bad and resolve_mode not in ['merge'] and not skip:
        if not silent:
            print " ! Inconsistent migration history"
            print " ! The following options are available:"
            print "    --merge: will just attempt the migration ignoring any potential dependency conflicts."
        sys.exit(1)
    
    if direction == 1:
        if not silent:
            print " - Migrating forwards to %s." % target_name
        try:
            for mapp, mname in forwards:
                if (mapp, mname) not in current_migrations:
                    result = run_forwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, silent=silent)
                    if result is False: # The migrations errored, but nicely.
                        return False
        finally:
            # Call any pending post_syncdb signals
            db.send_pending_create_signals()
        # Now load initial data, only if we're really doing things and ended up at current
        if not fake and not db_dry_run and load_inital_data and target_name == migrations[-1]:
            print " - Loading initial data for %s." % app_name
            # Override Django's get_apps call temporarily to only load from the
            # current app
            old_get_apps, models.get_apps = (
                models.get_apps,
                lambda: [models.get_app(get_app_name(app))],
            )
            # Load the initial fixture
            call_command('loaddata', 'initial_data', verbosity=1)
            # Un-override
            models.get_apps = old_get_apps
    elif direction == -1:
        if not silent:
            print " - Migrating backwards to just after %s." % target_name
        for mapp, mname in backwards:
            if (mapp, mname) in current_migrations:
                run_backwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, silent=silent)
    else:
        if not silent:
            print "- Nothing to migrate."

Example 22

Project: django-systemjs
Source File: test_management.py
View license
    @mock.patch('systemjs.management.commands.systemjs_bundle.find_systemjs_location')
    def test_collectstatic_retain_bundle_manifest(self, systemjs_mock, bundle_mock):
        """
        Issue #13: bundling full, followed by collectstatic, followed by
        bundling a single template removes the bundle entries from the manifest
        (staticfiles.json). This may not happen.

        Reference: https://github.com/sergei-maertens/django-systemjs/issues/13#issuecomment-243968551
        """
        bundle_mock.side_effect = _bundle
        systemjs_mock.return_value = '/non/existant/path/'

        # STATIC_ROOT must start empty so the file counts below are exact.
        base = os.path.abspath(settings.STATIC_ROOT)
        self.assertEqual(_num_files(base), 0)

        call_command('collectstatic', interactive=False)

        # dummy.js + dummy.hash.js + staticfiles.json + dependency.js + dependency.hash.js
        self.assertEqual(_num_files(base), 5)
        with open(os.path.join(base, 'staticfiles.json')) as infile:
            manifest = json.loads(infile.read())
        self.assertEqual(manifest['paths'], {
            'app/dummy.js': 'app/dummy.65d75b61cae0.js',
            'app/dependency.js': 'app/dependency.d41d8cd98f00.js',
        })

        # bundle the files and check that the bundled file is post-processed
        call_command('systemjs_bundle', stdout=self.out, stderr=self.err)
        # + 2 bundled files + 2 post-processed files (not staticfiles.json!) + systemjs manifest
        self.assertEqual(_num_files(base), 8)
        with open(os.path.join(base, 'staticfiles.json')) as infile:
            manifest = json.loads(infile.read())
        self.assertEqual(manifest['paths'], {
            'app/dummy.js': 'app/dummy.65d75b61cae0.js',
            'app/dependency.js': 'app/dependency.d41d8cd98f00.js',
            'SYSTEMJS/app/dummy.js': 'SYSTEMJS/app/dummy.5d1dad25dae3.js',
        })

        # wipes the bundles from the manifest without the systemjs manifest mixin
        with override_settings(STATICFILES_DIRS=[os.path.join(os.path.dirname(__file__), 'static1')]):
            # Re-run collectstatic with an extra static dir; the SYSTEMJS
            # entry must survive in the manifest (the regression in issue #13).
            call_command('collectstatic', interactive=False)
            self.assertEqual(_num_files(base), 10)
            with open(os.path.join(base, 'staticfiles.json')) as infile:
                manifest = json.loads(infile.read())

            self.assertEqual(manifest['paths'], {
                'app/dummy.js': 'app/dummy.65d75b61cae0.js',
                'app/dependency.js': 'app/dependency.d41d8cd98f00.js',
                'dummy2.js': 'dummy2.65d75b61cae0.js',
                'SYSTEMJS/app/dummy.js': 'SYSTEMJS/app/dummy.5d1dad25dae3.js',
            })

            # Bundle just one extra template; previously this dropped the
            # other SYSTEMJS entries from the manifest.
            with add_tpl_dir(os.path.join(os.path.dirname(__file__), 'templates2')):
                call_command('systemjs_bundle', '--template', 'extra.html', stdout=self.out, stderr=self.err)

        with open(os.path.join(base, 'staticfiles.json')) as infile:
            manifest = json.loads(infile.read())
        self.assertEqual(_num_files(base), 12)
        self.assertEqual(manifest['paths'], {
            'app/dummy.js': 'app/dummy.65d75b61cae0.js',
            'app/dependency.js': 'app/dependency.d41d8cd98f00.js',
            'dummy2.js': 'dummy2.65d75b61cae0.js',
            'SYSTEMJS/app/dummy.js': 'SYSTEMJS/app/dummy.5d1dad25dae3.js',
            'SYSTEMJS/dummy2.js': 'SYSTEMJS/dummy2.5d1dad25dae3.js',
        })

Example 23

Project: callisto-core
Source File: test_views.py
View license
    @override_settings(MATCH_IMMEDIATELY=False)
    def test_match_sends_report_delayed(self):
        """With MATCH_IMMEDIATELY off, two matching submissions send no mail
        until the ``find_matches`` command runs, after which two notification
        emails and one delivery email (with a PDF/GPG attachment) go out.
        """
        # First submitter reports the same perpetrator identifier that the
        # second report will use, creating the eventual match.
        self.client.post((self.submission_url % self.report.pk),
                         data={'name': 'test submitter 1',
                               'email': '[email protected]',
                               'phone_number': '555-555-1212',
                               'email_confirmation': "False",
                               'key': self.report_key,
                               'form-0-perp': 'facebook.com/triggered_match',
                               'form-TOTAL_FORMS': '1',
                               'form-INITIAL_FORMS': '1',
                               'form-MAX_NUM_FORMS': '', })
        # Second user submits an encrypted report with the same perp URL.
        user2 = User.objects.create_user(username='dummy2', password='dummy')
        self.client.login(username='dummy2', password='dummy')
        report2_text = """[
    { "answer": "test answer",
      "id": %i,
      "section": 1,
      "question_text": "first question",
      "type": "SingleLineText"
    },
    { "answer": "another answer to a different question",
      "id": %i,
      "section": 1,
      "question_text": "2nd question",
      "type": "SingleLineText"
    }
  ]""" % (self.question1.pk, self.question2.pk)
        report2 = Report(owner=user2)
        report2_key = 'a key a key a key a key key'
        report2.encrypt_report(report2_text, report2_key)
        report2.save()
        response = self.client.post((self.submission_url % report2.pk),
                                    data={'name': 'test submitter 2',
                                          'email': '[email protected]',
                                          'phone_number': '555-555-1213',
                                          'email_confirmation': "False",
                                          'key': report2_key,
                                          'form-0-perp': 'facebook.com/triggered_match',
                                          'form-TOTAL_FORMS': '1',
                                          'form-INITIAL_FORMS': '1',
                                          'form-MAX_NUM_FORMS': '', })
        self.assertNotIn('submit_error', response.context)
        # Nothing is sent at submission time when matching is deferred.
        self.assertEqual(len(mail.outbox), 0)
        call_command('find_matches')
        # One notification per submitter plus one delivery to the reports inbox.
        self.assertEqual(len(mail.outbox), 3)
        message = mail.outbox[0]
        self.assertEqual(message.subject, 'test match notification')
        self.assertEqual(message.to, ['[email protected]'])
        self.assertIn('Matching" <[email protected]', message.from_email)
        self.assertIn('test match notification body', message.body)
        message = mail.outbox[1]
        self.assertEqual(message.subject, 'test match notification')
        self.assertEqual(message.to, ['[email protected]'])
        self.assertIn('Matching" <[email protected]', message.from_email)
        self.assertIn('test match notification body', message.body)
        message = mail.outbox[2]
        self.assertEqual(message.subject, 'test match delivery')
        self.assertEqual(message.to, ['[email protected]'])
        self.assertIn('"Reports" <[email protected]', message.from_email)
        self.assertIn('test match delivery body', message.body)
        self.assertRegexpMatches(message.attachments[0][0], 'report_.*\\.pdf\\.gpg')

Example 24

Project: splunk-webframework
Source File: flush.py
View license
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # 'reset_sequences' is a stealth option
        reset_sequences = options.get('reset_sequences', True)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True, reset_sequences=reset_sequences)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
            except Exception as e:
                transaction.rollback_unless_managed(using=db)
                raise CommandError("""Database %s couldn't be flushed. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: %s""" % (connection.settings_dict['NAME'], e))
            transaction.commit_unless_managed(using=db)

            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend([
                    m for m in models.get_models(app, include_auto_created=True)
                    if router.allow_syncdb(db, m)
                ])
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            kwargs = options.copy()
            kwargs['database'] = db
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture.
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")

Example 25

Project: splunk-webframework
Source File: syncdb.py
View license
    def handle_noargs(self, **options):
        """Create database tables for all installed models that don't yet exist.

        Steps, in order: import each app's ``management`` module (to register
        signal handlers), create missing tables, emit ``post_syncdb``, install
        each new model's custom SQL, install indexes, and finally load the
        ``initial_data`` fixtures unless disabled.
        """

        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        show_traceback = options.get('traceback')
        load_initial_data = options.get('load_initial_data')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError as exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise

Example 26

Project: mezzanine
Source File: runtests.py
View license
def main(package="mezzanine"):
    """
    This is the main test function called via ``python setup.py test``.
    It's responsible for hacking the ``project_template`` dir into
    an actual project to test against.
    """

    from mezzanine.utils.importing import path_for_import
    package_path = path_for_import(package)
    project_path = os.path.join(package_path, "project_template")

    os.environ["DJANGO_SETTINGS_MODULE"] = "project_name.test_settings"

    project_app_path = os.path.join(project_path, "project_name")

    local_settings_path = os.path.join(project_app_path, "local_settings.py")
    test_settings_path = os.path.join(project_app_path, "test_settings.py")

    # Make both the package and the fabricated project importable.
    sys.path.insert(0, package_path)
    sys.path.insert(0, project_path)

    # Generate test_settings.py once from the local_settings template,
    # prepending test-only overrides; cleaned up at interpreter exit.
    if not os.path.exists(test_settings_path):
        shutil.copy(local_settings_path + ".template", test_settings_path)
        with open(test_settings_path, "r") as f:
            local_settings = f.read()
        with open(test_settings_path, "w") as f:
            test_settings = """

from . import settings

globals().update(i for i in settings.__dict__.items() if i[0].isupper())

# Require the mezzanine.accounts app. We use settings.INSTALLED_APPS here so
# the syntax test doesn't complain about an undefined name.
if "mezzanine.accounts" not in settings.INSTALLED_APPS:
    INSTALLED_APPS = list(settings.INSTALLED_APPS) + ["mezzanine.accounts"]

# Use the MD5 password hasher by default for quicker test runs.
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)

"""
            f.write(test_settings + local_settings)

        def cleanup_test_settings():
            import os  # Outer scope sometimes unavailable in atexit functions.
            # Remove the generated settings and its compiled .pyc sibling.
            for fn in [test_settings_path, test_settings_path + 'c']:
                try:
                    os.remove(fn)
                except OSError:
                    pass
        atexit.register(cleanup_test_settings)

    django.setup()

    # Exit with the test runner's status code; call path differs by version.
    from django.core.management.commands import test
    if django.VERSION < (1, 10):
        sys.exit(test.Command().execute(verbosity=1))
    sys.exit(call_command(test.Command(), verbosity=1))

Example 27

Project: elenas_inbox
Source File: mail_dedupe.py
View license
    def handle_noargs(self, **options):
        """Interactively de-duplicate Person rows that share a name_hash.

        Iterates people ordered by name_hash, buffering consecutive rows with
        the same hash; when the hash changes, prompts the operator to merge
        the buffered group ('R' renames all then merges, a number picks the
        merge target, the last index skips).
        """
        buf = []
        last_name = None
        for p in Person.objects.all().order_by('name_hash', 'id'):

            # time to clear the buffer?
            if last_name is not None and last_name!=p.name_hash:

                # do nothing on empty buf
                if len(buf)>1:
                    
                    # make sure we haven't already merged these guys
                    found_names = {}
                    for x in buf:
                        found_names[x.name] = True
                    print found_names
                    
                    if (len(found_names.keys())>1) and (not self._buf_is_already_merged(buf)):
                    
                        print last_name
                        for i in range(0, len(buf)):
                            print "  %d. %s" % (i, str(buf[i]))
                        print "  %d. No merge" % len(buf)
                        choice_num = raw_input('Merge into: ')

                        if choice_num.upper().strip()=='R':
                            new_name = raw_input('New name: ')
                            new_name = new_name.strip()
                            # NOTE(review): this inner loop reuses the outer
                            # loop variable 'p', so after it runs the outer 'p'
                            # is the last buffered person, not the current row;
                            # 'buf = [p]' and 'last_name = p.name_hash' below
                            # then operate on the wrong record. Verify intent.
                            for p in buf:
                                p.name = new_name
                                p.save()
                        
                            call_command('mail_combine_people', *(map(lambda x: x.id, buf))) # it doesn't matter who we merge into since they all have the same name
                    
                        else:

                            if int(choice_num)<(len(buf)):
                                print "got %d" % int(choice_num)
                                choice = buf[int(choice_num)]

                                # Merge target goes first; everyone else follows.
                                other_ids = [choice.id]
                                for x in buf:
                                    if x.id!=choice.id:
                                        other_ids.append(x.id)

                                print "merging %s into %s" % (map(lambda x: int(x), other_ids), choice.name)

                                call_command('mail_combine_people', *other_ids)
                
                # Start a fresh buffer with the first row of the new hash group.
                buf = [p]
                
            else:
                buf.append(p)
            
            last_name = p.name_hash

Example 28

Project: django-evolution
Source File: runtests.py
View license
def run_tests(verbosity=1, interactive=False):
    """Create test databases, run the nose suite, then tear everything down.

    Supports both pre- and post-1.7 Django: the app registry is populated
    when available and the schema is built with whichever command the
    installed version provides ('migrate' vs. the legacy 'syncdb').
    """
    from django.conf import settings
    from django.core import management
    from django.db import connections
    from django.test.utils import setup_test_environment, \
                                  teardown_test_environment

    # Django >= 1.7 requires the app registry to be populated explicitly.
    if hasattr(django, 'setup'):
        django.setup()

    setup_test_environment()
    settings.DEBUG = False

    # Remember each connection's real database name so the test database
    # can be destroyed (and the name restored) after the run.
    saved_db_names = []
    for alias in connections:
        conn = connections[alias]
        saved_db_names.append((conn, conn.settings_dict['NAME']))
        conn.creation.create_test_db(verbosity,
                                     autoclobber=not interactive)

    # 'syncdb' was replaced by 'migrate' in Django 1.7.
    sync_command = 'migrate' if django.VERSION[:2] >= (1, 7) else 'syncdb'
    management.call_command(sync_command, verbosity=verbosity,
                            interactive=interactive)

    nose_argv = ['runtests.py', '-v',
                 '--with-coverage',
                 '--with-doctest',
                 '--doctest-extension=.txt',
                 '--cover-package=django_evolution',
                 '--match=tests[\/]*.py',
                 '--match=^test']

    # Forward any extra command-line arguments straight to nose
    # (a no-op when there are none).
    nose_argv.extend(sys.argv[2:])

    nose.run(argv=nose_argv)

    for conn, name in saved_db_names:
        conn.creation.destroy_test_db(name, verbosity=0)

    teardown_test_environment()

Example 29

Project: tendenci
Source File: fake_initials.py
View license
    def handle(self, *args, **options):
        try:
            call_command('migrate', 'contenttypes', '0001', '--fake')
        except:
            pass
        try:
            call_command('migrate', 'contenttypes')
        except:
            # might need to fake 0002
            call_command('migrate', 'contenttypes', '0002', '--fake')
        try:
            call_command('migrate', 'auth', '0001', '--fake')
            call_command('migrate', 'auth')
        except:
            pass

        apps = ('admin',
                'user_groups',
                'entities',
                'accountings',
                'announcements',
                'articles',
                'base',
                'boxes',
                'campaign_monitor',
                'captcha',
                'careers',
                'case_studies',
                'categories',
                'committees',
                'contacts',
                'contributions',
                'corporate_memberships',
                'dashboard',
                'directories',
                'discounts',
                'donations',
                'educations',
                'email_blocks',
                'emails',
                'event_logs',
                'events',
                'explorer',
                'explorer_extensions',
                'exports',
                'files',
                'forms',
                'handler404',
                'help_files',
                'ics',
                'imports',
                'industries',
                'invoices',
                'jobs',
                'locations',
                'make_payments',
                'memberships',
                'meta',
                'metrics',
                'navs',
                'news',
                'newsletters',
                'notifications',
                'pages',
                'payments',
                'perms',
                'photos',
                'profiles',
                'recurring_payments',
                'redirects',
                'regions',
                'registration',
                'reports',
                'resumes',
                'robots',
                'search',
                'sessions',
                'site_settings',
                'sites',
                'social_auth',
                'speakers',
                'staff',
                'stories',
                'studygroups',
                'tagging',
                'tendenci_guide',
                'testimonials',
                'theme_editor',
                'versions',
                'videos',
                'wp_exporter',
                'wp_importer',
                'djcelery',
                'tastypie'
                )
        for a in apps:
            try:
                call_command('migrate', a, '0001', '--fake')
            except:
                print traceback.format_exc()

Example 30

Project: tendenci
Source File: load_base_defaults.py
View license
    def call_loaddata(self, reset_nav=False):
        """
        This calls the loaddata command on all
        non profit fixtures.
        The order - It's a big deal.
        """
        from tendenci.apps.files.models import File

        if reset_nav:
            from tendenci.apps.navs.models import NavItem
            try:
                main_nav_items = NavItem.objects.filter(nav_id=1)
                main_nav_items.delete()
            except:
                pass

        staff_installed = "addons.staff" in settings.INSTALLED_APPS
        print 'npo_default_auth_user.json'
        call_command('loaddata', 'npo_default_auth_user.json')
        print 'npo_default_entities.json'
        call_command('loaddata', 'npo_default_entities.json')
        print 'npo_default_user_groups.json'
        call_command('loaddata', 'npo_default_user_groups.json')
        print 'npo_default_files.json'
        call_command('loaddata', 'npo_default_files.json')
        print 'paymentmethod.json'
        call_command('loaddata', 'paymentmethod.json')
        print 'load default_forums.json'
        call_command('loaddata', 'default_forums.json')
        print 'load npo_default_directories_pricings.json'
        call_command('loaddata', 'npo_default_directories_pricings.json')
        
        # default sqls for explorer
        call_command('load_sqlexplorer_defaults')
        

        box_ct = ContentType.objects.get(app_label='boxes', model='box')
        story_ct = ContentType.objects.get(app_label='stories', model='story')
        setting_ct = ContentType.objects.get(app_label='site_settings', model='setting')
        if staff_installed:
            staff_ct = ContentType.objects.get(app_label='staff', model='staff')

        files = File.objects.all()

        print 'updating files'
        for f in files:

            if 'box' in unicode(f.file):
                f.content_type = box_ct
            if 'story' in unicode(f.file):
                f.content_type = story_ct
            if 'setting' in unicode(f.file):
                f.content_type = setting_ct
            if 'staff' in unicode(f.file) and staff_installed:
                f.content_type = staff_ct

            f.save()

        suffix_list = [
            'profiles_profile',
            'forms',
            'boxes',
            'pages',
            'navs',
            'stories',
            'videos',
        ]

        # call loaddata on fixtures
        for suffix in suffix_list:
            filename = 'npo_default_%s.json' % suffix

            print filename
            call_command('loaddata', filename)

Example 31

Project: tendenci
Source File: load_npo_defaults.py
View license
    def call_loaddata(self, reset_nav=False):
        """
        This calls the loaddata command on all
        non profit fixtures.
        The order - It's a big deal.
        """
        from tendenci.apps.files.models import File

        if reset_nav:
            from tendenci.apps.navs.models import NavItem
            try:
                main_nav_items = NavItem.objects.filter(nav_id=1)
                main_nav_items.delete()
            except:
                pass

        staff_installed = "addons.staff" in settings.INSTALLED_APPS
        print 'npo_default_auth_user.json'
        call_command('loaddata', 'npo_default_auth_user.json')
        print 'npo_default_entities.json'
        call_command('loaddata', 'npo_default_entities.json')
        print 'npo_default_user_groups.json'
        call_command('loaddata', 'npo_default_user_groups.json')
        print 'npo_default_files.json'
        call_command('loaddata', 'npo_default_files.json')
        print 'load paymentmethod.json'
        call_command('loaddata', 'paymentmethod.json')
        print 'load default_forums.json'
        call_command('loaddata', 'default_forums.json')
        print 'load regions_region.json'
        call_command('loaddata', 'regions_region.json')
        print 'load npo_default_directories_pricings.json'
        call_command('loaddata', 'npo_default_directories_pricings.json')
        
        
        # default sqls for explorer
        call_command('load_sqlexplorer_defaults')
        

        box_ct = ContentType.objects.get(app_label='boxes', model='box')
        story_ct = ContentType.objects.get(app_label='stories', model='story')
        setting_ct = ContentType.objects.get(app_label='site_settings', model='setting')
        if staff_installed:
            staff_ct = ContentType.objects.get(app_label='staff', model='staff')

        files = File.objects.all()

        print 'updating files'
        for f in files:

            if 'box' in unicode(f.file):
                f.content_type = box_ct
            if 'story' in unicode(f.file):
                f.content_type = story_ct
            if 'setting' in unicode(f.file):
                f.content_type = setting_ct
            if 'staff' in unicode(f.file) and staff_installed:
                f.content_type = staff_ct

            f.save()

        suffix_list = [
            'profiles_profile',
            'events',
            'jobs',
            'memberships',
            'memberships_membershipdefault',
            'directories',
            'articles',
            'forms',
            'news',
            'photos',
            'boxes',
            'pages',
            'navs',
            'stories',
            'videos',
        ]

        # call loaddata on fixtures
        for suffix in suffix_list:
            filename = 'npo_default_%s.json' % suffix

            print filename
            call_command('loaddata', filename)

Example 32

Project: tendenci
Source File: update_tendenci.py
View license
    def handle(self, *args, **options):
        from tendenci.apps.site_settings.utils import get_setting

        pass_update_tendenci = False
        pass_update_tendenci_site = False
        pass_restart_server = False
        is_uwsgi = False
        gunicorn_error_msg = None
        uwsgi_error_msg = None
        errors_list = []

        pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
        latest_version = pypi.package_releases('tendenci')[0]
        error_message = ""
        email_context = {'site_url':get_setting('site', 'global', 'siteurl'),
                         'version':latest_version, 'error_message':error_message}

        email_sender = get_setting('site', 'global', 'siteemailnoreplyaddress') or settings.DEFAULT_FROM_EMAIL
        email_recipient = ""
        user_id = options['user']
        if User.objects.filter(pk=user_id).exists():
            user = User.objects.get(pk=user_id)
            if user.email:
                email_recipient = user.email

        try:
            print "Updating tendenci"
            subprocess.check_output("pip install tendenci --upgrade", stderr=subprocess.STDOUT, shell=True)
            pass_update_tendenci = True

        except subprocess.CalledProcessError as e:
            errors_list.append(e.output)

        # run python deploy.py iff update_tendenci is successful
        if pass_update_tendenci:
            try:
                print "Updating tendenci site"
                subprocess.check_output("python deploy.py", stderr=subprocess.STDOUT, shell=True)
                pass_update_tendenci_site = True

            except subprocess.CalledProcessError as e:
                errors_list.append(e.output)

        # run reload if update is done
        if pass_update_tendenci_site:
            try:
                print "Restarting Server"
                subprocess.check_output("sudo reload %s" % os.path.basename(settings.PROJECT_ROOT),
                                    stderr=subprocess.STDOUT, shell=True)

            except subprocess.CalledProcessError as e:
                gunicorn_error_msg = e.output
                if "reload: Unknown job:" in e.output:
                    is_uwsgi = True

        # run usgi command iff it was proven that the site is using uwsgi instead
        if is_uwsgi:
            try:
                print "Restarting Server"
                subprocess.check_output("sudo touch /etc/uwsgi/vassals/%s.ini" % os.path.basename(settings.PROJECT_ROOT),
                                    stderr=subprocess.STDOUT, shell=True)

            except subprocess.CalledProcessError as e:
                uwsgi_error_msg = e.output

        if gunicorn_error_msg and uwsgi_error_msg:
            errors_list.append(uwsgi_error_msg)
            errors_list.append(gunicorn_error_msg)

        try:
            print "Clearing cache"
            call_command('clear_cache')
        except CommandError as e:
            errors_list.append(e.output)

        email_context['errors_list'] = errors_list

        if email_recipient:
            subject = render_to_string('notification/update_tendenci_notice/short.txt', email_context)
            subject = subject.strip('\n').strip('\r')
            body = render_to_string('notification/update_tendenci_notice/full.html', email_context)
            email = EmailMessage()
            email.subject = subject
            email.body = body
            email.from_email = email_sender
            email.to = [email_recipient]
            email.content_subtype = 'html'
            email.send()

Example 33

View license
    def test_index_sections(self):
        """Tests the command located in `index_sections.py`.

        Sections with a real query are indexed as Elasticsearch percolator
        documents by the command; sections with an empty query are left
        unindexed.
        """

        # Create 50 sections, each with a non-empty tag-based query.
        for i in range(50):
            section_name = 'Section-{}'.format(i)
            section_slug = 'section-{}'.format(i)
            query = {
                'label': section_name,
                'query': {
                    'groups': [
                        {
                            'conditions': [
                                {
                                    'type': 'all',
                                    'field': 'tag',
                                    'values': [
                                        {
                                            'value': section_slug,
                                            'label': section_name
                                        }
                                    ]
                                }

                            ]
                        }
                    ]
                }
            }
            Section.objects.create(name=section_name, query=query)

        assert Section.objects.count() == 50
        # Before the command runs, no percolator document exists, so the
        # ES lookup raises TransportError.
        with self.assertRaises(TransportError):
            section = Section.objects.first()
            _id = "{}.{}".format(section.name, section.id)
            self.es.get(
                index=Content.search_objects.mapping.index,
                doc_type='.percolator',
                id=_id
            )

        # 50 more sections with no query at all (mommy leaves it empty).
        empty_sections = mommy.make(Section, _quantity=50)
        db_empty_section_ids = [s.id for s in Section.objects.all() if s.query == {} or s.query is None]

        assert len(db_empty_section_ids) == len(empty_sections)
        # Empty-query sections are likewise not in the index yet.
        with self.assertRaises(TransportError):
            section = empty_sections[0]
            _id = "{}.{}".format(section.name, section.query)
            self.es.get(
                index=Content.search_objects.mapping.index,
                doc_type='.percolator',
                id=_id,
            )

        # Run the command under test, then verify every section that HAS a
        # query now exists as a percolator document under its es_id.
        call_command('index_sections')
        query_sections = Section.objects.exclude(id__in=db_empty_section_ids)

        for section in query_sections:
            response = self.es.get(
                index=Content.search_objects.mapping.index,
                doc_type='.percolator',
                id=section.es_id
            )
            assert response['found']
            assert response['_id'] == section.es_id

Example 34

Project: django-extras
Source File: runner.py
View license
def main():
    """Bootstrap a minimal Django environment and run a single app's tests.

    Usage::

        app_test_runner.py [path-to-app]

    Django must already be on the PYTHONPATH.  By default an in-memory
    SQLite database is used, so on a stock Python install this works out
    of the box.
    """
    parser = OptionParser()
    # All the plain string database options share one shape; SITE_ID is
    # the only typed one.
    for opt_name, opt_default in (
            ("DATABASE_ENGINE", "django.db.backends.sqlite3"),
            ("DATABASE_NAME", ":memory:"),
            ("DATABASE_USER", ""),
            ("DATABASE_PASSWORD", "")):
        parser.add_option("--" + opt_name, dest=opt_name, default=opt_default)
    parser.add_option("--SITE_ID", dest="SITE_ID", type="int", default=1)

    options, args = parser.parse_args()

    # The app path is the one required positional argument.
    if not args:
        print("You did not provide an app path.")
        raise SystemExit
    app_path = args[0]
    if app_path.endswith("/"):
        app_path = app_path[:-1]
    parent_dir, app_name = os.path.split(app_path)
    # Make the app importable by name.
    sys.path.insert(0, parent_dir)

    settings.configure(
        DATABASES={
            'default': {
                'ENGINE': options.DATABASE_ENGINE,
                'NAME': options.DATABASE_NAME,  # ':memory:',
                'USER': options.DATABASE_USER,
                'PASSWORD': options.DATABASE_PASSWORD,
                'HOST': '',
                'PORT': '',
            }
        },
        SECRET_KEY="Test Key",
        ROOT_URLCONF="django_app_test.urls",
        TEMPLATE_LOADERS=(
            "django.template.loaders.app_directories.Loader",
        ),
        TEMPLATE_DIRS=tuple(),
        INSTALLED_APPS=(
            "django.contrib.auth",
            "django.contrib.contenttypes",
            "django.contrib.sessions",
            app_name,
        ),
        # Force test runner to be the pre django 1.6 test runner. This tool
        # does not work with the new default in 1.6.
        TEST_RUNNER="django.test.simple.DjangoTestSuiteRunner",
    )

    # Populate Django 1.7's app registry before calling a command.
    if hasattr(django, 'setup'):
        django.setup()

    call_command("test", app_name)

Example 35

Project: ion
Source File: maintenance.py
View license
    def run(self):
        """Run the SIS import: convert the CSV to LDIF, import the LDIF
        files into LDAP, then email the accumulated log and remove the
        working folder.
        """
        start_time = datetime.datetime.now()
        content = StringIO()  # accumulates the human-readable run log
        failure = False

        try:
            content.write("=== Starting CSV to LDIF script.\n\n")
            os.chdir(self.folder)
            call_command("import_sis", csv_file=os.path.join(self.folder, "data.csv"), run=True, confirm=True, stdout=content, stderr=content)
            content.write("\n=== Finished CSV to LDIF script.\n")

            content.write("=== Starting LDIF import.\n")
            ldifs_imported = 0
            for f in os.listdir(self.folder):
                if f.endswith(".ldif"):
                    content.write("=== Importing {}\n".format(f))
                    # ldap3 does not support importing LDIF files, so shell
                    # out to ldapmodify.
                    # Bug fix: subprocess.check_call() takes the command as a
                    # single argument list; the original passed each token as
                    # a separate positional argument, which fed them into
                    # Popen's bufsize/executable/... parameters.
                    # NOTE(review): stdout/stderr here are StringIO objects,
                    # which have no fileno(); subprocess normally needs real
                    # file descriptors -- confirm output capture works.
                    subprocess.check_call(["ldapmodify", "-h", settings.LDAP_SERVER[7:], "-Y", "GSSAPI", "-f", f], env={
                        "KRB5CCNAME": os.environ["KRB5CCNAME"]
                    }, stdout=content, stderr=content)
                    content.write("=== Imported {}\n".format(f))
                    ldifs_imported += 1
            if ldifs_imported == 0:
                content.write("=== WARNING: No LDIF files were imported!\n")
                failure = True
            else:
                content.write("=== {} LDIF files were imported.\n".format(ldifs_imported))
            content.write("=== Finished LDIF import.\n")

            content.write("Processing complete.\n")
        except Exception:
            # Any failure aborts the import but still produces the email.
            failure = True
            content.write("\n=== An error occured during the import process!\n\n")
            content.write(traceback.format_exc())
            content.write("\n=== The import process has been aborted.")

        content.seek(0)

        data = {
            "log": content.read(),
            "failure": failure,
            "help_email": settings.FEEDBACK_EMAIL,
            "date": start_time.strftime("%I:%M:%S %p %m/%d/%Y")
        }
        email_send("eighth/emails/import_notify.txt", "eighth/emails/import_notify.html", data, "SIS Import Results - {}".format("Failure" if failure else "Success"), [self.email])
        shutil.rmtree(self.folder)

Example 36

Project: django-tenants
Source File: create_tenant.py
View license
    def handle(self, *args, **options):
        """Interactively create a tenant and its domain.

        Fields may be supplied via command-line options; anything missing
        is prompted for, and each save is retried from scratch until it
        succeeds.  With the ``s`` option a superuser is then created inside
        the new tenant's schema.
        """
        # Seed tenant fields from the command-line options.
        tenant_data = {}
        # Bug fix: the original initialised this to the BaseCommand *class*
        # as a placeholder; None is the proper "not created yet" sentinel
        # (the loop below always assigns it before use either way).
        tenant = None
        for field in self.tenant_fields:
            input_value = options.get(field.name, None)
            tenant_data[field.name] = input_value

        domain_data = {}
        for field in self.domain_fields:
            input_value = options.get(field.name, None)
            domain_data[field.name] = input_value

        # Prompt for missing tenant fields until the tenant saves cleanly.
        while True:
            for field in self.tenant_fields:
                if tenant_data.get(field.name, '') == '':
                    input_msg = field.verbose_name
                    default = field.get_default()
                    if default:
                        input_msg = "%s (leave blank to use '%s')" % (input_msg, default)

                    input_value = input(force_str('%s: ' % input_msg)) or default
                    tenant_data[field.name] = input_value
            tenant = self.store_tenant(**tenant_data)
            if tenant is not None:
                break
            # Saving failed: discard the answers and start over.
            tenant_data = {}

        # Same prompt/retry loop for the domain attached to the new tenant.
        while True:
            domain_data['tenant'] = tenant
            for field in self.domain_fields:
                if domain_data.get(field.name, '') == '':
                    input_msg = field.verbose_name
                    default = field.get_default()
                    if default:
                        input_msg = "%s (leave blank to use '%s')" % (input_msg, default)

                    input_value = input(force_str('%s: ' % input_msg)) or default
                    domain_data[field.name] = input_value
            domain = self.store_tenant_domain(**domain_data)
            if domain is not None:
                break
            domain_data = {}

        # Optionally create a superuser in the new tenant's schema.
        if options.get('s', None):
            self.stdout.write("Create superuser for %s" % tenant_data['schema_name'])
            call_command('create_tenant_superuser', schema_name=tenant_data['schema_name'], interactive=True)

Example 37

Project: wagtail
Source File: test_management_commands.py
View license
    def test_go_live_page_will_be_published(self):
        """A page with a past ``go_live_at`` and an approved revision is
        published by ``publish_scheduled_pages``, and ``page_published``
        fires for it.
        """
        # Connect a mock signal handler to page_published signal
        signal_fired = [False]
        signal_page = [None]

        def page_published_handler(sender, instance, **kwargs):
            signal_fired[0] = True
            signal_page[0] = instance

        page_published.connect(page_published_handler)


        # An unpublished page whose go-live time is already in the past.
        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            content="hello",
            live=False,
            has_unpublished_changes=True,
            go_live_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)

        # Approve the revision so the command is allowed to publish it.
        page.save_revision(approved_go_live_at=timezone.now() - timedelta(days=1))

        # Sanity check: not live yet, approved revision pending.
        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

        management.call_command('publish_scheduled_pages')

        # The page is now live and the approved revision has been consumed.
        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)
        self.assertTrue(p.first_published_at)
        self.assertFalse(p.has_unpublished_changes)
        self.assertFalse(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

        # Check that the page_published signal was fired
        self.assertTrue(signal_fired[0])
        self.assertEqual(signal_page[0], page)
        self.assertEqual(signal_page[0], signal_page[0].specific)

Example 38

Project: transifex
Source File: base2.py
View license
    def setUp(self):
        """Set up a sample set of base objects for inherited tests.

        Builds on the objects created by the superclass (users, project,
        resource and their private counterparts): adds a writer permission,
        public/private teams with all roles populated, releases, and a dict
        of commonly-used URLs.

        If you are inheriting the class and overriding setUp, don't forget to
        call super::

          from transifex.txcommon.tests import (base, utils)
          class TestClassName(base.BaseTestCase)
              def setUp(self):
                  super(TestClassName, self).setUp()

        """
        super(TransactionBaseTestCase, self).setUp()

        # Add django-authority permission for writer
        self.permission = AuPermission.objects.create(
            codename='project_perm.submit_translations',
            approved=True, user=self.user['writer'],
            content_object=self.project, creator=self.user['maintainer'])

        # Create teams (one for the public project, one for the private
        # project) and populate the coordinator/member/reviewer roles.
        self.team = Team.objects.get_or_create(language=self.language,
            project=self.project, creator=self.user['maintainer'])[0]
        self.team_private = Team.objects.get_or_create(language=self.language,
            project=self.project_private, creator=self.user['maintainer'])[0]
        self.team.coordinators.add(self.user['team_coordinator'])
        self.team.members.add(self.user['team_member'])
        self.team.reviewers.add(self.user['reviewer'])
        self.team_private.coordinators.add(self.user['team_coordinator'])
        self.team_private.members.add(self.user['team_member'])
        self.team_private.reviewers.add(self.user['reviewer'])

        # Create a release for each project, attached to its resource.
        self.release = Release.objects.create(slug="releaseslug1",
            name="Release1", project=self.project)
        self.release.resources.add(self.resource)
        self.release_private = Release.objects.create(slug="releaseslug2",
            name="Release2", project=self.project_private)
        self.release_private.resources.add(self.resource_private)


        # Create common URLs
        # Easier to call common URLs in your view/template unit tests.
        self.urls = {
            'project': reverse('project_detail', args=[self.project.slug]),
            'project_edit': reverse('project_edit', args=[self.project.slug]),
            'project_resources': reverse('project_resources', args=[self.project.slug]),
            'resource': reverse('resource_detail', args=[self.resource.project.slug, self.resource.slug]),
            'resource_actions': reverse('resource_actions', args=[self.resource.project.slug, self.resource.slug, self.language.code]),
            'resource_edit': reverse('resource_edit', args=[self.resource.project.slug, self.resource.slug]),
            'translate': reverse('translate_resource', args=[self.resource.project.slug, self.resource.slug, self.language.code]),
            'release': reverse('release_detail', args=[self.release.project.slug, self.release.slug]),
            'release_create': reverse('release_create', args=[self.project.slug]),
            'team': reverse('team_detail', args=[self.resource.project.slug,
                                                 self.language.code]),

            'project_private': reverse('project_detail', args=[self.project_private.slug]),
            'resource_private': reverse('resource_detail', args=[self.resource_private.project.slug, self.resource_private.slug]),
            'translate_private': reverse('translate_resource', args=[self.resource_private.project.slug, self.resource_private.slug, self.language.code]),
        }

        # Run the txstatsupdate command quietly (verbosity=0) -- presumably
        # refreshes translation statistics so tests see current numbers;
        # confirm against the command's implementation.
        from django.core import management
        management.call_command('txstatsupdate', verbosity=0)

Example 39

Project: transifex
Source File: base2.py
View license
    @classmethod
    def setUpClass(cls):
        """Set up a sample set of class wide base objects for inherited tests.

        Class-level analogue of the per-test ``setUp`` fixture: adds a writer
        permission, public/private teams, releases and a dict of common URLs,
        all stored on the class (``cls._*``).

        NOTE: Use this Test Suite with
          TEST_RUNNER = 'txtestrunner.runner.TxTestSuiteRunner'
        in settings.
        If you are inheriting the class and overriding setUpClass, don't forget to
        call super::

          from transifex.txcommon.tests import (base2, utils)
          class TestClassName(base2.BaseTestCase):
              @classmethod
              def setUpClass(self):
                  super(TestClassName, self).setUpClass()

        """
        super(BaseTestCase, cls).setUpClass()

        # Add django-authority permission for writer
        cls._permission = AuPermission.objects.create(
            codename='project_perm.submit_translations',
            approved=True, user=cls._user['writer'],
            content_object=cls._project, creator=cls._user['maintainer'])

        # Create teams
        cls._team = Team.objects.get_or_create(language=cls._language,
            project=cls._project, creator=cls._user['maintainer'])[0]
        cls._team_private = Team.objects.get_or_create(language=cls._language,
            project=cls._project_private, creator=cls._user['maintainer'])[0]
        cls._team.coordinators.add(cls._user['team_coordinator'])
        cls._team.members.add(cls._user['team_member'])
        # NOTE(review): the reviewer is added to *members* here (not a
        # reviewers relation), unlike TransactionBaseTestCase.setUp above --
        # confirm this is intentional.
        cls._team.members.add(cls._user['reviewer'])
        cls._team_private.coordinators.add(cls._user['team_coordinator'])
        cls._team_private.members.add(cls._user['team_member'])
        cls._team_private.members.add(cls._user['reviewer'])

        # Create a release for each project, attached to its resource.
        cls._release = Release.objects.get_or_create(slug="releaseslug1",
            name="Release1", project=cls._project)[0]
        cls._release.resources.add(cls._resource)
        cls._release_private = Release.objects.get_or_create(slug="releaseslug2",
            name="Release2", project=cls._project_private)[0]
        cls._release_private.resources.add(cls._resource_private)


        # Create common URLs
        # Easier to call common URLs in your view/template unit tests.
        cls._urls = {
            'project': reverse('project_detail', args=[cls._project.slug]),
            'project_edit': reverse('project_edit', args=[cls._project.slug]),
            'project_resources': reverse('project_resources', args=[cls._project.slug]),
            'resource': reverse('resource_detail', args=[cls._resource.project.slug, cls._resource.slug]),
            'resource_actions': reverse('resource_actions', args=[cls._resource.project.slug, cls._resource.slug, cls._language.code]),
            'resource_edit': reverse('resource_edit', args=[cls._resource.project.slug, cls._resource.slug]),
            'translate': reverse('translate_resource', args=[cls._resource.project.slug, cls._resource.slug, cls._language.code]),
            'release': reverse('release_detail', args=[cls._release.project.slug, cls._release.slug]),
            'release_create': reverse('release_create', args=[cls._project.slug]),
            'team': reverse('team_detail', args=[cls._resource.project.slug,
                                                 cls._language.code]),

            'project_private': reverse('project_detail', args=[cls._project_private.slug]),
            'resource_private': reverse('resource_detail', args=[cls._resource_private.project.slug, cls._resource_private.slug]),
            'translate_private': reverse('translate_resource', args=[cls._resource_private.project.slug, cls._resource_private.slug, cls._language.code]),
        }


        # Run the txstatsupdate command quietly (verbosity=0) -- presumably
        # refreshes translation statistics; confirm against the command.
        from django.core import management
        management.call_command('txstatsupdate', verbosity=0)

Example 40

Project: transifex
Source File: base_legacy.py
View license
    def setUp(self):
        """Set up a sample set of base objects for inherited tests.

        Legacy per-test fixture: adds a writer permission, public/private
        teams, releases and a dict of commonly-used URLs on the instance.

        If you are inheriting the class and overriding setUp, don't forget to
        call super::

          from transifex.txcommon.tests import (base, utils)
          class TestClassName(base.BaseTestCase)
              def setUp(self):
                  super(TestClassName, self).setUp()

        """
        super(BaseTestCase, self).setUp()

        # Add django-authority permission for writer
        self.permission = AuPermission.objects.create(
            codename='project_perm.submit_translations',
            approved=True, user=self.user['writer'],
            content_object=self.project, creator=self.user['maintainer'])

        # Create teams
        self.team = Team.objects.get_or_create(language=self.language,
            project=self.project, creator=self.user['maintainer'])[0]
        self.team_private = Team.objects.get_or_create(language=self.language,
            project=self.project_private, creator=self.user['maintainer'])[0]
        self.team.coordinators.add(self.user['team_coordinator'])
        self.team.members.add(self.user['team_member'])
        # NOTE(review): the reviewer is added to *members* here (not a
        # reviewers relation), unlike TransactionBaseTestCase.setUp in
        # base2 -- confirm this is intentional for the legacy base class.
        self.team.members.add(self.user['reviewer'])
        self.team_private.coordinators.add(self.user['team_coordinator'])
        self.team_private.members.add(self.user['team_member'])
        self.team_private.members.add(self.user['reviewer'])

        # Create a release for each project, attached to its resource.
        self.release = Release.objects.create(slug="releaseslug1",
            name="Release1", project=self.project)
        self.release.resources.add(self.resource)
        self.release_private = Release.objects.create(slug="releaseslug2",
            name="Release2", project=self.project_private)
        self.release_private.resources.add(self.resource_private)


        # Create common URLs
        # Easier to call common URLs in your view/template unit tests.
        self.urls = {
            'project': reverse('project_detail', args=[self.project.slug]),
            'project_edit': reverse('project_edit', args=[self.project.slug]),
            'project_resources': reverse('project_resources', args=[self.project.slug]),
            'resource': reverse('resource_detail', args=[self.resource.project.slug, self.resource.slug]),
            'resource_actions': reverse('resource_actions', args=[self.resource.project.slug, self.resource.slug, self.language.code]),
            'resource_edit': reverse('resource_edit', args=[self.resource.project.slug, self.resource.slug]),
            'translate': reverse('translate_resource', args=[self.resource.project.slug, self.resource.slug, self.language.code]),
            'release': reverse('release_detail', args=[self.release.project.slug, self.release.slug]),
            'release_create': reverse('release_create', args=[self.project.slug]),
            'team': reverse('team_detail', args=[self.resource.project.slug,
                                                 self.language.code]),

            'project_private': reverse('project_detail', args=[self.project_private.slug]),
            'resource_private': reverse('resource_detail', args=[self.resource_private.project.slug, self.resource_private.slug]),
            'translate_private': reverse('translate_resource', args=[self.resource_private.project.slug, self.resource_private.slug, self.language.code]),
        }

        # Run the txstatsupdate command quietly (verbosity=0) -- presumably
        # refreshes translation statistics; confirm against the command.
        from django.core import management
        management.call_command('txstatsupdate', verbosity=0)

Example 41

Project: pootle
Source File: config.py
View license
@pytest.mark.cmd
@pytest.mark.django_db
def test_cmd_config_clear_instance(capfd):
    """The config command's -c flag clears config keys on a model instance."""
    project = Project.objects.get(code="project0")
    instance_args = ("config", "pootle_project.project", str(project.pk))

    def proj_config():
        # Always fetch a fresh config accessor for the project instance.
        return config.get(Project, instance=project)

    def clear_key(key):
        # Run `config pootle_project.project <pk> -c <key>`.
        call_command(*(instance_args + ("-c", key)))

    # -c without a key is an error
    with pytest.raises(CommandError):
        call_command(*(instance_args + ("-c",)))

    # clearing a key that was never set is a no-op
    clear_key("foo")

    # a single appended value is removed
    proj_config().append_config("foo", "bar")
    clear_key("foo")
    assert proj_config().get_config("foo") is None

    # two identical appended values are both removed
    proj_config().append_config("foo", "bar")
    proj_config().append_config("foo", "bar")
    clear_key("foo")
    assert proj_config().get_config("foo") is None

    # two appended values with different payloads are both removed
    proj_config().append_config("foo", "bar")
    proj_config().append_config("foo", "bar2")
    clear_key("foo")
    assert proj_config().get_config("foo") is None

    # clearing one key leaves other keys untouched
    proj_config().set_config("foo", "bar")
    proj_config().set_config("foo2", "bar2")
    clear_key("foo")
    assert proj_config().get_config("foo2") == "bar2"

Example 42

Project: pootle
Source File: config.py
View license
@pytest.mark.cmd
@pytest.mark.django_db
def test_cmd_config_bad(capfd):
    """Every malformed invocation of the config command raises CommandError."""
    project = Project.objects.get(code="project0")

    bad_invocations = [
        # app label without a model name
        ("pootle_project",),
        # model that does not exist
        ("pootle_project.DOESNT_EXIST",),
        # empty content-type with a pk
        ("", str(project.pk)),
        # -o lookup that matches no object
        ("pootle_project.project", "DOES_NOT_EXIST", "-o", "code"),
        # -o lookup that matches more than one object (non-unique)
        ("pootle_project.project", "nongnu", "-o", "treestyle"),
        # -o against a field the model does not have
        ("pootle_project.project", project.code, "-o", "OBJECT_FIELD_NOT_EXIST"),
        # -j with a value that is not valid JSON
        ("-j", "-s", "foo", "[BAD JSON]"),
        # pk that does not resolve to an instance
        ("pootle_project.project", "asdf"),
    ]

    for args in bad_invocations:
        with pytest.raises(CommandError):
            call_command("config", *args)

Example 43

Project: treeio
Source File: creation.py
View license
def DatabaseCreation(domain):
    """Return a database-creation helper bound to the connection for `domain`.

    Uses the engine's own ``DatabaseCreation`` class when the backend provides
    one, otherwise falls back to Django's generic ``BaseDatabaseCreation``.
    The returned helper creates the *configured* database (NAME) rather than
    a ``test_``-prefixed one, by overriding ``_get_test_db_name``.
    """
    connection = connections[domain]
    try:
        BaseDatabaseCreation = import_module(
            '.creation', connection.settings_dict['ENGINE']).DatabaseCreation
    except:
        # NOTE(review): bare except — any import problem (not only a missing
        # backend creation module) silently falls back to the generic class.
        BaseDatabaseCreation = import_module(
            '.creation', 'django.db.backends').BaseDatabaseCreation

    class DBCreation(BaseDatabaseCreation):

        def __init__(self, connection):
            super(DBCreation, self).__init__(connection)
            # Prefer the configured database name; when NAME is empty fall
            # back to the test-db name the base class would generate.
            self.database_name = self.connection.settings_dict.get(
                'NAME') or super(DBCreation, self)._get_test_db_name()

        def _get_test_db_name(self):
            # Point the test-db machinery at the real database name.
            return self.database_name

        def create_db(self, load_initial):
            """Create the database, sync tables and (optionally) load initial data."""
            from django.core.management import call_command

            # Deletes database name because if database doesn't exist,
            # django orm isn't able to connect to it.
            self.connection.settings_dict["NAME"] = None
            self._create_test_db(0, True)
            self.connection.settings_dict["NAME"] = self.database_name

            # Reconnect so the next cursor uses the newly created database.
            self.connection.close()
            # Confirm the feature set of the database
            self.connection.features.confirm()

            # Report syncdb messages at one level lower than that requested.
            # This ensures we don't get flooded with messages during testing
            # (unless you really ask to be flooded)
            call_command('syncdb',
                         verbosity=0,
                         interactive=False,
                         database=domain,
                         load_initial_data=load_initial,
                         migrate_all=True)

            from django.core.cache import get_cache
            from django.core.cache.backends.db import BaseDatabaseCache

            # Create cache tables for every DB-backed cache that the router
            # allows on this connection.
            for cache_alias in settings.CACHES:
                cache = get_cache(cache_alias)
                if isinstance(cache, BaseDatabaseCache):
                    from django.db import router

                    if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
                        call_command(
                            'createcachetable', cache._table, database=self.connection.alias)

            # Get a cursor (even though we don't need one yet). This has
            # the side effect of initializing the test database.
            cursor = self.connection.cursor()
            return self.database_name

    return DBCreation(connection)

Example 44

Project: federal_spending
Source File: daily_update.py
View license
    def handle(self, day=None, type='all', *args, **kwargs):

        try:
            print "deleting files in /datafeeds and /out"
            
            OUTPATH = settings.CSV_PATH + 'out/'
            
            for f in os.listdir(OUTPATH):
                os.remove(OUTPATH + f)
            
            INPATH = settings.CSV_PATH + 'datafeeds/'
            for f in os.listdir(INPATH):
                os.remove(INPATH + f)

            base_url = 'http://www.usaspending.gov/customcode/build_feed.php?data_source=PrimeAward&detail_level=Complete&ou_code=All&is_dept=false&recipient_state=All&pop_state=All&format=CSV&recovery_only=&record_count=10000000000'

            if not day:
                day = datetime.datetime.now() - datetime.timedelta(days=1)
                day = day.strftime("%Y-%m-%d")

            print "Downloading new files"

            for fy in settings.FISCAL_YEARS:
                url = base_url + '&fiscal_year=' + str(fy) + '&since=' + day
                #grant files
                c = requests.get(url + '&spending_category=Grants')
                outf = open(INPATH + str(fy) + '_All_Grants_Delta_' + day + '.csv', 'w')
                outf.write(c.content)

                c = requests.get(url + '&spending_category=DirectPayments')
                if c.content:
                    outf.write(c.content[c.content.index('\n')+1:])

                c = requests.get(url + '&spending_category=Insurance')
                if c.content:
                    outf.write(c.content[c.content.index('\n')+1:])

                c = requests.get(url + '&spending_category=Loans')
                if c.content:
                    outf.write(c.content[c.content.index('\n')+1:])

                c = requests.get(url + '&spending_category=Contracts')
                outf = open(INPATH + str(fy) + '_All_Contracts_Delta_' + day + '.csv', 'w')
                outf.write(c.content)


            print "sleeping for a minute"
            time.sleep(60)

            print "processing downloaded files into proper format"
            management.call_command('convert_usaspending_contracts')
            management.call_command('convert_usaspending_grants')

            print "looping through files"
            for sname in os.listdir(OUTPATH):
                if 'contracts' in sname:
                    self.process_contract_file(sname, OUTPATH)

                if 'grants' in sname:   
                    self.process_grant_file(sname, OUTPATH)
        except Exception as e:
            logging.debug("An exception was thrown: %s" % e)

Example 45

Project: federal_spending
Source File: fresh_import.py
View license
    def handle(self, import_file='all_downloads.txt', update=False, **options):
        """Rebuild the USASpending tables from scratch (or for UPDATE_YEARS only).

        Prompts for confirmation, drops the existing tables/indexes, recreates
        them via syncdb + partitioning, downloads the source files listed in
        `import_file`, converts them, and bulk-loads contracts and grants.
        """

        if update:
            warn_text = "This will delete USASpending tables and indexes for years {0}. Proceed? y\\n".format(settings.UPDATE_YEARS)
        else:
            warn_text = "This will delete all USASpending related tables, indexes, etc. Are you sure you want to proceed? y\\n "

        # Destructive operation: require an explicit 'y' before continuing.
        confirm = raw_input(warn_text)
        if confirm != 'y':
            return

        print "deleting out files"
        OUTPATH = settings.CSV_PATH + 'out/'
        for f in os.listdir(OUTPATH):
            os.remove(OUTPATH + f)


        print "deleting old tables and indexes"
        cursor = connection.cursor()
        if update:
            # Partial rebuild: drop only the per-fiscal-year partition tables
            # plus the overall indexes.
            sql = ""
            for fy in settings.UPDATE_YEARS:
                sql += "Drop table if exists usaspending_contract_{0} cascade; commit; Drop table if exists usaspending_grant_{1} cascade;commit;".format(fy, fy)

            #deleting overall indexes
            sql += ''.join(self.contracts_idx_drop)
            sql += ''.join(self.grants_idx_drop)

        else:
            # Full rebuild: drop the parent tables (cascades to partitions).
            sql = "Drop table if exists usaspending_contract cascade; commit; Drop table if exists usaspending_grant cascade; commit;"

        print sql
        cursor.execute(sql);

        print "Regenerating tables"
        management.call_command('syncdb')

        print "Creating partition tables"
        if update:
            for fy in settings.UPDATE_YEARS:
                management.call_command('create_partition', fiscal_year=fy, table='usaspending_contract')
                management.call_command('create_partition', fiscal_year=fy, table='usaspending_grant')
        else:
            management.call_command('create_partition', fiscal_year='all')

        print "Downloading links in {0}".format(import_file) 
        management.call_command('download_files', settings.PROJECT_ROOT + '/usaspending/downloads/' + import_file)

        # Give the download command's file handles time to flush/close
        # before the converters read them.
        print "sleeping for a minute to allow files to close out"
        time.sleep(60)

        print "processing downloaded files into proper format"
        management.call_command('convert_usaspending_contracts', '--traceback')
        management.call_command('convert_usaspending_grants', '--traceback')

        print "Putting processed Contract CSVs in database"
        print settings.CSV_PATH + 'out/'
        for fname in os.listdir(settings.CSV_PATH + 'out/'):
            print fname
            if 'contracts' in fname:
                management.call_command('loadcontracts', settings.CSV_PATH + 'out/' + fname)

        print"Putting processed Grant CSVs in database"
        for fname in os.listdir(settings.CSV_PATH + 'out/'):
            print fname
            if 'grants' in fname:
                management.call_command('loadgrants', settings.CSV_PATH + 'out/' + fname)

        print "Creating partition indexes"
        management.call_command('create_indexes')
Example 46

Project: pytest_django
Source File: plugin.py
View license
    def pytest_namespace(self):
        """
        Sets up the py.test namespace: the ``params`` and ``urls`` decorators
        and the ``load_fixture`` helper.
        """
        def params(funcarglist):
            """
            A decorator to make parametrised tests easy. Takes a list of 
            dictionaries of keyword arguments for the function. A test is 
            created for each dictionary.

            Example:

                @py.test.params([dict(a=1, b=2), dict(a=3, b=3), dict(a=5, b=4)])  
                def test_equals(a, b):
                    assert a == b
            """
            def wrapper(function):
                function.funcarglist = funcarglist
                return function
            return wrapper

        def load_fixture(fixture):
            """
            Loads a fixture, useful for loading fixtures in funcargs.

            Example:

                def pytest_funcarg__articles(request):
                    py.test.load_fixture('test_articles')
                    return Article.objects.all()
            """
            call_command('loaddata', fixture, **{
                'verbosity': self.verbosity + 1,
                'commit': not settings.DATABASE_SUPPORTS_TRANSACTIONS
            })

        def urls(urlconf):
            """
            A decorator to change the URLconf for a particular test, similar 
            to the `urls` attribute on Django's `TestCase`.
            
            Example:
            
                @py.test.urls('myapp.test_urls')
                def test_something(client):
                    assert 'Success!' in client.get('/some_path/')
            """
            def wrapper(function):
                function.urls = urlconf
                # BUG FIX: the wrapper previously returned None, so any
                # function decorated with @py.test.urls(...) was replaced
                # by None and could never run.
                return function
            return wrapper

        return {'params': params, 'load_fixture': load_fixture, 'urls': urls}

Example 47

Project: wger
Source File: tasks.py
View license
@task(help={'settings-path': 'Path to settings file (absolute path recommended). Leave empty for default',
            'database-path': 'Path to sqlite database (absolute path recommended). Leave empty for default',
            'address': 'Address to use. Default: localhost',
            'port': 'Port to use. Default: 8000',
            'browser': 'Whether to open the application in a browser window. Default: false',
            'start-server': 'Whether to start the development server. Default: true'})
def bootstrap_wger(context,
                   settings_path=None,
                   database_path=None,
                   address='localhost',
                   port=8000,
                   browser=False,
                   start_server=True):
    '''
    Performs all steps necessary to bootstrap the application:
    settings creation, django setup, database creation/fixtures/admin,
    JS dependency installation, and (optionally) starting the server.
    '''

    # Find url to wger
    address, port = detect_listen_opts(address, port)
    if port == 80:
        url = "http://{0}".format(address)
    else:
        url = "http://{0}:{1}".format(address, port)

    # Create settings if necessary
    if settings_path is None:
        settings_path = get_user_config_path('wger', 'settings.py')
    if not os.path.exists(settings_path):
        create_settings(context, settings_path=settings_path, database_path=database_path, url=url)

    # Find the path to the settings and setup the django environment
    setup_django_environment(settings_path)

    # Create Database if necessary (migrate, fixtures, admin account)
    if not database_exists():
        print('*** Database does not exist, creating one now')
        migrate_db(context, settings_path=settings_path)
        load_fixtures(context, settings_path=settings_path)
        create_or_reset_admin(context, settings_path=settings_path)

    # Download JS libraries with bower; bower must run from the wger
    # package directory, so chdir there and back afterwards.
    os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'wger'))
    context.run('npm install bower')
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    call_command('bower', 'install')

    # Start the webserver
    if start_server:
        print('*** Bootstraping complete, starting application')
        start_wger(address=address, port=port, browser=browser, settings_path=settings_path)
Example 48

Project: wger
Source File: tasks.py
View license
@task(help={'settings-path': 'Path to settings file (absolute path recommended). Leave empty for default'})
def load_fixtures(context, settings_path=None):
    '''
    Loads all fixtures
    '''

    # Find the path to the settings and setup the django environment
    setup_django_environment(settings_path)

    current_dir = os.path.dirname(os.path.abspath(__file__))

    # (app directory, fixture files) pairs, in load order.  gym.json is
    # loaded first and again at the end, exactly as before.
    fixture_plan = (
        ('gym', ('gym.json',)),
        ('core', ('languages.json', 'groups.json', 'users.json',
                  'licenses.json', 'days_of_week.json',
                  'setting_repetition_units.json',
                  'setting_weight_units.json')),
        ('config', ('language_config.json', 'gym_config.json')),
        ('exercises', ('equipment.json', 'muscles.json',
                       'categories.json', 'exercises.json')),
        ('nutrition', ('ingredients.json', 'weight_units.json',
                       'ingredient_units.json')),
        ('gym', ('gym.json', 'gym-config.json', 'gym-adminconfig.json')),
    )

    for app, fixtures in fixture_plan:
        path = os.path.join(current_dir, 'wger', app, 'fixtures/')
        for fixture in fixtures:
            call_command("loaddata", path + fixture)
Example 49

View license
    def test_sync_where_in_facebook_and_in_facetools_but_data_not_synced(self):
        """Syncing repairs local TestUser rows that have drifted from the
        Facebook Graph data (live-API test: talks to graph.facebook.com)."""
        from test_project.testapp3.facebook_test_users import facebook_test_users as t3
        facebook_test_users = t3()
        self.assertEquals(0, TestUser.objects.count())
        management.call_command('sync_facebook_test_users', 'testapp3')

        # Get the test user data from facebook
        test_users_url = "https://graph.facebook.com/%s/accounts/test-users?access_token=%s" % (settings.FACEBOOK_APPLICATION_ID, _get_app_access_token())
        test_users = _merge_with_facebook_data(facebook_test_users, json.loads(requests.get(test_users_url).content)['data'], _get_app_access_token())

        # Make sure the data looks good
        self.assertEquals(3, TestUser.objects.count())
        self.assertEquals(3, len(test_users))
        self.assertEquals(3, len([u for u in test_users if 'graph_user_data' in u]))

        # Now change the user data on facetools, leaving them out of sync with the facebook data
        old_values = {}
        try:
            for test_user in TestUser.objects.all():
                # Remember the real values so they can be restored even if
                # an assertion below fails.
                old_values[test_user.name] = {
                    'facebook_id': test_user.facebook_id,
                    'access_token': test_user.access_token
                }
                test_user.facebook_id = 0
                test_user.access_token = "failbear"
                test_user.save()

            # After syncing again the data should be back to normal
            management.call_command('sync_facebook_test_users', 'testapp3')
            test_users = _merge_with_facebook_data(facebook_test_users, json.loads(requests.get(test_users_url).content)['data'], _get_app_access_token())
            self.assertEquals(3, TestUser.objects.count())
            self.assertEquals(3, len(test_users))
            self.assertEquals(3, len([u for u in test_users if 'graph_user_data' in u]))
            for test_user in TestUser.objects.all():
                self.assertNotEquals(0, test_user.facebook_id)
                self.assertNotEquals("failbear", test_user.access_token)
        finally:
            # Restore the original values regardless of test outcome.
            for test_user in TestUser.objects.all():
                test_user.facebook_id = old_values[test_user.name]['facebook_id']
                test_user.access_token = old_values[test_user.name]['access_token']
                test_user.save()

        # Make sure the generated fixture is correct
        self.assertTestUserFixture(testapp3, 'testapp3', t3())

Example 50

Project: yawd-translations
Source File: views.py
View license
    def get_context_data(self, **kwargs):
        """Regenerate, collect and merge .po translation files for
        ``self.language`` across all installed apps, then compile and
        activate the merged catalogs.

        Returns the template context with ``lang_files`` (the collected
        per-app .po file names) or ``error`` when one was set earlier.
        """
        context = super(GenerateTranslationMessagesView, self).get_context_data(**kwargs)
        
        if hasattr(self, 'error') and self.error:
            context['error'] = self.error
            return context
        
        #locate the current directory
        curr_dir = os.curdir
        # gettext domain -> extra file extensions to scan with makemessages
        domain_dict = {'django' : ['html','txt'], 'djangojs' : []}
        
        lang_files = []
        #iterate over the installed applications and copy their po files
        #for this language to the appropriate folder 
        for app_name in settings.INSTALLED_APPS:    
            
            mod = import_module(app_name)
            mod_root = os.path.dirname(mod.__file__)

            # Skip apps that ship no locale directory at all.
            if not os.path.exists(os.path.join(mod_root, 'locale')):
                continue
            
            original_path = os.path.join(mod_root, 'locale',
                                         to_locale(self.language.name),
                                         'LC_MESSAGES')
            delete_at_the_end = False
            
            if not os.path.exists(original_path):
                if not app_name.startswith('django.contrib'):
                    try: #try to create language directory for the app
                        os.makedirs(original_path)
                        delete_at_the_end = True
                    except:
                        continue
                else:
                    continue

            if not app_name.startswith('django.contrib'):
                #move original files to a temp file
                for file_ in list(os.listdir(original_path)):
                        if file_.endswith('.po'):
                            shutil.copy(os.path.join(original_path, file_),
                                        os.path.join(original_path,
                                                     'original-%s' % file_))

                #copy the project-wise files to the appropriate directory
                if not self.request.GET.get('delete', 0):
                    #replace original file with the yawd version
                    #so that it gets updated
                    for f in list(os.listdir(self.po_path)):
                        if f.startswith('%s-' % app_name) and f.endswith('.po'):
                            shutil.copy(os.path.join(self.po_path, f),
                                        os.path.join(original_path,
                                                     f.replace('%s-' % app_name, '')))  

                #makemessages excluding the core applications
                # makemessages operates on the current directory, so chdir
                # into the app and back afterwards.
                os.chdir(mod_root)
                for key, value in domain_dict.items():
                    try:
                        management.call_command('makemessages', domain=key,
                                                extensions=value, locale=self.locale,
                                                verbosity=0)
                    except management.CommandError:
                        #Django could throw a CommandError if we process
                        #the domainjs and there are no messages to process.
                        pass
                os.chdir(curr_dir)

            #iterate over the application po files
            for file_ in list(os.listdir(original_path)):
                if not file_.startswith('original-') and file_.endswith('.po'):
                    original_file_path = os.path.join(original_path, file_)
                    file_name = '%s-%s' % (app_name, file_)
                    
                    #copy file
                    # django.contrib files are only (re)copied when the
                    # 'delete' GET flag is set or no copy exists yet.
                    copy_path = os.path.join(self.po_path, file_name)
                    if self.request.GET.get('delete', 0) or \
                            not (app_name.startswith('django.contrib') \
                                 and os.path.exists(copy_path)):
                        shutil.copy(original_file_path, copy_path)
                        os.chmod(copy_path, 0664)

                    #unlink updated file
                    if not app_name.startswith('django.contrib'):
                        os.unlink(original_file_path)

                    lang_files.append(file_name)

            if not app_name.startswith('django.contrib'):
                if delete_at_the_end:
                    # The locale dir was created above just for this run.
                    shutil.rmtree(os.path.join(mod_root, 'locale',
                                               to_locale(self.language.name)))
                else:
                    for file_ in os.listdir(original_path):
                        #put back the original application files
                        if file_.startswith('original-') and file_.endswith('.po'):
                            shutil.move(os.path.join(original_path, file_),
                                        os.path.join(original_path,
                                                     file_.replace('original-','')))

        #concat all messages in a single .po file for each domain
        for domain in domain_dict:
            file_name = '%s.po' % domain
            uni_django_path = os.path.join(self.po_path, file_name)

            if os.path.exists(uni_django_path):
                os.unlink(uni_django_path)

            source_files = [os.path.join(self.po_path, f) for f in lang_files \
                            if f.endswith(file_name)]
            if source_files:
                #merge .po files
                concat_message_files(source_files, uni_django_path)
                #compile django.po
                if not has_bom(uni_django_path):
                    compile_message_file(uni_django_path)

        #reset the cached translation messages so that
        #we do not need to restart the web server
        reset_translations(self.language.name)

        context['lang_files'] = sorted(lang_files)
        return context