sqlalchemy.JSON

Here are examples of the Python API sqlalchemy.JSON taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

35 Examples
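
Before diving into the examples, here is a minimal sketch of the most common use of sqlalchemy.JSON: declaring a JSON-typed column on a table. The table and column names below are illustrative only and are not taken from the projects listed.

import sqlalchemy as sa

metadata = sa.MetaData()

# A table with a JSON column. On backends with native JSON support
# (PostgreSQL, MySQL 5.7+, SQLite via the JSON1 extension) this maps
# to the backend's JSON type.
clients = sa.Table(
    "clients",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("preferences", sa.JSON(), nullable=True),
)

# engine = sa.create_engine("sqlite://")
# metadata.create_all(engine)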

3 Source : 56dff538c0a7_add_prefs_to_client.py
with MIT License
from airq-dev

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("clients", sa.Column("preferences", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : 8527f94dfe70_json_blob_of_metric_data.py
with MIT License
from airq-dev

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("zipcodes", sa.Column("metrics_data", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : db.py
with MIT License
from ajs

    def load_dialect_impl(self, dialect):
        if dialect.name == 'sqlite':
            return dialect.type_descriptor(self.impl)
        return dialect.type_descriptor(sqlalchemy.JSON())

    # rapidjson doesn't appear to let python know that it has a dumps
    # function, so we have to give pylint a heads-up
    # pylint: disable=c-extension-no-member
    def process_bind_param(self, value, dialect):

3 Source : 3cfad1e70305_add_user_settings_field.py
with GNU General Public License v3.0
from bitcartcc

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("users", sa.Column("settings", sa.JSON(), nullable=True, server_default="{}"))
    op.alter_column("users", "settings", server_default=None)
    # ### end Alembic commands ###


def downgrade():

3 Source : 65986008ad26_added_template_selection_per_store_.py
with GNU General Public License v3.0
from bitcartcc

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("stores", sa.Column("templates", sa.JSON(), nullable=True))
    op.drop_index("stores_domain_idx", table_name="stores")
    op.drop_column("stores", "domain")
    op.drop_column("stores", "template")
    op.add_column("products", sa.Column("templates", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : 73d794ea10e2_add_store_theme_settings.py
with GNU General Public License v3.0
from bitcartcc

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("stores", sa.Column("theme_settings", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : d8515a8b706e_move_checkout_settings_to_json.py
with GNU General Public License v3.0
from bitcartcc

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("stores", sa.Column("checkout_settings", sa.JSON(), nullable=True))
    op.drop_column("stores", "transaction_speed")
    op.drop_column("stores", "use_html_templates")
    op.drop_column("stores", "underpaid_percentage")
    op.drop_column("stores", "expiration")
    # ### end Alembic commands ###


def downgrade():

3 Source : 20200123_23-52-54__save_auth_response_json.py
with MIT License
from busy-beaver-dev

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "slack_installation", sa.Column("auth_response", sa.JSON(), nullable=True)
    )
    # ### end Alembic commands ###


def downgrade():

3 Source : b8ebf60a501c_add_canvas_size.py
with MIT License
from cclrobotics

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('artpieces', sa.Column('canvas_size', sa.JSON(), nullable=True))
    # ### end Alembic commands ###

    with session_scope() as session:
        artpieces = session.query(ArtpieceModel).all()
        for artpiece in artpieces:
            artpiece.canvas_size = {'x':39,'y':26}

    op.alter_column('artpieces', 'canvas_size', nullable=False)


def downgrade():

3 Source : model_fields.py
with MIT License
from collerek

    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.JSON(none_as_null=kwargs.get("sql_nullable", False))


if TYPE_CHECKING:  # pragma: nocover # noqa: C901
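
The none_as_null option passed above controls how a Python None is persisted: as SQL NULL rather than as the JSON null value. A small illustration, with made-up table and column names:

import sqlalchemy as sa

metadata = sa.MetaData()

# With none_as_null=True, assigning None to the column stores SQL NULL;
# with the default (none_as_null=False), an explicit None is stored as
# the JSON 'null' value instead.
user_settings = sa.Table(
    "user_settings",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("data", sa.JSON(none_as_null=True), nullable=True),
)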

3 Source : be7c97191ac2_.py
with Apache License 2.0
from DataBiosphere

def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('billing', sa.Column('cost_by_analysis', sa.JSON(), nullable=True))
    op.alter_column('billing', 'compute_cost',
               existing_type=sa.NUMERIC(),
               nullable=False)
    op.alter_column('billing', 'storage_cost',
               existing_type=sa.NUMERIC(),
               nullable=False)
    ### end Alembic commands ###


def downgrade():

3 Source : 742a2a556eb9_.py
with Apache License 2.0
from deep-learning-indaba

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('organisation', sa.Column('languages', sa.JSON(), nullable=True))
    op.execute("""UPDATE organisation SET languages = '[{"code": "en", "description": "English"}]'""")
    op.alter_column('organisation', 'languages', nullable=False)
    # ### end Alembic commands ###


def downgrade():

3 Source : a56f13808882_add_docker_params_to_algorithm.py
with MIT License
from hpi-epic

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('algorithm', sa.Column('docker_parameters', sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : 3aa397e3adac_add_built_packages_field_to_.py
with MIT License
from packit

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("copr_builds", sa.Column("built_packages", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : 45973dacf7da_add_public_flags_column.py
with MIT License
from python-discord

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('public_flags', sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade():

3 Source : model.py
with Apache License 2.0
from spotify

    def load_dialect_impl(self, dialect):
        """This is an end-user override hook that can be used to provide
        differing types depending on the given dialect.

        Args:
            dialect (object): SQLAlchemy dialect object
        Returns:
            object: if dialect name is 'mysql' it will override the type descriptor to JSON()
        """
        if dialect.name == "mysql":
            return dialect.type_descriptor(sqlalchemy.JSON())
        return dialect.type_descriptor(self.impl)

    def process_bind_param(self, value, dialect):
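
The two TypeDecorator snippets in this listing only show load_dialect_impl. For context, a self-contained sketch of the full pattern might look like the following; the class name JSONEncodedText and the MySQL/Text split are assumptions for illustration, not code from the projects above.

import json

import sqlalchemy
from sqlalchemy.types import Text, TypeDecorator


class JSONEncodedText(TypeDecorator):
    """Store JSON natively on MySQL, and as serialized TEXT elsewhere."""

    impl = Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        # Use the backend's native JSON type on MySQL, otherwise fall back
        # to the TEXT implementation declared in `impl`.
        if dialect.name == "mysql":
            return dialect.type_descriptor(sqlalchemy.JSON())
        return dialect.type_descriptor(self.impl)

    def process_bind_param(self, value, dialect):
        # Serialize Python objects to a JSON string for non-native backends;
        # MySQL's native JSON type handles serialization itself.
        if value is None or dialect.name == "mysql":
            return value
        return json.dumps(value)

    def process_result_value(self, value, dialect):
        # Deserialize stored JSON strings back into Python objects.
        if value is None or dialect.name == "mysql":
            return value
        return json.loads(value)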

3 Source : 917051c561c8_.py
with GNU General Public License v3.0
from teamsempo

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('custom_attribute_user_storage', 'value',
               existing_type=sa.JSON(),
               type_=sa.VARCHAR(),
               existing_nullable=True)
    # ### end Alembic commands ###


def downgrade():

3 Source : 07859b6b370b_json_sample_sizes.py
with GNU Affero General Public License v3.0
from votingworks

def upgrade():
    op.alter_column(
        "round_contest",
        "sample_size",
        type_=sa.JSON,
        postgresql_using="json_build_object('key', 'custom', 'size', sample_size::int, 'prob', null)",
    )


def downgrade():  # pragma: no cover

3 Source : 11e35dd1c515_batch_tallies_file.py
with GNU Affero General Public License v3.0
from votingworks

def upgrade():
    op.add_column("jurisdiction", sa.Column("batch_tallies", sa.JSON(), nullable=True))
    op.add_column(
        "jurisdiction",
        sa.Column("batch_tallies_file_id", sa.String(length=200), nullable=True),
    )
    op.create_foreign_key(
        op.f("jurisdiction_batch_tallies_file_id_fkey"),
        "jurisdiction",
        "file",
        ["batch_tallies_file_id"],
        ["id"],
        ondelete="set null",
    )


def downgrade():  # pragma: no cover

3 Source : 9d9c4e058cb2_jurisdiction_contest_name_.py
with GNU Affero General Public License v3.0
from votingworks

def upgrade():
    op.add_column(
        "jurisdiction",
        sa.Column("contest_name_standardizations", sa.JSON(), nullable=True),
    )


def downgrade():  # pragma: no cover

3 Source : f8e901e92f0a_background_sample_size_options.py
with GNU Affero General Public License v3.0
from votingworks

def upgrade():
    op.add_column(
        "election", sa.Column("sample_size_options", sa.JSON(), nullable=True)
    )
    op.add_column(
        "election",
        sa.Column("sample_size_options_task_id", sa.String(length=200), nullable=True),
    )
    op.create_foreign_key(
        op.f("election_sample_size_options_task_id_fkey"),
        "election",
        "background_task",
        ["sample_size_options_task_id"],
        ["id"],
        ondelete="set null",
    )


def downgrade():  # pragma: no cover

0 Source : _sqlbackend.py
with Apache License 2.0
from aplbrain

    def ingest_from_edgelist_dataframe(
        self, edgelist: pd.DataFrame, source_column: str, target_column: str
    ) -> dict:
        """
        Ingest an edgelist from a Pandas DataFrame.

        """
        # Produce edge list:

        edge_tic = time.time()
        newlist = edgelist.rename(
            columns={
                source_column: self._edge_source_key,
                target_column: self._edge_target_key,
            }
        )

        newlist[self._primary_key] = edgelist.apply(
            lambda x: f"__{x[source_column]}__{x[target_column]}", axis="columns"
        )
        newlist["_metadata"] = edgelist.apply(
            lambda x: {
                k: v for k, v in x.items() if k not in [source_column, target_column]
            },
            axis="columns",
        )

        newlist[
            [
                self._edge_source_key,
                self._edge_target_key,
                self._primary_key,
                "_metadata",
            ]
        ].to_sql(
            self._edge_table_name,
            self._engine,
            index=False,
            if_exists="append",
            dtype={"_metadata": sqlalchemy.JSON},
        )

        edge_toc = time.time() - edge_tic

        # now ingest nodes:
        node_tic = time.time()
        # Series.append was removed in pandas 2.0; concat is the equivalent.
        nodes = pd.concat([edgelist[source_column], edgelist[target_column]]).unique()
        pd.DataFrame(
            [
                {
                    self._primary_key: node,
                    # no metadata:
                    "_metadata": {},
                }
                for node in nodes
            ]
        ).to_sql(
            self._node_table_name,
            self._engine,
            index=False,
            if_exists="replace",
            dtype={"_metadata": sqlalchemy.JSON},
        )

        return {
            "node_count": len(nodes),
            "node_duration": time.time() - node_tic,
            "edge_count": len(edgelist),
            "edge_duration": edge_toc,
        }

0 Source : fields.py
with BSD 3-Clause "New" or "Revised" License
from awesometoolbox

def JSON(
    *,
    primary_key: bool = False,
    allow_null: bool = False,
    index: bool = False,
    unique: bool = False,
) -> Type[Any]:
    namespace = dict(
        primary_key=primary_key,
        allow_null=allow_null,
        index=index,
        unique=unique,
        column_type=sqlalchemy.JSON(),
    )

    class Json(object):
        @classmethod
        def __get_validators__(cls) -> "CallableGenerator":
            yield cls.validate

        @classmethod
        def validate(cls, v: Any) -> Any:
            try:
                if isinstance(v, str):
                    return json.loads(v)
                else:
                    return v
            except ValueError:
                raise errors.JsonError()
            except TypeError:
                raise errors.JsonTypeError()

    return type("JSON", (Json, ColumnFactory), namespace)


def ForeignKey(to, *, allow_null: bool = False) -> Type[object]:

0 Source : 20200708_21-59-03__set_up_task_model_for_current_workflow.py
with MIT License
from busy-beaver-dev

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("task", sa.Column("data", sa.JSON(), nullable=True))
    op.add_column(
        "task",
        sa.Column(
            "task_state",
            sqlalchemy_utils.types.choice.ChoiceType(Task.TaskState.STATES),
            nullable=True,
        ),
    )
    op.create_index(op.f("ix_task_task_state"), "task", ["task_state"], unique=False)
    op.drop_column("task", "description")
    op.drop_column("task", "failed")
    op.drop_column("task", "type")
    op.drop_column("task", "complete")
    # ### end Alembic commands ###


def downgrade():

0 Source : fca4d39d6f19_.py
with MIT License
from cclrobotics

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('site_vars')
    # ### end Alembic commands ###
    op.alter_column('artpieces', 'art', new_column_name='art_encoding', type_=sa.JSON(),
            postgresql_using='art::json', nullable=False)
    op.alter_column('artpieces', 'picture', new_column_name='raw_image', nullable=True)
    op.alter_column('artpieces', 'status', new_column_name='submission_status', nullable=False)
    op.alter_column('artpieces', 'title', nullable=False)
    op.alter_column('artpieces', 'submit_date', nullable=False)

    with session_scope() as session:
        artpieces = session.query(ArtpieceModel).filter(ArtpieceModel.raw_image == None).all()
        for artpiece in artpieces:
            artpiece.raw_image = decode_art_to_image(artpiece.art, COLOR_SCHEME)

    op.alter_column('artpieces', 'raw_image', nullable=False)


def downgrade():

0 Source : fields.py
with BSD 3-Clause "New" or "Revised" License
from encode

    def get_column_type(self):
        return sqlalchemy.JSON()


class ForeignKey(ModelField):

0 Source : 852ae6c715af_add_rendered_task_instance_fields_table.py
with Apache License 2.0
from flink-extended

def upgrade():
    """Apply Add RenderedTaskInstanceFields table"""
    json_type = sa.JSON
    conn = op.get_bind()  # pylint: disable=no-member

    if conn.dialect.name != "postgresql":
        # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
        # versions, check for the function existing.
        try:
            conn.execute("SELECT JSON_VALID(1)").fetchone()
        except (sa.exc.OperationalError, sa.exc.ProgrammingError):
            json_type = sa.Text

    op.create_table(
        TABLE_NAME,  # pylint: disable=no-member
        sa.Column('dag_id', sa.String(length=250), nullable=False),
        sa.Column('task_id', sa.String(length=250), nullable=False),
        sa.Column('execution_date', sa.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('rendered_fields', json_type(), nullable=False),
        sa.PrimaryKeyConstraint('dag_id', 'task_id', 'execution_date'),
    )


def downgrade():

0 Source : d38e04c12aa2_add_serialized_dag_table.py
with Apache License 2.0
from flink-extended

def upgrade():
    """Upgrade version."""
    json_type = sa.JSON
    conn = op.get_bind()  # pylint: disable=no-member

    if conn.dialect.name != "postgresql":
        # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
        # versions, check for the function existing.
        try:
            conn.execute("SELECT JSON_VALID(1)").fetchone()
        except (sa.exc.OperationalError, sa.exc.ProgrammingError):
            json_type = sa.Text

    op.create_table(
        'serialized_dag',  # pylint: disable=no-member
        sa.Column('dag_id', sa.String(length=250), nullable=False),
        sa.Column('fileloc', sa.String(length=2000), nullable=False),
        sa.Column('fileloc_hash', sa.Integer(), nullable=False),
        sa.Column('data', json_type(), nullable=False),
        sa.Column('last_updated', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('dag_id'),
    )
    op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash'])  # pylint: disable=no-member

    if conn.dialect.name == "mysql":
        conn.execute("SET time_zone = '+00:00'")
        cur = conn.execute("SELECT @@explicit_defaults_for_timestamp")
        res = cur.fetchall()
        if res[0][0] == 0:
            raise Exception("Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql")

        op.alter_column(  # pylint: disable=no-member
            table_name="serialized_dag",
            column_name="last_updated",
            type_=mysql.TIMESTAMP(fsp=6),
            nullable=False,
        )
    else:
        # sqlite and mssql datetime are fine as is.  Therefore, not converting
        if conn.dialect.name in ("sqlite", "mssql"):
            return

        # we try to be database agnostic, but not every db (e.g. sqlserver)
        # supports per session time zones
        if conn.dialect.name == "postgresql":
            conn.execute("set timezone=UTC")

        op.alter_column(  # pylint: disable=no-member
            table_name="serialized_dag",
            column_name="last_updated",
            type_=sa.TIMESTAMP(timezone=True),
        )


def downgrade():

0 Source : test_types.py
with Apache License 2.0
from gethue

    def test_variant_righthand_coercion_honors_wrapped(self):
        my_json_normal = JSON()
        my_json_variant = JSON().with_variant(String(), "sqlite")

        tab = table(
            "test",
            column("avalue", my_json_normal),
            column("bvalue", my_json_variant),
        )
        expr = tab.c.avalue["foo"] == "bar"

        is_(expr.right.type._type_affinity, String)
        is_not_(expr.right.type, my_json_normal)

        expr = tab.c.bvalue["foo"] == "bar"

        is_(expr.right.type._type_affinity, String)
        is_not_(expr.right.type, my_json_variant)

    def test_variant_righthand_coercion_returns_self(self):

0 Source : fields.py
with BSD 3-Clause "New" or "Revised" License
from icns-distributed-cloud

    def get_column_type(self):
        return sqlalchemy.JSON()


class ForeignKey(ModelField, typesystem.Field):

0 Source : 9b1e32fde197_remove_metrics.py
with Apache License 2.0
from Open-EO

def downgrade():
    op.add_column('jobs', sa.Column('metrics', sa.JSON(), nullable=True))

0 Source : add_remove_job_columns.py
with Apache License 2.0
from Open-EO

def downgrade():
    op.add_column('jobs', sa.Column('output', sa.JSON(), default=json.dumps({"format": "GTiff"})))

    op.drop_column('jobs', 'progress')
    op.drop_column('jobs', 'error')

0 Source : 76ede68ef627_process_graph_json_column.py
with Apache License 2.0
from Open-EO

def upgrade():
    op.add_column('process_graphs', sa.Column('process_graph', sa.JSON(), default={}))


def downgrade():

0 Source : f970bc0608e5_remove_pg_from_examples.py
with Apache License 2.0
from Open-EO

def downgrade():
    op.alter_column("example", "arguments", nullable=True)
    op.add_column("example", sa.Column("process_graph", sa.JSON(), nullable=True))
    # Require at least one of process_graph or arguments to be non-null.
    op.create_check_constraint('check_process_graph_or_arguments', 'example',
                               "process_graph IS NOT NULL OR arguments IS NOT NULL")

0 Source : test_types.py
with MIT License
from sqlalchemy

    def test_variant_righthand_coercion_honors_wrapped(self):
        my_json_normal = JSON()
        my_json_variant = JSON().with_variant(String(), "sqlite")

        tab = table(
            "test",
            column("avalue", my_json_normal),
            column("bvalue", my_json_variant),
        )
        expr = tab.c.avalue["foo"] == "bar"

        is_(expr.right.type._type_affinity, String)
        is_not(expr.right.type, my_json_normal)

        expr = tab.c.bvalue["foo"] == "bar"

        is_(expr.right.type._type_affinity, String)
        is_not(expr.right.type, my_json_variant)

    def test_variant_righthand_coercion_returns_self(self):