sqlalchemy.dialects.postgresql.insert.values

Here are examples of the Python API `sqlalchemy.dialects.postgresql.insert.values`, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

80 Examples

3 Source : loader.py
with MIT License
from alephdata

    def _upsert_values(self, conn, values):
        """Insert the given rows via PostgreSQL's native upsert
        (``INSERT ... ON CONFLICT DO UPDATE``), overwriting existing rows."""
        insert_stmt = upsert(self.dataset.table).values(values)
        upsert_stmt = insert_stmt.on_conflict_do_update(
            index_elements=["id", "origin", "fragment"],
            set_={
                "entity": insert_stmt.excluded.entity,
                "timestamp": insert_stmt.excluded.timestamp,
            },
        )
        conn.execute(upsert_stmt)

    def flush(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_do_update_set_clause_literal(self):
        # Literal values in set_: strings bind as parameters, null() renders
        # as the SQL NULL keyword.
        stmt = insert(self.table_with_metadata).values(myid=1, name="foo")
        stmt = stmt.on_conflict_do_update(
            index_elements=["myid"],
            set_=OrderedDict(
                [("name", "I'm a name"), ("description", null())]
            ),
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (myid, name) VALUES "
            "(%(myid)s, %(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = %(param_1)s, "
            "description = NULL",
            {"myid": 1, "name": "foo", "param_1": "I'm a name"},
        )

    def test_do_update_str_index_elements_target_one(self):

3 Source : database.py
with MIT License
from OpenASL

    async def set_user_timezone(self, user_id: int, timezone: dt.tzinfo | None):
        """Upsert the stored timezone preference for a user."""
        logger.debug(f"setting timezone for user_id {user_id}")
        insert_stmt = insert(user_settings).values(user_id=user_id, timezone=timezone)
        upsert_stmt = insert_stmt.on_conflict_do_update(
            index_elements=(user_settings.c.user_id,),
            set_={"timezone": insert_stmt.excluded.timezone},
        )
        await self.db.execute(upsert_stmt)

    async def get_user_timezone(self, user_id: int) -> StaticTzInfo | None:

3 Source : database.py
with MIT License
from OpenASL

    async def create_zoom_message(
        self, *, meeting_id: int, message_id: int, channel_id: int
    ):
        """Record a chat message associated with a Zoom meeting."""
        # created_at is passed explicitly because column default=now
        # doesn't have an effect when using postgresql.insert.
        stmt = insert(zoom_messages).values(
            meeting_id=meeting_id,
            message_id=message_id,
            channel_id=channel_id,
            created_at=now(),
        )
        await self.db.execute(stmt)

    async def remove_zoom_message(self, *, message_id: int):

3 Source : database.py
with MIT License
from OpenASL

    async def create_zzzzoom_meeting(self, *, meeting_id: int):
        """Create a zzzzoom meeting row pointing at an existing meeting."""
        # TODO: handle id collisions
        await self.db.execute(
            insert(zzzzoom_meetings).values(
                id=generate_nanoid(),
                created_at=now(),
                meeting_id=meeting_id,
            )
        )

    async def get_zzzzoom_meeting(self, id: str):

3 Source : database.py
with MIT License
from OpenASL

    async def save_topics(self, all_topics: Sequence[str]) -> None:
        """Replace the stored topics with *all_topics*.

        Clears the table first, then bulk-inserts the new topics, upserting
        on ``content`` so a concurrent writer cannot trigger a unique
        violation between the delete and the insert.
        """
        last_synced_at = now()
        await self.db.execute(topics.delete())
        if not all_topics:
            # insert().values([]) is invalid SQL; nothing left to insert.
            return
        stmt = insert(topics).values(
            [{"content": topic, "last_synced_at": last_synced_at} for topic in all_topics]
        )
        stmt = stmt.on_conflict_do_update(
            index_elements=(topics.c.content,),
            set_=dict(last_synced_at=stmt.excluded.last_synced_at),
        )
        await self.db.execute(stmt)

    async def get_all_topics(self) -> Sequence[str]:

3 Source : database.py
with MIT License
from OpenASL

    async def create_scheduled_event(self, *, event_id: int, created_by: int):
        """Record a scheduled event and who created it."""
        # created_at is passed explicitly because column default=now
        # doesn't have an effect when using postgresql.insert.
        stmt = insert(scheduled_events).values(
            event_id=event_id,
            created_by=created_by,
            created_at=now(),
        )
        await self.db.execute(stmt)

    async def get_scheduled_events_for_user(self, user_id: int) -> list[Mapping]:

3 Source : database.py
with MIT License
from OpenASL

    async def store_star_reward(self, user_id: int, star_count: int):
        """Persist a star-reward record for a user."""
        await self.db.execute(
            insert(star_rewards).values(
                id=uuid.uuid4(),
                user_id=user_id,
                star_count=star_count,
                created_at=now(),
            )
        )

    async def update_reward_milestones(self, guild_id: int, reward_milestones: list[int]):

3 Source : interface.py
with GNU General Public License v3.0
from Recidiviz

    def create_note(
        session: Session, user_context: UserContext, client: ETLClient, text: str
    ) -> OfficerNote:
        """Creates a new officer note in postgres for a given client."""
        officer_external = user_context.officer_id
        person_external = user_context.person_id(client)
        client_state = user_context.client_state_code(client)
        result = session.execute(
            insert(OfficerNote).values(
                state_code=client_state,
                officer_external_id=officer_external,
                person_external_id=person_external,
                text=text,
            )
        )
        session.commit()
        # Re-read the freshly inserted row so the caller gets a full ORM object.
        return session.query(OfficerNote).get(result.inserted_primary_key)

    @staticmethod

3 Source : __init__.py
with MIT License
from solero

async def handle_set_game_data(p, index: OptionalConverter(int) = 0, *, game_data: str):
    """Store (or overwrite) per-penguin game data for the current room."""
    if not p.room.game:
        return
    stmt = insert(PenguinGameData).values(
        penguin_id=p.id, room_id=p.room.id, index=index, data=game_data
    )
    stmt = stmt.on_conflict_do_update(
        constraint='penguin_game_data_pkey',
        set_=dict(data=game_data),
        where=((PenguinGameData.penguin_id == p.id)
               & (PenguinGameData.room_id == p.room.id)
               & (PenguinGameData.index == index))
    )
    await stmt.gino.scalar()


@handlers.handler(XTPacket('zr', ext='z'), client=ClientType.Vanilla)

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_nothing_no_target(self):
        # Omitting the conflict target renders a bare ON CONFLICT clause.
        stmt = insert(self.table1).values({"name": "foo"}).on_conflict_do_nothing()
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT DO NOTHING",
        )

    def test_do_nothing_index_elements_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_nothing_index_elements_target(self):
        # Conflict target supplied as a list of column-name strings.
        stmt = insert(self.table1).values({"name": "foo"}).on_conflict_do_nothing(
            index_elements=["myid"]
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (myid) DO NOTHING",
        )

    def test_do_update_set_clause_none(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_str_index_elements_target_two(self):
        # Conflict target as column-name strings, SET from the excluded row.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_update(
            index_elements=["myid"], set_={"name": stmt.excluded.name}
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_col_index_elements_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_col_index_elements_target(self):
        # Conflict target supplied as Column objects rather than strings.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_update(
            index_elements=[self.table1.c.myid],
            set_={"name": stmt.excluded.name},
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_unnamed_pk_constraint_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_unnamed_pk_constraint_target(self):
        # An unnamed PrimaryKeyConstraint renders as its column list.
        stmt = insert(self.table_with_metadata).values({"myid": 1, "name": "foo"})
        stmt = stmt.on_conflict_do_update(
            constraint=self.table_with_metadata.primary_key,
            set_={"name": stmt.excluded.name},
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (myid, name) VALUES "
            "(%(myid)s, %(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_pk_constraint_index_elements_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_pk_constraint_index_elements_target(self):
        # A PrimaryKeyConstraint may also be passed via index_elements.
        stmt = insert(self.table_with_metadata).values({"myid": 1, "name": "foo"})
        stmt = stmt.on_conflict_do_update(
            index_elements=self.table_with_metadata.primary_key,
            set_={"name": stmt.excluded.name},
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (myid, name) VALUES "
            "(%(myid)s, %(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_named_unique_constraint_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_named_unique_constraint_target(self):
        # A named constraint compiles to ON CONFLICT ON CONSTRAINT <name>.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_update(
            constraint=self.unique_constr, set_={"myid": stmt.excluded.myid}
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT ON CONSTRAINT uq_name "
            "DO UPDATE SET myid = excluded.myid",
        )

    def test_do_update_string_constraint_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_string_constraint_target(self):
        # The constraint may be given by name (a plain string) too.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_update(
            constraint=self.unique_constr.name, set_={"myid": stmt.excluded.myid}
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT ON CONSTRAINT uq_name "
            "DO UPDATE SET myid = excluded.myid",
        )

    def test_do_nothing_quoted_string_constraint_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_nothing_quoted_string_constraint_target(self):
        """test #6696"""
        # Constraint names needing quoting are emitted inside double quotes.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_nothing(constraint="Some Constraint Name")
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            '(%(name)s) ON CONFLICT ON CONSTRAINT "Some Constraint Name" '
            "DO NOTHING",
        )

    def test_do_nothing_super_long_name_constraint_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_nothing_quoted_named_constraint_target(self):
        """test #6696"""
        # A UniqueConstraint object whose name needs quoting is emitted quoted.
        stmt = insert(self.table1).values({"name": "foo"})
        quoted_constraint = UniqueConstraint(
            self.table1.c.myid, name="Some Constraint Name"
        )
        stmt = stmt.on_conflict_do_nothing(constraint=quoted_constraint)
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            '(%(name)s) ON CONFLICT ON CONSTRAINT "Some Constraint Name" '
            "DO NOTHING",
        )

    def test_do_update_index_elements_where_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_index_elements_where_target(self):
        # Partial-index target: the index WHERE clause follows the column list.
        stmt = insert(self.table1).values({"name": "foo"})
        partial_index_where = self.goofy_index.dialect_options["postgresql"][
            "where"
        ]
        stmt = stmt.on_conflict_do_update(
            index_elements=self.goofy_index.expressions,
            index_where=partial_index_where,
            set_={"name": stmt.excluded.name},
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (name) "
            "WHERE name > %(name_1)s "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_index_elements_where_target_multivalues(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_unnamed_index_target(self):
        # Passing an unnamed partial Index as `constraint` falls back to its
        # expressions and WHERE clause.
        stmt = insert(self.table1).values({"name": "foo"})

        anon_index = Index(
            None, self.table1.c.name, postgresql_where=self.table1.c.name > "m"
        )

        stmt = stmt.on_conflict_do_update(
            constraint=anon_index, set_={"name": stmt.excluded.name}
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (name) "
            "WHERE name > %(name_1)s "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_unnamed_exclude_constraint_target(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_unnamed_exclude_constraint_target(self):
        # An unnamed EXCLUDE constraint renders as its columns plus WHERE.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_update(
            constraint=self.excl_constr_anon, set_={"name": stmt.excluded.name}
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (name, description) "
            "WHERE description != %(description_1)s "
            "DO UPDATE SET name = excluded.name",
        )

    def test_do_update_add_whereclause(self):

3 Source : test_compiler.py
with MIT License
from sqlalchemy

    def test_do_update_add_whereclause_references_excluded(self):
        # The DO UPDATE ... WHERE clause may reference the `excluded` row.
        stmt = insert(self.table1).values({"name": "foo"})
        stmt = stmt.on_conflict_do_update(
            constraint=self.excl_constr_anon,
            set_={"name": stmt.excluded.name},
            where=self.table1.c.name != stmt.excluded.name,
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (name) VALUES "
            "(%(name)s) ON CONFLICT (name, description) "
            "WHERE description != %(description_1)s "
            "DO UPDATE SET name = excluded.name "
            "WHERE mytable.name != excluded.name",
        )

    def test_do_update_additional_colnames(self):

3 Source : encodedb.py
with MIT License
from yezz123

    async def request_email_confirmation(self, email: str, token_hash: str) -> None:
        """Upsert the pending email-confirmation token for an address.

        Args:
            email (str): Email
            token_hash (str): Token hash
        """
        stmt = pg_insert(self.email_confirmations).values(
            email=email, token=token_hash
        )
        # On a duplicate email, replace the previously issued token.
        stmt = stmt.on_conflict_do_update(constraint="email", set_={"token": token_hash})
        await self.database.execute(stmt)

    async def confirm_email(self, token_hash: str) -> bool:

0 Source : daily_sorted_letter_dao.py
with MIT License
from cds-snc

def dao_create_or_update_daily_sorted_letter(new_daily_sorted_letter):
    """
    Upsert a DailySortedLetter row keyed on (billing_day, file_name).

    Postgres's native upsert avoids race conditions when two threads try to
    insert the same row; ``excluded`` refers to the values we tried to insert
    but which were rejected by the conflict.
    http://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#insert-on-conflict-upsert
    """
    table = DailySortedLetter.__table__
    insert_stmt = insert(table).values(
        billing_day=new_daily_sorted_letter.billing_day,
        file_name=new_daily_sorted_letter.file_name,
        unsorted_count=new_daily_sorted_letter.unsorted_count,
        sorted_count=new_daily_sorted_letter.sorted_count,
    )
    upsert_stmt = insert_stmt.on_conflict_do_update(
        index_elements=[table.c.billing_day, table.c.file_name],
        set_={
            "unsorted_count": insert_stmt.excluded.unsorted_count,
            "sorted_count": insert_stmt.excluded.sorted_count,
            "updated_at": datetime.utcnow(),
        },
    )
    db.session.connection().execute(upsert_stmt)

0 Source : fact_billing_dao.py
with MIT License
from cds-snc

def update_fact_billing(data, process_day):
    """Upsert a FactBilling row for *data* on *process_day*.

    Postgres's native upsert avoids race conditions when two threads try to
    insert the same row; ``excluded`` refers to the values we tried to insert
    but which were rejected by the conflict.
    http://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#insert-on-conflict-upsert
    """
    non_letter_rates, letter_rates = get_rates_for_billing()
    rate = get_rate(
        non_letter_rates,
        letter_rates,
        data.notification_type,
        process_day,
        data.crown,
        data.letter_page_count,
        data.postage,
    )
    billing_record = create_billing_record(data, rate, process_day)

    table = FactBilling.__table__
    insert_stmt = insert(table).values(
        bst_date=billing_record.bst_date,
        template_id=billing_record.template_id,
        service_id=billing_record.service_id,
        provider=billing_record.provider,
        rate_multiplier=billing_record.rate_multiplier,
        notification_type=billing_record.notification_type,
        international=billing_record.international,
        billable_units=billing_record.billable_units,
        notifications_sent=billing_record.notifications_sent,
        rate=billing_record.rate,
        postage=billing_record.postage,
    )

    upsert_stmt = insert_stmt.on_conflict_do_update(
        constraint="ft_billing_pkey",
        set_={
            "notifications_sent": insert_stmt.excluded.notifications_sent,
            "billable_units": insert_stmt.excluded.billable_units,
            "updated_at": datetime.utcnow(),
        },
    )
    db.session.connection().execute(upsert_stmt)
    db.session.commit()


def create_billing_record(data, rate, process_day):

0 Source : fact_notification_status_dao.py
with MIT License
from cds-snc

def update_fact_notification_status(data, process_day):
    """Rebuild the FactNotificationStatus rows for *process_day*.

    Deletes every existing row for the day, then re-inserts one row per
    aggregate in *data*.
    """
    table = FactNotificationStatus.__table__
    FactNotificationStatus.query.filter(FactNotificationStatus.bst_date == process_day).delete()

    for row in data:
        stmt = insert(table).values(
            bst_date=process_day,
            template_id=row.template_id,
            service_id=row.service_id,
            job_id=row.job_id,
            notification_type=row.notification_type,
            key_type=row.key_type,
            notification_status=row.status,
            notification_count=row.notification_count,
        )
        # NOTE(review): executing and committing once per row costs one
        # round-trip each; batching looks possible but would change
        # partial-failure semantics — confirm before changing.
        db.session.connection().execute(stmt)
        db.session.commit()


def fetch_notification_status_for_service_by_month(start_date, end_date, service_id):

0 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_do_update_set_clause_none(self):
        # A Python None in set_ binds as a parameter, not the NULL keyword.
        stmt = insert(self.table_with_metadata).values(myid=1, name="foo")
        stmt = stmt.on_conflict_do_update(
            index_elements=["myid"],
            set_=OrderedDict([("name", "I'm a name"), ("description", None)]),
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (myid, name) VALUES "
            "(%(myid)s, %(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = %(param_1)s, "
            "description = %(param_2)s",
            {
                "myid": 1,
                "name": "foo",
                "param_1": "I'm a name",
                "param_2": None,
            },
        )

    def test_do_update_set_clause_literal(self):

0 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_do_update_str_index_elements_target_one(self):
        # OrderedDict preserves the SET clause ordering in the compiled SQL.
        stmt = insert(self.table_with_metadata).values(myid=1, name="foo")
        stmt = stmt.on_conflict_do_update(
            index_elements=["myid"],
            set_=OrderedDict(
                [
                    ("name", stmt.excluded.name),
                    ("description", stmt.excluded.description),
                ]
            ),
        )
        self.assert_compile(
            stmt,
            "INSERT INTO mytable (myid, name) VALUES "
            "(%(myid)s, %(name)s) ON CONFLICT (myid) "
            "DO UPDATE SET name = excluded.name, "
            "description = excluded.description",
        )

    def test_do_update_str_index_elements_target_two(self):

0 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_quote_raw_string_col(self):
        # Column names needing quoting are double-quoted in both the conflict
        # target and the SET clause.
        tbl = table("t", column("FancyName"), column("other name"))

        query = insert(tbl).values(FancyName="something new")
        query = query.on_conflict_do_update(
            index_elements=["FancyName", "other name"],
            set_=OrderedDict(
                [
                    ("FancyName", "something updated"),
                    ("other name", "something else"),
                ]
            ),
        )

        self.assert_compile(
            query,
            'INSERT INTO t ("FancyName") VALUES (%(FancyName)s) '
            'ON CONFLICT ("FancyName", "other name") '
            'DO UPDATE SET "FancyName" = %(param_1)s, '
            '"other name" = %(param_2)s',
            {
                "param_1": "something updated",
                "param_2": "something else",
                "FancyName": "something new",
            },
        )


class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):

0 Source : db.py
with MIT License
from kiwicom

    def upsert(self, objs):
        """
        Insert on conflict do update.

        Bulk-upserts *objs* (ORM instances of one model): inserts each row
        and, on a unique-constraint conflict, overwrites the existing row
        with the incoming ("excluded") values.
        """
        logging.info(f"Upserting {len(objs)} results.")
        if not objs:
            # objs[0] below would raise IndexError, and insert().values([])
            # is invalid SQL — nothing to do.
            return

        data = [self.model2dict(o) for o in objs]

        table = objs[0].__table__
        stmt = insert(table).values(data)
        conflicting_cols = get_unique_constraint_names(table)
        # Overwrite every inserted column with the incoming value on conflict.
        excluded_set = {k: getattr(stmt.excluded, k) for k in data[0].keys()}

        on_update_stmt = stmt.on_conflict_do_update(
            index_elements=conflicting_cols, set_=excluded_set
        )

        session = self.make_session()
        try:
            session.execute(on_update_stmt)
            session.commit()
        except Exception:
            # Narrowed from a bare `except:`; the error is still re-raised
            # after rolling back.
            session.rollback()
            raise
        finally:
            session.close()

    def get_column_names(self, table_full_name: str) -> List:

0 Source : abstract_query.py
with MIT License
from LuisLuii

    def upsert(self, *,
               insert_arg,
               unique_fields: List[str],
               upsert_one=True,
               ) -> BinaryExpression:
        """Build an ``INSERT ... ON CONFLICT DO UPDATE ... RETURNING *`` statement.

        insert_arg: a dict (or, when upsert_one is False, an object carrying a
            list under 'insert') of column values; may also carry an
            'on_conflict' entry describing which columns to update on conflict.
            NOTE: this dict is mutated in place (keys are popped).
        unique_fields: column names forming the conflict target.
        upsert_one: single-row vs multi-row mode.
        """
        insert_arg_dict: Union[list, dict] = insert_arg

        # Pops mutate the caller's insert_arg.
        insert_with_conflict_handle = insert_arg_dict.pop('on_conflict', None)
        if not upsert_one:
            # Multi-row mode: unpack each row object into its attribute dict.
            insert_arg_list: list = insert_arg_dict.pop('insert', None)
            insert_arg_dict = []
            for i in insert_arg_list:
                insert_arg_dict.append(i.__dict__)

        # Normalise to a list of dicts, restricted to known model columns.
        if not isinstance(insert_arg_dict, list):
            insert_arg_dict: list[dict] = [insert_arg_dict]
        insert_arg_dict: list[dict] = [clean_input_fields(model=self.model_columns, param=insert_arg)
                                       for insert_arg in insert_arg_dict]
        insert_stmt = insert(self.model).values(insert_arg_dict)

        if unique_fields and insert_with_conflict_handle:
            # NOTE(review): this call passes its arguments positionally as
            # (update_columns, self.model_columns), while every other call in
            # this method uses model=..., param=... — confirm the argument
            # order matches clean_input_fields' signature.
            update_columns = clean_input_fields(insert_with_conflict_handle.__dict__.get('update_columns', None),
                                                self.model_columns)
            if not update_columns:
                raise UpdateColumnEmptyException('update_columns parameter must be a non-empty list ')
            # Map each updatable column to the incoming ("excluded") value.
            conflict_update_dict = {}
            for columns in update_columns:
                conflict_update_dict[columns] = getattr(insert_stmt.excluded, columns)

            conflict_list = clean_input_fields(model=self.model_columns, param=unique_fields)
            conflict_update_dict = clean_input_fields(model=self.model_columns, param=conflict_update_dict)
            insert_stmt = insert_stmt.on_conflict_do_update(index_elements=conflict_list,
                                                            set_=conflict_update_dict
                                                            )
        # Return all columns of the affected rows to the caller.
        insert_stmt = insert_stmt.returning(text('*'))
        return insert_stmt


class SQLAlchemySQLITEQueryService(SQLAlchemyGeneralSQLQueryService):

0 Source : relational_db_api.py
with MIT License
from mohaseeb

def create_upsert_postgres(table, record):
    """Build (but do not execute) a PostgreSQL upsert for *record*.

    Inserts the record into *table*; if a row with the same primary key
    already exists it is updated with the record's values instead. Uses
    PostgreSQL's ``on_conflict_do_update``, so the returned statement is
    only valid for PostgreSQL tables. Refer to the
    `SqlAlchemy PostgreSQL documentation`_ for more information.

    Args:
        table (sqlalchemy.sql.schema.Table): database table metadata.
        record (dict): a data record, corresponding to one row, to be inserted.

    Returns:
        sqlalchemy.sql.dml.Insert: a statement for inserting the passed
        record to the specified table.

    .. _SqlAlchemy PostgreSQL documentation:
       https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#insert-on-conflict-upsert
    """
    stmt = postgres_insert(table).values(record)
    return stmt.on_conflict_do_update(
        index_elements=list(table.primary_key),
        set_=record,
    )


def create_upsert_mysql(table, record):

0 Source : sql.py
with MIT License
from notsambeck

def _upsert(table: sqa.Table,
            engine: sqa.engine,
            cleaned_data: pd.DataFrame):
    """
    insert data into a table, replacing any rows with duplicate indices

    When upsert finds duplicate records, it overwrites ALL VALUES that are present in source DataFrame, including NaN.

    postgres - see: https://docs.sqlalchemy.org/en/13/dialects/postgresql.html#insert-on-conflict-upsert
    """
    # The DataFrame index (single or multi) supplies the conflict-target
    # column name(s).
    if isinstance(cleaned_data.index, pd.MultiIndex):
        multi = True
        names = cleaned_data.index.names
        index_elements = names
    else:
        multi = False
        names = cleaned_data.index.name
        index_elements = [names]

    # Object dtype so per-cell pd.notna checks behave uniformly across columns.
    cleaned_data = cleaned_data.astype('object')

    def map2none(val):
        """sqlalchemy freaks out about NaNs, so replace them with None"""
        if pd.notna(val):
            return val

    # One transaction for the whole frame; rows are processed one at a time.
    with engine.begin() as con:
        for row in cleaned_data.reset_index(drop=False).itertuples(index=False):
            # check index uniqueness by attempting insert; if it fails, update
            row = {k: map2none(v) for k, v in row._asdict().items()}
            try:

                if engine.dialect.dbapi.__name__ == 'psycopg2':
                    # Native PostgreSQL upsert: a single round-trip per row.
                    insert = pg_insert(table).values(row).on_conflict_do_update(
                        index_elements=index_elements,
                        set_=row
                    )
                else:
                    # Other dialects: plain insert; duplicate keys fall through
                    # to the IntegrityError handler below.
                    insert = table.insert().values(row)

                con.execute(insert)

            except sqa.exc.IntegrityError:
                # Insert collided with an existing key: update that row instead.
                if multi:
                    upsert = table.update()
                    for n in names:
                        upsert = upsert.where(table.c[n] == row[n])
                    con.execute(upsert.values(row))
                else:
                    upsert = table.update() \
                        .where(table.c[names] == row[names]) \
                        .values(row)
                    con.execute(upsert)


def read_sql(table_name: str,

0 Source : database.py
with MIT License
from OpenASL

    async def create_zoom_meeting(
        self,
        *,
        zoom_user: str,
        meeting_id: int,
        join_url: str,
        passcode: str,
        topic: str,
        set_up: bool,
    ):
        """Persist a newly created Zoom meeting."""
        # created_at is passed explicitly because column default=now doesn't
        # have an effect when using postgresql.insert.
        created_at = now()
        await self.db.execute(
            insert(zoom_meetings).values(
                zoom_user=zoom_user,
                meeting_id=meeting_id,
                join_url=join_url,
                passcode=passcode,
                topic=topic,
                created_at=created_at,
                # Meetings created already configured record their setup time.
                setup_at=created_at if set_up else None,
            )
        )

    async def get_zoom_meeting(self, meeting_id: int) -> Mapping | None:

0 Source : database.py
with MIT License
from OpenASL

    async def add_zoom_participant(
        self,
        *,
        meeting_id: int,
        name: str,
        zoom_id: str | None,
        email: str | None,
        joined_at: dt.datetime,
    ):
        """Upsert a participant row keyed on (meeting_id, name)."""
        # created_at is passed explicitly because column `default` doesn't
        # execute when using postgres's insert.
        insert_stmt = insert(zoom_participants).values(
            meeting_id=meeting_id,
            name=name,
            zoom_id=zoom_id,
            email=email,
            joined_at=joined_at,
            created_at=now(),
        )
        upsert_stmt = insert_stmt.on_conflict_do_update(
            index_elements=(zoom_participants.c.meeting_id, zoom_participants.c.name),
            set_={
                "zoom_id": insert_stmt.excluded.zoom_id,
                "email": insert_stmt.excluded.email,
                "joined_at": insert_stmt.excluded.joined_at,
            },
        )
        await self.db.execute(upsert_stmt)

    async def get_zoom_participant(self, *, meeting_id: int, name: str) -> Mapping | None:

0 Source : database.py
with MIT License
from OpenASL

    async def add_sign_cafe_intro(
        self,
        *,
        message_id: int,
        user_id: int,
        posted_at: dt.datetime,
    ):
        """Upsert an intro-message record keyed on message_id."""
        # created_at is passed explicitly because column `default` doesn't
        # execute when using postgres's insert.
        insert_stmt = insert(sign_cafe_intros).values(
            message_id=message_id,
            user_id=user_id,
            posted_at=posted_at,
            created_at=now(),
        )
        upsert_stmt = insert_stmt.on_conflict_do_update(
            index_elements=(sign_cafe_intros.c.message_id,),
            set_={
                "user_id": insert_stmt.excluded.user_id,
                "posted_at": insert_stmt.excluded.posted_at,
                "created_at": insert_stmt.excluded.created_at,
            },
        )
        await self.db.execute(upsert_stmt)

    async def clear_sign_cafe_members(self):

0 Source : database.py
with MIT License
from OpenASL

    async def upsert_sign_cafe_member(self, *, member: Member):
        """Upsert a guild member's roles and rule-acknowledgement state."""
        user_id = member.id
        joined_at = member.joined_at
        role_ids = {role.id for role in member.roles}
        # member.roles[0] is @everyone — skip it when concatenating.
        roles_concatenated = "|".join([role.name for role in member.roles[1:]])
        has_acknowledged_rules = settings.SIGN_CAFE_ACKNOWLEDGED_RULES_ROLE_ID in role_ids
        insert_stmt = insert(sign_cafe_members).values(
            user_id=user_id,
            joined_at=joined_at,
            roles=roles_concatenated,
            has_acknowledged_rules=has_acknowledged_rules,
            created_at=now(),
        )
        upsert_stmt = insert_stmt.on_conflict_do_update(
            index_elements=(sign_cafe_members.c.user_id,),
            set_={
                "joined_at": insert_stmt.excluded.joined_at,
                "roles": roles_concatenated,
                "has_acknowledged_rules": has_acknowledged_rules,
            },
        )
        await self.db.execute(upsert_stmt)

    async def remove_sign_cafe_member(self, *, user_id: int):

0 Source : database.py
with MIT License
from OpenASL

    async def give_stars(
        self,
        *,
        from_user_id: int,
        to_user_id: int,
        n_stars: int,
        message_id: int | None,
        jump_url: str | None,
    ):
        """Record an ADD star-log entry and add ``n_stars`` to the recipient.

        Fix: a brand-new ``user_stars`` row previously always started at 1,
        so a first-ever grant with ``n_stars > 1`` under-counted; the
        inserted row now starts at ``n_stars`` (the update path already
        incremented by ``n_stars``).
        """
        created_at = now()
        # Insert a star log (audit trail of who gave stars to whom)
        stmt = insert(star_logs).values(
            id=uuid.uuid4(),
            from_user_id=from_user_id,
            to_user_id=to_user_id,
            message_id=message_id,
            jump_url=jump_url,
            created_at=created_at,
            action="ADD",
        )
        await self.db.execute(stmt)

        # Update the user's star count: insert at n_stars, or increment
        # an existing row by n_stars.
        stmt = insert(user_stars).values(
            user_id=to_user_id,
            star_count=n_stars,
            created_at=created_at,
            updated_at=created_at,
        )
        stmt = stmt.on_conflict_do_update(
            index_elements=(user_stars.c.user_id,),
            set_=dict(
                star_count=user_stars.c.star_count + n_stars, updated_at=created_at
            ),
        )
        await self.db.execute(stmt)

    async def remove_stars(

0 Source : database.py
with MIT License
from OpenASL

    async def remove_stars(
        self,
        *,
        from_user_id: int,
        to_user_id: int,
        n_stars: int,
        message_id: int | None,
        jump_url: str | None,
    ):
        """Record a REMOVE star-log entry and decrement the recipient's count.

        Fix: resolves the previous TODO — the stored count is now clamped at
        zero on the SQL side so repeated removals can't drive it negative.
        Reward roles are re-evaluated from the resulting count afterwards.
        """
        from sqlalchemy import func  # local import: used for SQL-side clamping

        created_at = now()
        # Insert a star log (audit trail)
        stmt = insert(star_logs).values(
            id=uuid.uuid4(),
            from_user_id=from_user_id,
            to_user_id=to_user_id,
            message_id=message_id,
            jump_url=jump_url,
            created_at=created_at,
            action="REMOVE",
        )
        await self.db.execute(stmt)

        # Update the user's star count. A user with no existing row gets 0;
        # an existing row is decremented but never drops below zero.
        stmt = insert(user_stars).values(
            user_id=to_user_id,
            star_count=0,
            created_at=created_at,
            updated_at=created_at,
        )
        stmt = stmt.on_conflict_do_update(
            index_elements=(user_stars.c.user_id,),
            set_=dict(
                star_count=func.greatest(user_stars.c.star_count - n_stars, 0),
                updated_at=created_at,
            ),
        )
        await self.db.execute(stmt)
        star_count = await self.get_user_stars(user_id=to_user_id)
        await self._clean_rewards(user_id=to_user_id, star_count=star_count)

    async def get_user_stars(self, user_id: int) -> int:

0 Source : database.py
with MIT License
from OpenASL

    async def set_user_stars(
        self, *, from_user_id: int, to_user_id: int, star_count: int
    ):
        """Log a SET action and overwrite the recipient's star count."""
        timestamp = now()
        # Audit-log the explicit SET (no associated message, hence the Nones)
        await self.db.execute(
            insert(star_logs).values(
                id=uuid.uuid4(),
                from_user_id=from_user_id,
                to_user_id=to_user_id,
                message_id=None,
                jump_url=None,
                created_at=timestamp,
                action="SET",
            )
        )
        # Upsert the aggregate row with the new absolute count
        upsert_stmt = insert(user_stars).values(
            user_id=to_user_id,
            star_count=star_count,
            created_at=timestamp,
            updated_at=timestamp,
        )
        upsert_stmt = upsert_stmt.on_conflict_do_update(
            index_elements=(user_stars.c.user_id,),
            set_=dict(
                star_count=upsert_stmt.excluded.star_count, updated_at=timestamp
            ),
        )
        await self.db.execute(upsert_stmt)

        await self._clean_rewards(user_id=to_user_id, star_count=star_count)

    async def _clean_rewards(self, user_id: int, star_count: int):

0 Source : willy.py
with MIT License
from opennem

def update_weather() -> None:
    """Fetch Willy Weather temperatures for all mapped stations and upsert
    them into ``BomObservation``.

    Fix: ``records`` was re-initialised inside the station loop, so only the
    last station's observations were ever persisted; it now accumulates
    across all stations.
    """
    wc = WillyClient()

    records = []

    for bom_code, willyid in WILLY_MAP.items():
        # NOTE(review): start_date is hard-coded — presumably a debugging
        # leftover; confirm whether this should be a rolling window.
        r = wc.get_location_temp(willyid, days=3, start_date="2021-10-14")
        data_points = r["observationalGraphs"]["temperature"]["dataConfig"]["series"]["groups"]

        pprint(r)

        for pointset in data_points:
            for p in pointset["points"]:
                r_dict = {
                    "station_id": bom_code,
                    # point timestamps are unix epoch; localise to Sydney
                    "observation_time": unix_timestamp_to_aware_datetime(
                        p["x"], "Australia/Sydney"
                    ),
                    "temp_air": p["y"],
                }
                print("{} -> {}".format(r_dict["observation_time"], r_dict["temp_air"]))
                records.append(r_dict)

    session = SessionLocal()
    engine = get_database_engine()

    # upsert on the (observation_time, station_id) composite key
    stmt = insert(BomObservation).values(records)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["observation_time", "station_id"],
        set_={
            "temp_air": stmt.excluded.temp_air,
        },
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error: {}".format(e))
    finally:
        session.close()


if __name__ == "__main__":

0 Source : apvi.py
with MIT License
from opennem

def store_apvi_forecastset(forecast_set: APVIForecastSet) -> ControllerReturn:
    """Persist an APVI forecast set to the database"""
    engine = get_database_engine()
    session = get_scoped_session()
    cr = ControllerReturn()

    if not forecast_set.intervals:
        return cr

    cr.total_records = len(forecast_set.intervals)

    records_to_store = []

    for interval in forecast_set.intervals:
        record = interval.dict(exclude={"state"})
        record["created_by"] = "apvi.controller"
        record["is_forecast"] = False
        records_to_store.append(record)
        cr.processed_records += 1

    if len(records_to_store) < 1:
        return cr

    # upsert on the facility-scada composite key
    stmt = insert(FacilityScada).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["trading_interval", "network_id", "facility_code", "is_forecast"],
        set_={
            "generated": stmt.excluded.generated,
            "created_by": stmt.excluded.created_by,
        },
    )

    try:
        session.execute(stmt)
        session.commit()
        cr.inserted_records = len(records_to_store)
    except Exception as e:
        logger.error("Error: {}".format(e))
        cr.errors = len(records_to_store)
        cr.error_detail.append(str(e))
    finally:
        session.close()
        engine.dispose()

    return cr


def update_apvi_facility_capacities(forecast_set: APVIForecastSet) -> None:

0 Source : bom.py
with MIT License
from opennem

def store_bom_observation_intervals(observations: BOMObservationReturn) -> ControllerReturn:
    """Store BOM observations, upserting on (observation_time, station_id).

    Fixes:
    * ``max()`` over an empty observation list raised ``ValueError``;
      ``default=None`` now yields the ``None`` the following guard expects.
    * ``inserted_records`` was previously set even when the insert failed
      (counting the rows as both errors and inserted); it is now only set
      after a successful commit.
    """

    engine = get_database_engine()

    cr = ControllerReturn(total_records=len(observations.observations))

    latest_forecast: Optional[datetime] = max(
        [o.observation_time for o in observations.observations], default=None
    )

    if latest_forecast:
        latest_forecast = latest_forecast.astimezone(pytz.timezone("Australia/Sydney"))
        logger.debug("server_latest is {}".format(latest_forecast))

        cr.server_latest = latest_forecast

    records_to_store = []

    for obs in observations.observations:
        records_to_store.append(
            {
                "station_id": observations.station_code,
                "observation_time": obs.observation_time,
                "temp_apparent": obs.apparent_t,
                "temp_air": obs.air_temp,
                "press_qnh": obs.press_qnh,
                "wind_dir": obs.wind_dir,
                "wind_spd": obs.wind_spd_kmh,
                "wind_gust": obs.gust_kmh,
                "cloud": obs.cloud,
                "cloud_type": obs.cloud_type,
                "humidity": obs.rel_hum,
            }
        )
        cr.processed_records += 1

    if not len(records_to_store):
        return cr

    # upsert on the (observation_time, station_id) composite key
    stmt = insert(BomObservation).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["observation_time", "station_id"],
        set_={
            "temp_apparent": stmt.excluded.temp_apparent,
            "temp_air": stmt.excluded.temp_air,
            "press_qnh": stmt.excluded.press_qnh,
            "wind_dir": stmt.excluded.wind_dir,
            "wind_spd": stmt.excluded.wind_spd,
            "wind_gust": stmt.excluded.wind_gust,
            "cloud": stmt.excluded.cloud,
            "cloud_type": stmt.excluded.cloud_type,
            "humidity": stmt.excluded.humidity,
        },
    )

    with get_scoped_session() as session:
        try:
            session.execute(stmt)
            session.commit()
            # only count rows as inserted once the commit has succeeded
            cr.inserted_records = cr.processed_records
        except Exception as e:
            logger.error("Error: {}".format(e))
            cr.errors = cr.processed_records
            cr.error_detail.append(str(e))
        finally:
            session.close()
            engine.dispose()

    return cr

0 Source : nem.py
with MIT License
from opennem

def process_dispatch_interconnectorres(table: AEMOTableSchema) -> ControllerReturn:
    """Store NEM dispatch interconnector flow records as FacilityScada rows."""
    session = get_scoped_session()
    engine = get_database_engine()

    cr = ControllerReturn(total_records=len(table.records))

    records_to_store = [
        {
            "network_id": "NEM",
            "created_by": "opennem.controller",
            "facility_code": record.interconnectorid,  # type:ignore
            "trading_interval": record.settlementdate,  # type:ignore
            "generated": record.mwflow,  # type:ignore
        }
        for record in table.records
    ]
    cr.processed_records = len(records_to_store)

    # upsert on the facility-scada composite key
    stmt = insert(FacilityScada).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["trading_interval", "network_id", "facility_code", "is_forecast"],
        set_={"generated": stmt.excluded.generated},
    )

    try:
        session.execute(stmt)
        session.commit()
        cr.inserted_records = cr.processed_records
        cr.server_latest = max(i["trading_interval"] for i in records_to_store)
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        cr.errors = cr.processed_records
        return cr
    finally:
        session.rollback()
        session.close()
        engine.dispose()

    return cr


def process_nem_price(table: AEMOTableSchema) -> ControllerReturn:

0 Source : nem.py
with MIT License
from opennem

def process_nem_price(table: AEMOTableSchema) -> ControllerReturn:
    """Store NEM price records into ``BalancingSummary``.

    Fix: duplicate detection previously built a ``set`` of the two key
    fields (unordered, so the key had no field identity) and scanned a list
    for membership (O(n) per record); it now uses an ordered ``tuple`` key
    held in a ``set`` for O(1) lookups.
    """
    session = get_scoped_session()
    engine = get_database_engine()

    cr = ControllerReturn(total_records=len(table.records))
    records_to_store = []
    seen_keys: set = set()

    # dispatch prices land in a separate column
    price_field = "price"

    if table.full_name == "dispatch_price":
        price_field = "price_dispatch"

    for record in table.records:
        primary_key = (record.settlementdate, record.regionid)

        if primary_key in seen_keys:
            continue

        seen_keys.add(primary_key)

        records_to_store.append(
            {
                "network_id": "NEM",
                "created_by": "opennem.controllers.nem",
                "network_region": record.regionid,
                "trading_interval": record.settlementdate,
                price_field: record.rrp,
            }
        )

        cr.processed_records += 1

    stmt = insert(BalancingSummary).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["trading_interval", "network_id", "network_region"],
        set_={price_field: getattr(stmt.excluded, price_field)},
    )

    try:
        session.execute(stmt)
        session.commit()
        cr.inserted_records = cr.processed_records
        cr.server_latest = max([i["trading_interval"] for i in records_to_store])
    except Exception as e:
        logger.error("Error inserting NEM price records")
        logger.error(e)
        cr.errors = cr.processed_records
    finally:
        session.rollback()
        session.close()
        engine.dispose()

    return cr


def process_dispatch_regionsum(table: AEMOTableSchema) -> ControllerReturn:

0 Source : nem.py
with MIT License
from opennem

def process_dispatch_regionsum(table: AEMOTableSchema) -> ControllerReturn:
    """Store NEM dispatch region summaries into BalancingSummary."""
    session = get_scoped_session()
    engine = get_database_engine()

    cr = ControllerReturn(total_records=len(table.records))

    records_to_store = [
        {
            "network_id": "NEM",
            "created_by": "opennem.controller",
            "network_region": record.regionid,
            "trading_interval": record.settlementdate,
            "net_interchange": record.netinterchange,
            "demand_total": record.demand_and_nonschedgen,
        }
        for record in table.records
    ]
    cr.processed_records = len(records_to_store)

    # upsert on the balancing-summary composite key
    stmt = insert(BalancingSummary).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["trading_interval", "network_id", "network_region"],
        set_={
            "net_interchange": stmt.excluded.net_interchange,
            "demand_total": stmt.excluded.demand_total,
        },
    )

    try:
        session.execute(stmt)
        session.commit()
        cr.inserted_records = cr.processed_records
        cr.server_latest = max(i["trading_interval"] for i in records_to_store)
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        cr.errors = cr.processed_records
    finally:
        session.rollback()
        session.close()
        engine.dispose()

    return cr


def process_trading_regionsum(table: Dict[str, Any]) -> Dict:

0 Source : nem.py
with MIT License
from opennem

def process_trading_regionsum(table: Dict[str, Any]) -> Dict:
    """Store NEM trading region summary rows into ``BalancingSummary``.

    Fix: the per-record dedup key was an unordered ``set`` looked up in a
    list (O(n) per record, key fields interchangeable); it is now an
    ordered ``tuple`` held in a ``set`` for O(1) membership tests.
    """
    engine = get_database_engine()

    if "records" not in table:
        raise Exception("Invalid table no records")

    records = table["records"]

    limit = None
    records_to_store = []
    records_processed = 0
    seen_keys: set = set()

    for record in records:
        trading_interval = parse_date(
            record["SETTLEMENTDATE"],
            network=NetworkNEM,
            dayfirst=False,
            date_format="%Y/%m/%d %H:%M:%S",
        )

        # skip rows whose settlement date can't be parsed
        if not trading_interval:
            continue

        _pk = (trading_interval, record["REGIONID"])

        if _pk in seen_keys:
            continue

        seen_keys.add(_pk)

        net_interchange = None

        if "NETINTERCHANGE" in record:
            net_interchange = clean_float(record["NETINTERCHANGE"])

        demand_total = None

        if "TOTALDEMAND" in record:
            demand_total = clean_float(record["TOTALDEMAND"])

        records_to_store.append(
            {
                "network_id": "NEM",
                "created_by": "opennem.controller.nem",
                "network_region": record["REGIONID"],
                "net_interchange_trading": net_interchange,
                "trading_interval": trading_interval,
                "demand_total": demand_total,
            }
        )

        records_processed += 1

        if limit and records_processed >= limit:
            logger.info("Reached limit of: {} {}".format(limit, records_processed))
            break

    stmt = insert(BalancingSummary).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["trading_interval", "network_id", "network_region"],
        set_={
            "demand_total": stmt.excluded.demand_total,
            "net_interchange_trading": stmt.excluded.net_interchange_trading,
        },
    )

    session = get_scoped_session()

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        records_to_store = []
    finally:
        session.rollback()
        session.close()
        engine.dispose()

    return {"num_records": len(records_to_store)}


def process_unit_scada(table: AEMOTableSchema) -> ControllerReturn:

0 Source : wem.py
with MIT License
from opennem

def store_wem_balancingsummary_set(balancing_set: WEMBalancingSummarySet) -> ControllerReturn:
    """Persist wem balancing set to the database"""
    engine = get_database_engine()
    session = get_scoped_session()
    cr = ControllerReturn()

    if not balancing_set.intervals:
        return cr

    cr.total_records = len(balancing_set.intervals)
    cr.server_latest = balancing_set.server_latest

    records_to_store = []

    for interval in balancing_set.intervals:
        records_to_store.append(
            {
                "created_by": "wem.controller",
                "trading_interval": interval.trading_day_interval,
                "network_id": "WEM",
                "network_region": "WEM",
                "is_forecast": interval.is_forecast,
                "forecast_load": interval.forecast_mw,
                "generation_total": interval.actual_total_generation,
                "generation_scheduled": interval.actual_nsg_mw,
                "price": interval.price,
            }
        )
        cr.processed_records += 1

    if len(records_to_store) < 1:
        return cr

    # upsert on the balancing-summary composite key
    stmt = insert(BalancingSummary).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=[
            "trading_interval",
            "network_id",
            "network_region",
        ],
        set_={
            "price": stmt.excluded.price,
            "forecast_load": stmt.excluded.forecast_load,
            "generation_total": stmt.excluded.generation_total,
            "is_forecast": stmt.excluded.is_forecast,
        },
    )

    try:
        session.execute(stmt)
        session.commit()
        cr.inserted_records = len(records_to_store)
    except Exception as e:
        logger.error("Error: {}".format(e))
        cr.errors = len(records_to_store)
        cr.error_detail.append(str(e))
    finally:
        session.close()
        engine.dispose()

    return cr


def store_wem_facility_intervals(balancing_set: WEMFacilityIntervalSet) -> ControllerReturn:

See More Examples