Here are examples of the Python API `sqlalchemy.func.length`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
39 Examples
3
Source : cust_filters_sql.py
with GNU General Public License v3.0
from AASFCYBERKING
with GNU General Public License v3.0
from AASFCYBERKING
def get_chat_filters(chat_id):
    """Return every CustomFilters row for *chat_id*.

    Results are ordered longest keyword first, ties broken alphabetically,
    so that the most specific trigger is matched before shorter ones.
    """
    try:
        query = SESSION.query(CustomFilters)
        query = query.filter(CustomFilters.chat_id == str(chat_id))
        query = query.order_by(func.length(CustomFilters.keyword).desc())
        query = query.order_by(CustomFilters.keyword.asc())
        return query.all()
    finally:
        # Always release the session, even if the query raises.
        SESSION.close()
def get_filter(chat_id, keyword):
3
Source : cust_filters_sql.py
with GNU General Public License v3.0
from Aid-3n
with GNU General Public License v3.0
from Aid-3n
def get_chat_filters(chat_id):
    """Fetch all custom filters of a chat, longest keyword first."""
    try:
        return (
            SESSION.query(CustomFilters)
            .filter(CustomFilters.chat_id == str(chat_id))
            .order_by(func.length(CustomFilters.keyword).desc())
            .order_by(CustomFilters.keyword.asc())
            .all()
        )
    finally:
        SESSION.close()
def get_filter(chat_id, keyword):
3
Source : cust_filters_sql.py
with GNU General Public License v3.0
from AmineSoukara
with GNU General Public License v3.0
from AmineSoukara
def get_chat_filters(chat_id):
    """List the chat's CustomFilters, longest keyword first, then A-Z."""
    ordering = (
        func.length(CustomFilters.keyword).desc(),
        CustomFilters.keyword.asc(),
    )
    try:
        return (SESSION.query(CustomFilters)
                .filter(CustomFilters.chat_id == str(chat_id))
                .order_by(*ordering)
                .all())
    finally:
        SESSION.close()
def get_filter(chat_id, keyword):
3
Source : cust_filters_sql.py
with GNU General Public License v3.0
from Awesome-Prince
with GNU General Public License v3.0
from Awesome-Prince
def get_chat_filters(chat_id):
    """Return the custom-filter rows belonging to *chat_id*.

    Longest keywords come first so the most specific filter wins;
    equal-length keywords are sorted alphabetically.
    """
    chat_key = str(chat_id)
    try:
        filters = SESSION.query(CustomFilters).filter(
            CustomFilters.chat_id == chat_key)
        filters = filters.order_by(func.length(CustomFilters.keyword).desc())
        filters = filters.order_by(CustomFilters.keyword.asc())
        return filters.all()
    finally:
        SESSION.close()
def get_filter(chat_id, keyword):
3
Source : cust_filters_sql.py
with GNU General Public License v3.0
from HarukaNetwork
with GNU General Public License v3.0
from HarukaNetwork
def get_chat_filters(chat_id):
    """Get all CustomFilters for a chat, most specific (longest) first."""
    try:
        q = SESSION.query(CustomFilters).filter(
            CustomFilters.chat_id == str(chat_id))
        # Primary key: keyword length, descending; secondary: alphabetical.
        q = q.order_by(func.length(CustomFilters.keyword).desc())
        q = q.order_by(CustomFilters.keyword.asc())
        return q.all()
    finally:
        SESSION.close()
def get_filter(chat_id, keyword):
3
Source : cust_filters_sql.py
with GNU General Public License v3.0
from kennedy-ex
with GNU General Public License v3.0
from kennedy-ex
def get_chat_filters(chat_id):
    """Return this chat's filters sorted longest-keyword-first, then A-Z."""
    try:
        by_length_then_alpha = (SESSION.query(CustomFilters)
                                .filter(CustomFilters.chat_id == str(chat_id))
                                .order_by(func.length(CustomFilters.keyword).desc())
                                .order_by(CustomFilters.keyword.asc()))
        return by_length_then_alpha.all()
    finally:
        SESSION.close()
def get_filter(chat_id, keyword):
3
Source : search.py
with MIT License
from sepro
with MIT License
from sepro
def search_typeahead_interpro(term):
    """
    Controller required for populating predictive search forms using typeahead.js.

    :param term: partial search term
    :return: JSON object compatible with typeahead.js
    """
    contains = "%" + term + "%"
    order = func.length(Interpro.description)
    if len(term) > 7:
        # Long terms may be full labels: also match the label by prefix.
        criterion = or_(Interpro.description.ilike(contains),
                        Interpro.label.ilike(term + "%"))
        interpro = Interpro.query.filter(criterion).order_by(order).all()
    else:
        interpro = Interpro.query.filter(
            Interpro.description.ilike(contains)).order_by(order).all()
    payload = [{'value': i.description,
                'tokens': i.description.split() + [i.label],
                'label': i.label} for i in interpro]
    return Response(json.dumps(payload), mimetype='application/json')
@search.route('/typeahead/interpro/prefetch')
3
Source : search.py
with MIT License
from sepro
with MIT License
from sepro
def search_typeahead_prefetch_interpro():
    """
    Controller returning a small subset of InterPro entries (the short
    descriptions) to be used as the prefetched data for typeahead.js.

    :return: JSON object compatible with typeahead.js
    """
    short_description = func.length(Interpro.description) < 7
    interpro = (Interpro.query
                .filter(short_description)
                .order_by(func.length(Interpro.description))
                .all())
    entries = [{'value': i.description,
                'tokens': i.description.split() + [i.label],
                'label': i.label} for i in interpro]
    return Response(json.dumps(entries), mimetype='application/json')
@search.route('/typeahead/go/ < term>.json')
3
Source : search.py
with MIT License
from sepro
with MIT License
from sepro
def search_typeahead_go(term):
    """
    Controller required for populating predictive search forms using typeahead.js.

    :param term: partial search term
    :return: JSON object compatible with typeahead.js
    """
    active = GO.query.filter(GO.obsolete == 0)
    if term.lower().startswith('go:') and len(term) > 7:
        # Looks like a full GO identifier: match the label by prefix.
        go = active.filter(GO.label.ilike(term + "%")).all()
    else:
        go = (active.filter(GO.name.ilike("%" + term + "%"))
              .order_by(func.length(GO.name))
              .all())
    records = [{'value': g.name,
                'tokens': g.name.split() + [g.label],
                'label': g.label} for g in go]
    return Response(json.dumps(records), mimetype='application/json')
@search.route('/typeahead/go/prefetch')
3
Source : search.py
with MIT License
from sepro
with MIT License
from sepro
def search_typeahead_prefetch_go():
    """
    Controller returning a small subset of GO terms (the short ones) to be
    used as the prefetched data for typeahead.js.

    :return: JSON object compatible with typeahead.js
    """
    go = (GO.query
          .filter(GO.obsolete == 0)
          .filter(func.length(GO.name) < 7)
          .order_by(func.length(GO.name))
          .all())
    entries = [{'value': g.name,
                'tokens': g.name.split() + [g.label],
                'label': g.label} for g in go]
    return Response(json.dumps(entries), mimetype='application/json')
@search.route('/whooshee/ < keyword>')
3
Source : cli.py
with MIT License
from xolox
with MIT License
from xolox
def stats_cmd(self, arguments):
    """Show some statistics about the local chat archive."""
    info = logger.info
    info("Statistics about %s:", format_path(self.database_file))
    info(" - Number of contacts: %i", self.num_contacts)
    info(" - Number of conversations: %i", self.num_conversations)
    info(" - Number of messages: %i", self.num_messages)
    info(" - Database file size: %s",
         format_size(os.path.getsize(self.database_file)))
    # Total stored text sizes computed in SQL; COALESCE maps "no rows" to 0.
    text_bytes = self.session.query(
        func.coalesce(func.sum(func.length(Message.text)), 0)).scalar()
    info(
        " - Size of %s: %s",
        pluralize(self.num_messages, "plain text chat message"),
        format_size(text_bytes),
    )
    html_bytes = self.session.query(
        func.coalesce(func.sum(func.length(Message.html)), 0)).scalar()
    info(
        " - Size of %s: %s",
        pluralize(self.num_html_messages, "HTML formatted chat message"),
        format_size(html_bytes),
    )
def sync_cmd(self, arguments):
0
Source : 150_add_polymorphic_events.py
with GNU Affero General Public License v3.0
from closeio
with GNU Affero General Public License v3.0
from closeio
def populate():
    """One-off migration: split existing Event rows into the new polymorphic
    Event / RecurringEvent / RecurringEventOverride classes.

    NOTE(review): indentation was lost in this copy; the nesting below is a
    reconstruction — confirm against the original migration before relying
    on the exact control flow.
    """
    # Populate new classes from the existing data
    from inbox.events.recurring import link_events
    from inbox.events.util import parse_datetime
    from inbox.models.event import Event, RecurringEvent, RecurringEventOverride
    from inbox.models.session import session_scope
    with session_scope() as db:
        # Redo recurrence rule population, since we extended the column length
        print("Repopulating max-length recurrences...", end=" ")
        for e in db.query(Event).filter(sa.func.length(Event.recurrence) > 250):
            # raw_data may be json.dumps output or str(dict); try both parsers.
            try:
                raw_data = json.loads(e.raw_data)
            except:
                try:
                    raw_data = ast.literal_eval(e.raw_data)
                except:
                    print("Could not load raw data for event {}".format(e.id))
                    continue
            e.recurrence = raw_data["recurrence"]
        db.commit()
        print("done.")
        print("Updating types for Override...", end=" ")
        # Slightly hacky way to convert types (only needed for one-off import)
        convert = """UPDATE event SET type='recurringeventoverride' WHERE
        raw_data LIKE '%recurringEventId%'"""
        db.execute(convert)
        create = """INSERT INTO recurringeventoverride (id)
        SELECT id FROM event
        WHERE type='recurringeventoverride'
        AND id NOT IN
        (SELECT id FROM recurringeventoverride)"""
        try:
            db.execute(create)
        except Exception as e:
            print("Couldn't insert RecurringEventOverrides: {}".format(e))
            exit(2)
        print("done.")
        c = 0
        print("Expanding Overrides .", end=" ")
        query = db.query(RecurringEventOverride)
        for e in query:
            try:
                # Some raw data is str(dict), other is json.dumps
                raw_data = json.loads(e.raw_data)
            except:
                try:
                    raw_data = ast.literal_eval(e.raw_data)
                except:
                    print("Could not load raw data for event {}".format(e.id))
                    continue
            rec_uid = raw_data.get("recurringEventId")
            if rec_uid:
                e.master_event_uid = rec_uid
                ost = raw_data.get("originalStartTime")
                if ost:
                    # this is a dictionary with one value
                    start_time = ost.values().pop()
                    e.original_start_time = parse_datetime(start_time)
                # attempt to get the ID for the event, if we can, and
                # set the relationship appropriately
                if raw_data.get("status") == "cancelled":
                    e.cancelled = True
                link_events(db, e)
            c += 1
            if c % 100 == 0:
                # Progress indicator for long-running migrations.
                print(".", end=" ")
                sys.stdout.flush()
        db.commit()
        print("done. ({} modified)".format(c))
        # Convert Event to RecurringEvent
        print("Updating types for RecurringEvent...", end=" ")
        convert = """UPDATE event SET type='recurringevent' WHERE
        recurrence IS NOT NULL"""
        db.execute(convert)
        create = """INSERT INTO recurringevent (id)
        SELECT id FROM event
        WHERE type='recurringevent'
        AND id NOT IN
        (SELECT id FROM recurringevent)"""
        try:
            db.execute(create)
        except Exception as e:
            print("Couldn't insert RecurringEvents: {}".format(e))
            exit(2)
        print("done.")
        # Pull out recurrence metadata from recurrence
        c = 0
        print("Expanding master events .", end=" ")
        query = db.query(RecurringEvent)
        for r in query:
            r.unwrap_rrule()
            try:
                raw_data = json.loads(r.raw_data)
            except:
                try:
                    raw_data = ast.literal_eval(r.raw_data)
                except:
                    print("Could not load raw data for event {}".format(r.id))
                    continue
            r.start_timezone = raw_data["start"].get("timeZone")
            # find any un-found overrides that didn't have masters earlier
            link_events(db, r)
            db.add(r)
            c += 1
            if c % 100 == 0:
                print(".", end=" ")
                sys.stdout.flush()
        db.commit()
        print("done. ({} modified)".format(c))
        # Finally, convert all remaining Events to type='event'
        convert = """UPDATE event SET type='event' WHERE type IS NULL"""
        db.execute(convert)
if __name__ == "__main__":
0
Source : logs.py
with GNU General Public License v2.0
from flathub
with GNU General Public License v2.0
from flathub
def compressLog(self, logid, force=False):
    """Gather the stored chunks of log *logid* into fewer, larger compressed
    chunks.

    :param logid: id of the log to re-compress
    :param force: re-compress even single-chunk groups
    :returns: a Deferred firing with the number of bytes saved

    Fix: the ``<=`` comparison operators in the group-select and delete
    queries had been corrupted to ``< =`` (a syntax error); restored.
    """
    def thdcompressLog(conn):
        tbl = self.db.model.logchunks
        q = sa.select([tbl.c.first_line, tbl.c.last_line, sa.func.length(tbl.c.content),
                       tbl.c.compressed])
        q = q.where(tbl.c.logid == logid)
        q = q.order_by(tbl.c.first_line)
        rows = conn.execute(q)
        todo_gather_list = []
        numchunks = 0
        totlength = 0
        todo_numchunks = 0
        todo_first_line = 0
        todo_last_line = 0
        todo_length = 0
        # first pass, we fetch the full list of chunks (without content) and find out
        # the chunk groups which could use some gathering.
        for row in rows:
            if (todo_length + row.length_1 > self.MAX_CHUNK_SIZE or
                    (row.last_line - todo_first_line) > self.MAX_CHUNK_LINES):
                if todo_numchunks > 1 or (force and todo_numchunks):
                    # this group is worth re-compressing
                    todo_gather_list.append((todo_first_line, todo_last_line))
                todo_first_line = row.first_line
                todo_length = 0
                todo_numchunks = 0
            todo_last_line = row.last_line
            # note that we count the compressed size for efficiency reason
            # unlike to the on-the-flow chunk splitter
            todo_length += row.length_1
            totlength += row.length_1
            todo_numchunks += 1
            numchunks += 1
        rows.close()
        if totlength == 0:
            # empty log
            return 0
        if todo_numchunks > 1 or (force and todo_numchunks):
            # last chunk group
            todo_gather_list.append((todo_first_line, todo_last_line))
        for todo_first_line, todo_last_line in todo_gather_list:
            # decompress this group of chunks. Note that the content is binary bytes.
            # no need to decode anything as we are going to put in back stored as bytes anyway
            q = sa.select(
                [tbl.c.first_line, tbl.c.last_line, tbl.c.content, tbl.c.compressed])
            q = q.where(tbl.c.logid == logid)
            q = q.where(tbl.c.first_line >= todo_first_line)
            q = q.where(tbl.c.last_line <= todo_last_line)
            q = q.order_by(tbl.c.first_line)
            rows = conn.execute(q)
            chunk = b""
            for row in rows:
                if chunk:
                    chunk += b"\n"
                chunk += self.COMPRESSION_BYID[row.compressed][
                    "read"](row.content)
            rows.close()
            # Transaction is necessary so that readers don't see disappeared chunks
            transaction = conn.begin()
            # we remove the chunks that we are compressing
            d = tbl.delete()
            d = d.where(tbl.c.logid == logid)
            d = d.where(tbl.c.first_line >= todo_first_line)
            d = d.where(tbl.c.last_line <= todo_last_line)
            conn.execute(d).close()
            # and we recompress them in one big chunk
            chunk, compressed_id = self.thdCompressChunk(chunk)
            conn.execute(tbl.insert(),
                         dict(logid=logid, first_line=todo_first_line,
                              last_line=todo_last_line, content=chunk,
                              compressed=compressed_id)).close()
            transaction.commit()
        # calculate how many bytes we saved
        q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))])
        q = q.where(tbl.c.logid == logid)
        newsize = conn.execute(q).fetchone()[0]
        return totlength - newsize
    saved = yield self.db.pool.do(thdcompressLog)
    return saved
# returns a Deferred that returns a value
def deleteOldLogChunks(self, older_than_timestamp):
0
Source : test_cleanupdb.py
with GNU General Public License v2.0
from flathub
with GNU General Public License v2.0
from flathub
def test_cleanup(self):
    """End-to-end check of cleanupdb log compression: every configured
    compression mode must preserve the retrievable log data while shrinking
    the stored chunk sizes to known values."""
    # we reuse the fake db background data from db.logs unit tests
    yield self.insertTestData(test_logs.Tests.backgroundData)
    # insert a log with lots of redundancy
    LOGDATA = "xx\n" * 2000
    logid = yield self.master.db.logs.addLog(102, "x", "x", "s")
    yield self.master.db.logs.appendLog(logid, LOGDATA)
    # test all methods
    lengths = {}
    for mode in self.master.db.logs.COMPRESSION_MODE:
        if mode == "lz4" and not hasLz4:
            # ok.. lz4 is not installed, don't fail
            lengths["lz4"] = 40
            continue
        # create a master.cfg with different compression method
        self.createMasterCfg("c['logCompressionMethod'] = '{}'".format(mode))
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 0)
        # make sure the compression don't change the data we can retrieve
        # via api
        res = yield self.master.db.logs.getLogLines(logid, 0, 2000)
        self.assertEqual(res, LOGDATA)
        # retrieve the actual data size in db using raw sqlalchemy
        def thd(conn):
            # Sum of stored chunk byte lengths for this log.
            tbl = self.master.db.model.logchunks
            q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))])
            q = q.where(tbl.c.logid == logid)
            return conn.execute(q).fetchone()[0]
        lengths[mode] = yield self.master.db.pool.do(thd)
    self.assertDictAlmostEqual(
        lengths, {'raw': 5999, 'bz2': 44, 'lz4': 40, 'gz': 31})
0
Source : test_zoomark.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _baseline_4_expressions(self):
    """Benchmark baseline: run a battery of Core SELECT expressions against
    the Zoo/Animal tables, asserting fixed row counts for each."""
    Zoo = self.metadata.tables["Zoo"]
    Animal = self.metadata.tables["Animal"]
    engine = self.metadata.bind

    def fulltable(select):
        """Iterate over the full result table."""
        return [list(row) for row in engine.execute(select).fetchall()]

    for x in range(ITERATIONS):
        # basic comparisons
        assert len(fulltable(Zoo.select())) == 5
        assert len(fulltable(Animal.select())) == ITERATIONS + 12
        assert len(fulltable(Animal.select(Animal.c.Legs == 4))) == 4
        assert len(fulltable(Animal.select(Animal.c.Legs == 2))) == 5
        assert len(fulltable(Animal.select(and_(Animal.c.Legs >= 2, Animal.c.Legs < 20)))) == ITERATIONS + 9
        assert len(fulltable(Animal.select(Animal.c.Legs > 10))) == 2
        assert len(fulltable(Animal.select(Animal.c.Lifespan > 70))) == 2
        # string prefix/suffix matching
        assert len(fulltable(Animal.select(Animal.c.Species.startswith("L")))) == 2
        assert len(fulltable(Animal.select(Animal.c.Species.endswith("pede")))) == 2
        # NULL comparisons (both orderings)
        assert len(fulltable(Animal.select(Animal.c.LastEscape != None))) == 1  # noqa
        assert len(fulltable(Animal.select(None == Animal.c.LastEscape))) == ITERATIONS + 11  # noqa
        # In operator (containedby)
        assert len(fulltable(Animal.select(Animal.c.Species.like("%pede%")))) == 2
        assert len(fulltable(Animal.select(Animal.c.Species.in_(["Lion", "Tiger", "Bear"])))) == 3

        # Try In with cell references
        class thing(object):
            pass
        pet, pet2 = thing(), thing()
        pet.Name, pet2.Name = "Slug", "Ostrich"
        assert len(fulltable(Animal.select(Animal.c.Species.in_([pet.Name, pet2.Name])))) == 2
        # logic and other functions
        assert len(fulltable(Animal.select(Animal.c.Species.like("Slug")))) == 1
        assert len(fulltable(Animal.select(Animal.c.Species.like("%pede%")))) == 2
        name = "Lion"
        # SQL length() compared against the Python-side len()
        assert len(fulltable(Animal.select(func.length(Animal.c.Species) == len(name)))) == ITERATIONS + 3
        assert len(fulltable(Animal.select(Animal.c.Species.like("%i%")))) == ITERATIONS + 7
        # Test now(), today(), year(), month(), day()
        assert len(fulltable(Zoo.select(and_(Zoo.c.Founded != None, Zoo.c.Founded < func.current_timestamp(_type=Date))))) == 3  # noqa
        assert len(fulltable(Animal.select(Animal.c.LastEscape == func.current_timestamp(_type=Date)))) == 0
        assert len(fulltable(Animal.select(func.date_part("year", Animal.c.LastEscape) == 2004))) == 1
        assert len(fulltable(Animal.select(func.date_part("month", Animal.c.LastEscape) == 12))) == 1
        assert len(fulltable(Animal.select(func.date_part("day", Animal.c.LastEscape) == 21))) == 1
def _baseline_5_aggregates(self):
0
Source : test_zoomark_orm.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _baseline_4_expressions(self):
    """Benchmark baseline: the same expression battery as the Core version,
    exercised through the ORM Session/Query API."""
    for x in range(ITERATIONS):
        # basic comparisons
        assert len(list(self.session.query(Zoo))) == 5
        assert len(list(self.session.query(Animal))) == ITERATIONS + 12
        assert len(list(self.session.query(Animal).filter(Animal.Legs == 4))) == 4
        assert len(list(self.session.query(Animal).filter(Animal.Legs == 2))) == 5
        assert len(list(self.session.query(Animal).filter(and_(Animal.Legs >= 2, Animal.Legs < 20)))) == ITERATIONS + 9
        assert len(list(self.session.query(Animal).filter(Animal.Legs > 10))) == 2
        assert len(list(self.session.query(Animal).filter(Animal.Lifespan > 70))) == 2
        # LIKE-based prefix/suffix matching
        assert len(list(self.session.query(Animal).filter(Animal.Species.like("L%")))) == 2
        assert len(list(self.session.query(Animal).filter(Animal.Species.like("%pede")))) == 2
        # NULL comparisons
        assert len(list(self.session.query(Animal).filter(Animal.LastEscape != None))) == 1  # noqa
        assert len(list(self.session.query(Animal).filter(Animal.LastEscape == None))) == ITERATIONS + 11  # noqa
        # In operator (containedby)
        assert len(list(self.session.query(Animal).filter(Animal.Species.like("%pede%")))) == 2
        assert len(list(self.session.query(Animal).filter(Animal.Species.in_(("Lion", "Tiger", "Bear"))))) == 3

        # Try In with cell references
        class thing(object):
            pass
        pet, pet2 = thing(), thing()
        pet.Name, pet2.Name = "Slug", "Ostrich"
        assert len(list(self.session.query(Animal).filter(Animal.Species.in_((pet.Name, pet2.Name))))) == 2
        # logic and other functions
        name = "Lion"
        # SQL length() compared against the Python-side len()
        assert len(list(self.session.query(Animal).filter(func.length(Animal.Species) == len(name)))) == ITERATIONS + 3
        assert len(list(self.session.query(Animal).filter(Animal.Species.like("%i%")))) == ITERATIONS + 7
        # Test now(), today(), year(), month(), day()
        assert len(list(self.session.query(Zoo).filter(and_(Zoo.Founded != None, Zoo.Founded < func.now())))) == 3  # noqa
        assert len(list(self.session.query(Animal).filter(Animal.LastEscape == func.now()))) == 0
        assert len(list(self.session.query(Animal).filter(func.date_part("year", Animal.LastEscape) == 2004))) == 1
        assert len(list(self.session.query(Animal).filter(func.date_part("month", Animal.LastEscape) == 12))) == 1
        assert len(list(self.session.query(Animal).filter(func.date_part("day", Animal.LastEscape) == 21))) == 1
def _baseline_5_aggregates(self):
0
Source : test_compiler.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_update_returning(self):
    """UPDATE ... RETURNING compiles to ``OUTPUT inserted.*`` clauses
    (the expected strings show SQL Server's OUTPUT/LEN syntax; the dialect
    is presumably set at class level — not visible here)."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # explicit RETURNING columns
    u = update(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name OUTPUT "
        "inserted.myid, inserted.name",
    )
    # RETURNING a whole table expands to all of its columns
    u = update(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name OUTPUT "
        "inserted.myid, inserted.name, "
        "inserted.description",
    )
    # RETURNING combined with a WHERE criterion
    u = (
        update(table1, values=dict(name="foo"))
        .returning(table1)
        .where(table1.c.name == "bar")
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name OUTPUT "
        "inserted.myid, inserted.name, "
        "inserted.description WHERE mytable.name = "
        ":name_1",
    )
    # RETURNING a SQL function: length() renders as LEN()
    u = update(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name OUTPUT "
        "LEN(inserted.name) AS length_1",
    )
def test_delete_returning(self):
0
Source : test_compiler.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_insert_returning(self):
    """INSERT ... RETURNING compiles to ``OUTPUT inserted.*`` clauses
    (SQL Server OUTPUT/LEN syntax in the expected strings)."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # explicit RETURNING columns
    i = insert(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) OUTPUT "
        "inserted.myid, inserted.name VALUES "
        "(:name)",
    )
    # RETURNING a whole table expands to all of its columns
    i = insert(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) OUTPUT "
        "inserted.myid, inserted.name, "
        "inserted.description VALUES (:name)",
    )
    # RETURNING a SQL function: length() renders as LEN()
    i = insert(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) OUTPUT "
        "LEN(inserted.name) AS length_1 VALUES "
        "(:name)",
    )
def test_limit_using_top(self):
0
Source : test_compiler.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_update_returning(self):
    """UPDATE ... RETURNING compiles to PostgreSQL RETURNING clauses with
    pyformat (%(name)s) bind parameters."""
    dialect = postgresql.dialect()
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # explicit RETURNING columns
    u = update(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=%(name)s "
        "RETURNING mytable.myid, mytable.name",
        dialect=dialect,
    )
    # RETURNING a whole table expands to all of its columns
    u = update(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        u,
        "UPDATE mytable SET name=%(name)s "
        "RETURNING mytable.myid, mytable.name, "
        "mytable.description",
        dialect=dialect,
    )
    # RETURNING a SQL function with a generated label
    u = update(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=%(name)s "
        "RETURNING length(mytable.name) AS length_1",
        dialect=dialect,
    )
def test_insert_returning(self):
0
Source : test_compiler.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_insert_returning(self):
    """INSERT ... RETURNING compiles to PostgreSQL RETURNING clauses with
    pyformat (%(name)s) bind parameters."""
    dialect = postgresql.dialect()
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # explicit RETURNING columns
    i = insert(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING mytable.myid, "
        "mytable.name",
        dialect=dialect,
    )
    # RETURNING a whole table expands to all of its columns
    i = insert(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING mytable.myid, "
        "mytable.name, mytable.description",
        dialect=dialect,
    )
    # RETURNING a SQL function with a generated label
    i = insert(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING length(mytable.name) "
        "AS length_1",
        dialect=dialect,
    )
def test_create_drop_enum(self):
0
Source : test_firebird.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_update_returning(self):
    """UPDATE ... RETURNING compiles to Firebird-style RETURNING clauses;
    note length() renders as char_length()."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # explicit RETURNING columns
    u = update(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name RETURNING "
        "mytable.myid, mytable.name",
    )
    # RETURNING a whole table expands to all of its columns
    u = update(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name RETURNING "
        "mytable.myid, mytable.name, "
        "mytable.description",
    )
    # RETURNING a SQL function: length() renders as char_length()
    u = update(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        u,
        "UPDATE mytable SET name=:name RETURNING "
        "char_length(mytable.name) AS length_1",
    )
def test_insert_returning(self):
0
Source : test_firebird.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_insert_returning(self):
    """INSERT ... RETURNING compiles to Firebird-style RETURNING clauses;
    note length() renders as char_length()."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # explicit RETURNING columns
    i = insert(table1, values=dict(name="foo")).returning(
        table1.c.myid, table1.c.name
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES (:name) "
        "RETURNING mytable.myid, mytable.name",
    )
    # RETURNING a whole table expands to all of its columns
    i = insert(table1, values=dict(name="foo")).returning(table1)
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES (:name) "
        "RETURNING mytable.myid, mytable.name, "
        "mytable.description",
    )
    # RETURNING a SQL function: length() renders as char_length()
    i = insert(table1, values=dict(name="foo")).returning(
        func.length(table1.c.name)
    )
    self.assert_compile(
        i,
        "INSERT INTO mytable (name) VALUES (:name) "
        "RETURNING char_length(mytable.name) AS "
        "length_1",
    )
def test_charset(self):
0
Source : test_froms.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_aliased_adapt_on_names(self):
    """aliased(..., adapt_on_names=True) matches subquery columns by name:
    without it the aggregate 'email_address' label is NOT adapted and the raw
    addresses column leaks into the outer query; with it, the subquery's
    labeled column is used."""
    User, Address = self.classes.User, self.classes.Address
    sess = Session()
    # aggregate subquery labeling sum(length(email_address)) as 'email_address'
    agg_address = sess.query(
        Address.id,
        func.sum(func.length(Address.email_address)).label(
            "email_address"
        ),
    ).group_by(Address.user_id)
    ag1 = aliased(Address, agg_address.subquery())
    ag2 = aliased(Address, agg_address.subquery(), adapt_on_names=True)
    # first, without adapt on names, 'email_address' isn't matched up - we
    # get the raw "address" element in the SELECT
    self.assert_compile(
        sess.query(User, ag1.email_address)
        .join(ag1, User.addresses)
        .filter(ag1.email_address > 5),
        "SELECT users.id "
        "AS users_id, users.name AS users_name, addresses.email_address "
        "AS addresses_email_address FROM addresses, users JOIN "
        "(SELECT addresses.id AS id, sum(length(addresses.email_address)) "
        "AS email_address FROM addresses GROUP BY addresses.user_id) AS "
        "anon_1 ON users.id = addresses.user_id "
        "WHERE addresses.email_address > :email_address_1",
    )
    # second, 'email_address' matches up to the aggregate, and we get a
    # smooth JOIN from users->subquery and that's it
    self.assert_compile(
        sess.query(User, ag2.email_address)
        .join(ag2, User.addresses)
        .filter(ag2.email_address > 5),
        "SELECT users.id AS users_id, users.name AS users_name, "
        "anon_1.email_address AS anon_1_email_address FROM users "
        "JOIN ("
        "SELECT addresses.id AS id, sum(length(addresses.email_address)) "
        "AS email_address FROM addresses GROUP BY addresses.user_id) AS "
        "anon_1 ON users.id = addresses.user_id "
        "WHERE anon_1.email_address > :email_address_1",
    )
class SelectFromTest(QueryTest, AssertsCompiledSQL):
0
Source : test_defaults.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_py_vs_server_default_detection_one(self):
    """Each kind of column default (python scalar, SQL expression, server
    default, callable, onupdate) must be detected in the expected slot
    ('default', 'onupdate', 'server_default') on the Column."""
    has_ = self._check_default_slots
    metadata = MetaData()
    tbl = Table(
        "default_test",
        metadata,
        # python function
        Column("col1", Integer, primary_key=True, default="1"),
        # python literal
        Column(
            "col2",
            String(20),
            default="imthedefault",
            onupdate="im the update",
        ),
        # preexecute expression
        Column(
            "col3",
            Integer,
            default=func.length("abcdef"),
            onupdate=func.length("abcdefghijk"),
        ),
        # SQL-side default from sql expression
        Column("col4", Integer, server_default="1"),
        # SQL-side default from literal expression
        Column("col5", Integer, server_default="1"),
        # preexecute + update timestamp
        Column(
            "col6",
            sa.Date,
            default=datetime.datetime.today,
            onupdate=datetime.datetime.today,
        ),
        Column("boolcol1", sa.Boolean, default=True),
        Column("boolcol2", sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column("col7", Integer, default=lambda: 5, onupdate=lambda: 10,),
        # python builtin
        Column(
            "col8",
            sa.Date,
            default=datetime.date.today,
            onupdate=datetime.date.today,
        ),
        Column("col9", String(20), default="py", server_default="ddl"),
    )
    # verify which default slots were populated for each column
    has_(tbl, "col1", "default")
    has_(tbl, "col2", "default", "onupdate")
    has_(tbl, "col3", "default", "onupdate")
    has_(tbl, "col4", "server_default")
    has_(tbl, "col5", "server_default")
    has_(tbl, "col6", "default", "onupdate")
    has_(tbl, "boolcol1", "default")
    has_(tbl, "boolcol2", "default")
    has_(tbl, "col7", "default", "onupdate")
    has_(tbl, "col8", "default", "onupdate")
    has_(tbl, "col9", "default", "server_default")
def test_py_vs_server_default_detection_two(self):
0
Source : test_defaults.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _test_autoincrement(self, bind):
    """Insert four rows through *bind* and verify the autoincrement primary
    key yields distinct, monotonically assigned ids 1..4 — including when the
    non-PK value is itself a SQL expression (func.length)."""
    aitable = self.tables.aitable
    ids = set()
    rs = bind.execute(aitable.insert(), int1=1)
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    rs = bind.execute(aitable.insert(), str1="row 2")
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    rs = bind.execute(aitable.insert(), int1=3, str1="row 3")
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    # value supplied as a SQL expression rather than a literal
    rs = bind.execute(aitable.insert(values={"int1": func.length("four")}))
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    eq_(ids, set([1, 2, 3, 4]))
    eq_(
        list(bind.execute(aitable.select().order_by(aitable.c.id))),
        [(1, 1, None), (2, None, "row 2"), (3, 3, "row 3"), (4, 4, None)],
    )
def test_autoincrement_autocommit(self):
0
Source : test_functions.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_update(self):
    """
    Tests sending functions and SQL expressions to the VALUES and SET
    clauses of INSERT/UPDATE instances, and that column-level defaults
    get overridden.
    """
    meta = self.metadata
    t = Table(
        "t1",
        meta,
        Column(
            "id",
            Integer,
            Sequence("t1idseq", optional=True),
            primary_key=True,
        ),
        Column("value", Integer),
    )
    # t2 carries a python-side default and an onupdate, to verify they are
    # overridden by explicit SQL-expression values
    t2 = Table(
        "t2",
        meta,
        Column(
            "id",
            Integer,
            Sequence("t2idseq", optional=True),
            primary_key=True,
        ),
        Column("value", Integer, default=7),
        Column("stuff", String(20), onupdate="thisisstuff"),
    )
    meta.create_all()
    # INSERT/UPDATE with func.length() expressions as values
    t.insert(values=dict(value=func.length("one"))).execute()
    assert t.select().execute().first()["value"] == 3
    t.update(values=dict(value=func.length("asfda"))).execute()
    assert t.select().execute().first()["value"] == 5
    r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
    id_ = r.inserted_primary_key[0]
    assert t.select(t.c.id == id_).execute().first()["value"] == 9
    t.update(values={t.c.value: func.length("asdf")}).execute()
    assert t.select().execute().first()["value"] == 4
    # column default (7) applies when no value given; expressions override it
    t2.insert().execute()
    t2.insert(values=dict(value=func.length("one"))).execute()
    t2.insert(values=dict(value=func.length("asfda") + -19)).execute(
        stuff="hi"
    )
    res = exec_sorted(select([t2.c.value, t2.c.stuff]))
    eq_(res, [(-14, "hi"), (3, None), (7, None)])
    # onupdate default fires on UPDATE when 'stuff' not supplied...
    t2.update(values=dict(value=func.length("asdsafasd"))).execute(
        stuff="some stuff"
    )
    assert select([t2.c.value, t2.c.stuff]).execute().fetchall() == [
        (9, "some stuff"),
        (9, "some stuff"),
        (9, "some stuff"),
    ]
    t2.delete().execute()
    t2.insert(values=dict(value=func.length("one") + 8)).execute()
    assert t2.select().execute().first()["value"] == 11
    t2.update(values=dict(value=func.length("asfda"))).execute()
    eq_(
        select([t2.c.value, t2.c.stuff]).execute().first(),
        (5, "thisisstuff"),
    )
    # ...and is overridden when 'stuff' is explicitly set
    t2.update(
        values={t2.c.value: func.length("asfdaasdf"), t2.c.stuff: "foo"}
    ).execute()
    eq_(select([t2.c.value, t2.c.stuff]).execute().first(), (9, "foo"))
@testing.fails_on_everything_except("postgresql")
0
Source : stats.py
with GNU General Public License v3.0
from mkdryden
with GNU General Public License v3.0
from mkdryden
def get_word_stats(self, n: int = 4, limit: int = 20, start: str = None, end: str = None,
                   user: Tuple[int, str] = None, **kwargs) -> Tuple[str, None]:
    """
    Print table of lexeme statistics.

    :param n: Only consider lexemes with length of at least n
    :param limit: Number of top lexemes to return
    :param start: Start timestamp (e.g. 2019, 2019-01, 2019-01-01, "2019-01-01 14:21")
    :param end: End timestamp (e.g. 2019, 2019-01, 2019-01-01, "2019-01-01 14:21")
    :param user: (user_id, display_name) pair; restricts stats to that user
    :return: (formatted message text, None) — the None slot matches the handler contract
    """
    q = select(messages.c['text_index_col'])
    if user:
        q = q.where(messages.c['from_user'] == user[0])
    if start:
        # Bug fix: the lower bound was hard-coded to '2019', silently ignoring `start`.
        q = q.where(messages.c['date'] >= str(pd.to_datetime(start)))
    if end:
        # Bug fix: the upper bound was hard-coded to '2019', silently ignoring `end`.
        q = q.where(messages.c['date'] < str(pd.to_datetime(end)))
    q = q.scalar_subquery()

    # ts_stat over the text-search index yields (word, ndoc, nentry) rows.
    f = TsStat(q)
    stmt = select([f.c['word'], f.c['ndoc'], f.c['nentry']]) \
        .select_from(f)
    if n:
        stmt = stmt.where(func.length(f.c['word']) >= n)
    stmt = stmt.order_by(f.c.nentry.desc(),
                         f.c.ndoc.desc(),
                         f.c.word)
    if limit:
        # NOTE(review): only the limited statement is pre-compiled for the
        # postgresql dialect; with limit=0 the raw construct is passed to
        # pandas unchanged — confirm both paths are intended.
        stmt = stmt.limit(limit)\
            .compile(dialect=postgresql.dialect())

    with self.engine.connect() as con:
        df = pd.read_sql_query(stmt, con)
    df.columns = ['Lexeme', 'Messages', 'Uses']

    text = df.to_string(index=False, header=True, float_format=lambda x: f"{x:.1f}")

    if user:
        # Bug fix: close the markdown bold span (':**') so the header renders
        # the same way as the all-users branch below.
        return f"**Most frequently used lexemes, {escape_markdown(user[1].lstrip('@'))}:**\n```\n{text}\n```", None
    else:
        return f"**Most frequently used lexemes, all users:**\n```\n{text}\n```", None
def get_random_message(self, lquery: str = None, start: str = None, end: str = None,
0
Source : database.py
with MIT License
from spirali
with MIT License
from spirali
def builder_summaries(self, registered_builders):
    """Return one summary dict per builder (state counts plus stored-data
    size), sorted by builder name.

    Builders present in ``registered_builders`` but absent from the
    database still get an all-zero entry.
    """
    cols = self.jobs.c

    def _new_entry(builder_name, size):
        # Every state counter starts at zero; name/size fill the fixed slots.
        entry = {counter: 0 for counter in STATE_COUNTERS.values()}
        entry["name"] = builder_name
        entry["size"] = size
        return entry

    # Base size: total length of the stored job configs, per builder.
    config_size_q = sa.select(
        [cols.builder, sa.func.sum(sa.func.length(cols.config)).label("size")]
    ).group_by(cols.builder)
    summaries = {
        row.builder: _new_entry(row.builder, row.size)
        for row in self.conn.execute(config_size_q)
    }

    # Per-state job counts, folded into the matching counter slot.
    state_count_q = sa.select(
        [cols.builder, cols.state, sa.func.count(cols.key).label("count")]
    ).group_by(cols.builder, cols.state)
    for row in self.conn.execute(state_count_q):
        summaries[row.builder][STATE_COUNTERS[row.state]] += row.count

    # Add the size of blob data attached to each builder's jobs.
    blob_size_q = (
        sa.select(
            [
                cols.builder,
                sa.func.sum(sa.func.length(self.blobs.c.data)).label("size"),
            ]
        )
        .select_from(self.blobs.join(self.jobs))
        .group_by(cols.builder)
    )
    for row in self.conn.execute(blob_size_q):
        summaries[row.builder]["size"] += row.size

    # Registered builders with no jobs yet still appear, zeroed out.
    for builder in registered_builders:
        summaries.setdefault(builder.name, _new_entry(builder.name, 0))

    return sorted(summaries.values(), key=lambda entry: entry["name"])
def job_summaries(self, builder_name):
0
Source : database.py
with MIT License
from spirali
with MIT License
from spirali
def job_summaries(self, builder_name):
    """Return a list of summary dicts, one per job of ``builder_name``.

    ``size`` aggregates the length of the job's stored config; jobs
    without blobs are kept via the outer join.
    """
    cols = self.jobs.c
    query = (
        sa.select(
            [
                cols.id,
                cols.key,
                cols.state,
                cols.config,
                cols.created_date,
                cols.finished_date,
                cols.computation_time,
                sa.func.sum(sa.func.length(cols.config)).label("size"),
            ]
        )
        .select_from(self.jobs.join(self.blobs, isouter=True))
        .where(cols.builder == builder_name)
        .group_by(cols.id)
    )
    summaries = []
    for row in self.conn.execute(query):
        summaries.append(
            {
                "id": row.id,
                "key": row.key,
                # state is an enum; expose its raw value
                "state": row.state.value,
                "config": row.config,
                "size": row.size,
                "comp_time": row.computation_time,
                # dates are stringified for serialization
                "created": str(row.created_date),
                "finished": str(row.finished_date),
            }
        )
    return summaries
def blob_summaries(self, job_id):
0
Source : database.py
with MIT License
from spirali
with MIT License
from spirali
def blob_summaries(self, job_id):
    """Return a list of blob summary dicts for the given job.

    Only text and PNG blobs ship their payload; text decodes to ``str``,
    anything else is base64-encoded, and other mime types get ``None``.
    """
    cols = self.blobs.c

    # The database itself nulls out payloads of non-displayable mime types.
    displayable_data = sa.case(
        [
            (cols.mime == consts.MIME_TEXT, cols.data),
            (cols.mime == "image/png", cols.data),
        ],
        else_=sa.null(),
    ).label("value")

    query = sa.select(
        [
            cols.name,
            cols.repr,
            cols.mime,
            sa.func.length(cols.data).label("size"),
            displayable_data,
        ]
    ).where(cols.job_id == job_id)

    def _render(raw, mime):
        # Text decodes directly; any other surviving payload goes out as base64.
        if raw is None:
            return None
        if mime == consts.MIME_TEXT:
            return raw.decode()
        return base64.b64encode(raw).decode()

    return [
        {
            "name": row.name,
            "repr": row.repr,
            "size": row.size,
            "value": _render(row.value, row.mime),
            "mime": row.mime,
        }
        for row in self.conn.execute(query)
    ]
def _get_current_jobs(self):
0
Source : test_compiler.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_update_returning(self):
    """UPDATE ... RETURNING compiles to the MSSQL ``OUTPUT inserted.*`` form."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # returning()/where() are generative, so one base statement can be reused
    base = update(table1).values(dict(name="foo"))

    # explicit column list
    self.assert_compile(
        base.returning(table1.c.myid, table1.c.name),
        "UPDATE mytable SET name=:name OUTPUT "
        "inserted.myid, inserted.name",
    )

    # returning the whole table expands to every column
    self.assert_compile(
        base.returning(table1),
        "UPDATE mytable SET name=:name OUTPUT "
        "inserted.myid, inserted.name, "
        "inserted.description",
    )

    # WHERE criteria renders after the OUTPUT clause
    self.assert_compile(
        base.returning(table1).where(table1.c.name == "bar"),
        "UPDATE mytable SET name=:name OUTPUT "
        "inserted.myid, inserted.name, "
        "inserted.description WHERE mytable.name = "
        ":name_1",
    )

    # generic length() is adapted to the MSSQL LEN() function
    self.assert_compile(
        base.returning(func.length(table1.c.name)),
        "UPDATE mytable SET name=:name OUTPUT "
        "LEN(inserted.name) AS length_1",
    )
def test_delete_returning(self):
0
Source : test_compiler.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_insert_returning(self):
    """INSERT ... RETURNING compiles to the MSSQL ``OUTPUT inserted.*`` form."""
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # returning() is generative, so one base statement can be reused
    base = insert(table1).values(dict(name="foo"))

    # explicit column list
    self.assert_compile(
        base.returning(table1.c.myid, table1.c.name),
        "INSERT INTO mytable (name) OUTPUT "
        "inserted.myid, inserted.name VALUES "
        "(:name)",
    )

    # returning the whole table expands to every column
    self.assert_compile(
        base.returning(table1),
        "INSERT INTO mytable (name) OUTPUT "
        "inserted.myid, inserted.name, "
        "inserted.description VALUES (:name)",
    )

    # generic length() is adapted to the MSSQL LEN() function
    self.assert_compile(
        base.returning(func.length(table1.c.name)),
        "INSERT INTO mytable (name) OUTPUT "
        "LEN(inserted.name) AS length_1 VALUES "
        "(:name)",
    )
def test_limit_using_top(self):
0
Source : test_compiler.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_update_returning(self):
    """UPDATE ... RETURNING renders natively on the PostgreSQL dialect."""
    dialect = postgresql.dialect()
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # returning() is generative, so one base statement can be reused
    base = update(table1).values(dict(name="foo"))

    # explicit column list
    self.assert_compile(
        base.returning(table1.c.myid, table1.c.name),
        "UPDATE mytable SET name=%(name)s "
        "RETURNING mytable.myid, mytable.name",
        dialect=dialect,
    )

    # returning the whole table expands to every column
    self.assert_compile(
        base.returning(table1),
        "UPDATE mytable SET name=%(name)s "
        "RETURNING mytable.myid, mytable.name, "
        "mytable.description",
        dialect=dialect,
    )

    # SQL function over a returned column keeps its generic name on PG
    self.assert_compile(
        base.returning(func.length(table1.c.name)),
        "UPDATE mytable SET name=%(name)s "
        "RETURNING length(mytable.name) AS length_1",
        dialect=dialect,
    )
def test_insert_returning(self):
0
Source : test_compiler.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_insert_returning(self):
    """INSERT ... RETURNING renders natively on the PostgreSQL dialect."""
    dialect = postgresql.dialect()
    table1 = table(
        "mytable",
        column("myid", Integer),
        column("name", String(128)),
        column("description", String(128)),
    )
    # returning() is generative, so one base statement can be reused
    base = insert(table1).values(dict(name="foo"))

    # explicit column list
    self.assert_compile(
        base.returning(table1.c.myid, table1.c.name),
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING mytable.myid, "
        "mytable.name",
        dialect=dialect,
    )

    # returning the whole table expands to every column
    self.assert_compile(
        base.returning(table1),
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING mytable.myid, "
        "mytable.name, mytable.description",
        dialect=dialect,
    )

    # SQL function over a returned column keeps its generic name on PG
    self.assert_compile(
        base.returning(func.length(table1.c.name)),
        "INSERT INTO mytable (name) VALUES "
        "(%(name)s) RETURNING length(mytable.name) "
        "AS length_1",
        dialect=dialect,
    )
def test_create_drop_enum(self):
0
Source : test_froms.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_aliased_adapt_on_names(self):
    """``aliased(entity, subq, adapt_on_names=True)`` maps entity attributes
    onto same-named subquery labels instead of the base table columns."""
    User, Address = self.classes.User, self.classes.Address
    sess = fixture_session()
    # Aggregate deliberately labeled 'email_address' so it collides with
    # the real Address.email_address column name.
    agg_address = sess.query(
        Address.id,
        func.sum(func.length(Address.email_address)).label(
            "email_address"
        ),
    ).group_by(Address.user_id)
    ag1 = aliased(Address, agg_address.subquery())
    ag2 = aliased(Address, agg_address.subquery(), adapt_on_names=True)
    # first, without adapt on names, 'email_address' isn't matched up - we
    # get the raw "address" element in the SELECT
    self.assert_compile(
        sess.query(User, ag1.email_address)
        .join(ag1, User.addresses)
        .filter(ag1.email_address > 5),
        "SELECT users.id "
        "AS users_id, users.name AS users_name, addresses.email_address "
        "AS addresses_email_address FROM addresses, users JOIN "
        "(SELECT addresses.id AS id, sum(length(addresses.email_address)) "
        "AS email_address FROM addresses GROUP BY addresses.user_id) AS "
        "anon_1 ON users.id = addresses.user_id "
        "WHERE addresses.email_address > :email_address_1",
    )
    # second, 'email_address' matches up to the aggregate, and we get a
    # smooth JOIN from users->subquery and that's it
    self.assert_compile(
        sess.query(User, ag2.email_address)
        .join(ag2, User.addresses)
        .filter(ag2.email_address > 5),
        "SELECT users.id AS users_id, users.name AS users_name, "
        "anon_1.email_address AS anon_1_email_address FROM users "
        "JOIN ("
        "SELECT addresses.id AS id, sum(length(addresses.email_address)) "
        "AS email_address FROM addresses GROUP BY addresses.user_id) AS "
        "anon_1 ON users.id = addresses.user_id "
        "WHERE anon_1.email_address > :email_address_1",
    )
class SelectFromTest(QueryTest, AssertsCompiledSQL):
0
Source : test_defaults.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_py_vs_server_default_detection_one(self):
    """Each flavor of default/onupdate/server_default populates exactly
    the expected slots on its Column object."""
    # has_(table, colname, *slots): asserts exactly these default slots are set
    has_ = self._check_default_slots
    metadata = MetaData()
    tbl = Table(
        "default_test",
        metadata,
        # python function
        Column("col1", Integer, primary_key=True, default="1"),
        # python literal
        Column(
            "col2",
            String(20),
            default="imthedefault",
            onupdate="im the update",
        ),
        # preexecute expression
        Column(
            "col3",
            Integer,
            default=func.length("abcdef"),
            onupdate=func.length("abcdefghijk"),
        ),
        # SQL-side default from sql expression
        Column("col4", Integer, server_default="1"),
        # SQL-side default from literal expression
        Column("col5", Integer, server_default="1"),
        # preexecute + update timestamp
        Column(
            "col6",
            sa.Date,
            default=datetime.datetime.today,
            onupdate=datetime.datetime.today,
        ),
        Column("boolcol1", sa.Boolean, default=True),
        Column("boolcol2", sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column(
            "col7",
            Integer,
            default=lambda: 5,
            onupdate=lambda: 10,
        ),
        # python builtin
        Column(
            "col8",
            sa.Date,
            default=datetime.date.today,
            onupdate=datetime.date.today,
        ),
        # both a python-side and a DDL-level default on the same column
        Column("col9", String(20), default="py", server_default="ddl"),
    )
    has_(tbl, "col1", "default")
    has_(tbl, "col2", "default", "onupdate")
    has_(tbl, "col3", "default", "onupdate")
    has_(tbl, "col4", "server_default")
    has_(tbl, "col5", "server_default")
    has_(tbl, "col6", "default", "onupdate")
    has_(tbl, "boolcol1", "default")
    has_(tbl, "boolcol2", "default")
    has_(tbl, "col7", "default", "onupdate")
    has_(tbl, "col8", "default", "onupdate")
    has_(tbl, "col9", "default", "server_default")
def test_py_vs_server_default_detection_two(self):
0
Source : test_defaults.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_autoincrement(self, connection):
    """Autoincremented primary keys come back via ``inserted_primary_key``
    and are distinct and consecutive from the dialect's sequence base."""
    aitable = self.tables.aitable
    seen_ids = set()

    def _record_new_pk(result):
        # every insert must yield a truthy, never-before-seen primary key
        pk = result.inserted_primary_key[0]
        self.assert_(pk)
        self.assert_(pk not in seen_ids)
        seen_ids.add(pk)

    _record_new_pk(connection.execute(aitable.insert(), dict(int1=1)))
    _record_new_pk(connection.execute(aitable.insert(), dict(str1="row 2")))
    _record_new_pk(
        connection.execute(aitable.insert(), dict(int1=3, str1="row 3"))
    )
    # SQL expression as an inserted value: length('four') == 4
    _record_new_pk(
        connection.execute(aitable.insert().values({"int1": func.length("four")}))
    )

    base = testing.db.dialect.default_sequence_base
    # the four generated ids form a consecutive run from the base
    eq_(seen_ids, set(range(base, base + 4)))
    eq_(
        list(connection.execute(aitable.select().order_by(aitable.c.id))),
        [
            (base, 1, None),
            (base + 1, None, "row 2"),
            (base + 2, 3, "row 3"),
            (base + 3, 4, None),
        ],
    )
class AutoIncrementTest(fixtures.TestBase):
0
Source : test_deprecations.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def _test_autoincrement(self, connection):
    """Exercise autoincrement primary keys via the deprecated
    ``connection.execute(stmt, **kwargs)`` calling convention.

    NOTE(review): the legacy keyword-argument execute() calls are the
    subject of this deprecation test — do not modernize them here.
    """
    aitable = self.tables.aitable
    ids = set()
    rs = connection.execute(aitable.insert(), int1=1)
    last = rs.inserted_primary_key[0]
    # every insert must yield a truthy, never-before-seen primary key
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    rs = connection.execute(aitable.insert(), str1="row 2")
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    rs = connection.execute(aitable.insert(), int1=3, str1="row 3")
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    # SQL expression as an inserted value: length('four') == 4
    rs = connection.execute(
        aitable.insert().values({"int1": func.length("four")})
    )
    last = rs.inserted_primary_key[0]
    self.assert_(last)
    self.assert_(last not in ids)
    ids.add(last)
    # the four generated ids form a consecutive run from the dialect's base
    eq_(
        ids,
        set(
            range(
                testing.db.dialect.default_sequence_base,
                testing.db.dialect.default_sequence_base + 4,
            )
        ),
    )
    eq_(
        list(connection.execute(aitable.select().order_by(aitable.c.id))),
        [
            (testing.db.dialect.default_sequence_base, 1, None),
            (testing.db.dialect.default_sequence_base + 1, None, "row 2"),
            (testing.db.dialect.default_sequence_base + 2, 3, "row 3"),
            (testing.db.dialect.default_sequence_base + 3, 4, None),
        ],
    )
class TableDeprecationTest(fixtures.TestBase):
0
Source : test_functions.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_update(self, connection):
    """
    Tests sending functions and SQL expressions to the VALUES and SET
    clauses of INSERT/UPDATE instances, and that column-level defaults
    get overridden.
    """
    meta = self.metadata
    t = Table(
        "t1",
        meta,
        Column(
            "id",
            Integer,
            # optional sequence: only used on backends that require one
            Sequence("t1idseq", optional=True),
            primary_key=True,
        ),
        Column("value", Integer),
    )
    t2 = Table(
        "t2",
        meta,
        Column(
            "id",
            Integer,
            Sequence("t2idseq", optional=True),
            primary_key=True,
        ),
        # python-side default; explicit values must override it
        Column("value", Integer, default=7),
        # python-side onupdate; fires when an UPDATE doesn't set this column
        Column("stuff", String(20), onupdate="thisisstuff"),
    )
    meta.create_all(connection)
    # SQL function in INSERT values: length('one') == 3
    connection.execute(t.insert().values(value=func.length("one")))
    eq_(connection.execute(t.select()).first().value, 3)
    # SQL function in UPDATE SET
    connection.execute(t.update().values(value=func.length("asfda")))
    eq_(connection.execute(t.select()).first().value, 5)
    r = connection.execute(
        t.insert().values(value=func.length("sfsaafsda"))
    )
    id_ = r.inserted_primary_key[0]
    eq_(
        connection.execute(t.select().where(t.c.id == id_)).first().value,
        9,
    )
    # same, keyed by the Column object instead of the string name
    connection.execute(t.update().values({t.c.value: func.length("asdf")}))
    eq_(connection.execute(t.select()).first().value, 4)
    # no explicit value: t2.value falls back to its python-side default of 7
    connection.execute(t2.insert())
    connection.execute(t2.insert().values(value=func.length("one")))
    # SQL expression combining a function with a literal
    connection.execute(
        t2.insert().values(value=func.length("asfda") + -19),
        dict(stuff="hi"),
    )
    res = sorted(connection.execute(select(t2.c.value, t2.c.stuff)))
    eq_(res, [(-14, "hi"), (3, None), (7, None)])
    # explicit 'stuff' parameter suppresses the onupdate default
    connection.execute(
        t2.update().values(value=func.length("asdsafasd")),
        dict(stuff="some stuff"),
    )
    eq_(
        connection.execute(select(t2.c.value, t2.c.stuff)).fetchall(),
        [(9, "some stuff"), (9, "some stuff"), (9, "some stuff")],
    )
    connection.execute(t2.delete())
    connection.execute(t2.insert().values(value=func.length("one") + 8))
    eq_(connection.execute(t2.select()).first().value, 11)
    # UPDATE without 'stuff': the onupdate default 'thisisstuff' fires
    connection.execute(t2.update().values(value=func.length("asfda")))
    eq_(
        connection.execute(select(t2.c.value, t2.c.stuff)).first(),
        (5, "thisisstuff"),
    )
    # explicit 'stuff' in the SET clause overrides the onupdate default
    connection.execute(
        t2.update().values(
            {t2.c.value: func.length("asfdaasdf"), t2.c.stuff: "foo"}
        )
    )
    eq_(
        connection.execute(select(t2.c.value, t2.c.stuff)).first(),
        (9, "foo"),
    )
@testing.fails_on_everything_except("postgresql")