Here are examples of the Python API `sqlalchemy.dialects.postgresql.dialect`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
26 Examples
3
Example 1
Project: sqlalchemy Source File: test_functions.py
def test_generic_now(self):
    """func.now() compiles to each dialect's native current-timestamp SQL."""
    assert isinstance(func.now().type, sqltypes.DateTime)
    expectations = [
        (sqlite.dialect(), 'CURRENT_TIMESTAMP'),
        (postgresql.dialect(), 'now()'),
        (mysql.dialect(), 'now()'),
        (oracle.dialect(), 'CURRENT_TIMESTAMP'),
    ]
    for dialect, expected_sql in expectations:
        self.assert_compile(func.now(), expected_sql, dialect=dialect)
3
Example 2
Project: sqlalchemy Source File: test_functions.py
def test_generic_random(self):
    """func.random() is untyped by default and renders per-dialect SQL."""
    assert func.random().type == sqltypes.NULLTYPE
    assert isinstance(func.random(type_=Integer).type, Integer)
    expectations = [
        (sqlite.dialect(), 'random()'),
        (postgresql.dialect(), 'random()'),
        (mysql.dialect(), 'rand()'),
        (oracle.dialect(), 'random()'),
    ]
    for dialect, expected_sql in expectations:
        self.assert_compile(func.random(), expected_sql, dialect=dialect)
3
Example 3
Project: sqlalchemy Source File: test_insert.py
def test_unconsumed_names_values_dict(self):
    """insert() values naming a column absent from the table raises CompileError."""
    table1 = self.tables.mytable
    values = {
        'myid': 3,
        'name': 'jack',
        'unknowncol': 'oops',
    }
    stmt = insert(table1, values=values)
    assert_raises_message(
        exc.CompileError,
        'Unconsumed column names: unknowncol',
        stmt.compile,
        dialect=postgresql.dialect(),
    )
3
Example 4
Project: sqlalchemy Source File: test_insert.py
def test_anticipate_no_pk_composite_pk_implicit_returning(self):
    """Insert supplying only part of a composite PK warns under implicit RETURNING."""
    table = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True),
        Column('y', Integer, primary_key=True),
    )
    pg = postgresql.dialect()
    pg.implicit_returning = True
    with expect_warnings(
        "Column 't.y' is marked as a member.*"
        "Note that as of SQLAlchemy 1.1,",
    ):
        self.assert_compile(
            table.insert(),
            "INSERT INTO t (x) VALUES (%(x)s)",
            params={"x": 5},
            dialect=pg,
        )
3
Example 5
Project: sqlalchemy Source File: test_insert.py
def test_anticipate_no_pk_composite_pk_prefetch(self):
    """Insert supplying only part of a composite PK warns when RETURNING is off."""
    table = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True),
        Column('y', Integer, primary_key=True),
    )
    pg = postgresql.dialect()
    pg.implicit_returning = False
    with expect_warnings(
        "Column 't.y' is marked as a member.*"
        "Note that as of SQLAlchemy 1.1,"
    ):
        self.assert_compile(
            table.insert(),
            "INSERT INTO t (x) VALUES (%(x)s)",
            params={'x': 5},
            dialect=pg,
        )
3
Example 6
Project: sqlalchemy Source File: test_insert.py
def test_anticipate_no_pk_non_composite_pk_implicit_returning(self):
    """Omitting a non-autoincrement PK warns under implicit RETURNING."""
    table = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True, autoincrement=False),
        Column('q', Integer),
    )
    pg = postgresql.dialect()
    pg.implicit_returning = True
    with expect_warnings(
        "Column 't.x' is marked as a member.*"
        "may not store NULL.$",
    ):
        self.assert_compile(
            table.insert(),
            "INSERT INTO t (q) VALUES (%(q)s)",
            params={"q": 5},
            dialect=pg,
        )
3
Example 7
Project: sqlalchemy Source File: test_insert.py
def test_anticipate_no_pk_non_composite_pk_prefetch(self):
    """Omitting a non-autoincrement PK warns when RETURNING is disabled."""
    table = Table(
        't', MetaData(),
        Column('x', Integer, primary_key=True, autoincrement=False),
        Column('q', Integer),
    )
    pg = postgresql.dialect()
    pg.implicit_returning = False
    with expect_warnings(
        "Column 't.x' is marked as a member.*"
        "may not store NULL.$"
    ):
        self.assert_compile(
            table.insert(),
            "INSERT INTO t (q) VALUES (%(q)s)",
            params={"q": 5},
            dialect=pg,
        )
3
Example 8
Project: sqlalchemy-utils Source File: test_quote.py
def test_quote_with_non_preserved_keyword(
    self,
    engine,
    connection,
    session
):
    """quote() leaves a non-reserved identifier unquoted for every executor type."""
    for executor in (connection, session, engine, postgresql.dialect()):
        assert quote(executor, 'some_order') == 'some_order'
3
Example 9
Project: sqlalchemy-utils Source File: test_expressions.py
@pytest.fixture
def assert_startswith(session):
    """Fixture: checker that a query's PostgreSQL SQL starts with query_part."""
    def assert_startswith(query, query_part):
        compiled_sql = str(query.compile(dialect=postgresql.dialect()))
        assert compiled_sql.startswith(query_part)
        # Check that query executes properly
        session.execute(query)
    return assert_startswith
3
Example 10
Project: sqlalchemy-utils Source File: test_expressions.py
def test_render_explain_analyze(self, session, Article):
    """explain_analyze() prepends EXPLAIN (ANALYZE true) to the query SQL."""
    statement = explain_analyze(session.query(Article))
    rendered = str(statement.compile(dialect=postgresql.dialect()))
    assert rendered.startswith('EXPLAIN (ANALYZE true) SELECT')
3
Example 11
Project: sqlalchemy-utils Source File: test_expressions.py
def test_with_quoted_identifier(self):
    """Asterisk on a reserved-word table name quotes only the table part."""
    Base = sa.ext.declarative.declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = sa.Column(sa.Integer, primary_key=True)

    rendered = str(
        Asterisk(User.__table__).compile(dialect=postgresql.dialect())
    )
    assert rendered == '"user".*'
3
Example 12
Project: sqlalchemy-utils Source File: test_expressions.py
def test_array_agg_with_default(self):
    """array_agg with a default wraps the aggregate in coalesce(..., ARRAY[...])."""
    Base = sa.ext.declarative.declarative_base()

    class Article(Base):
        __tablename__ = 'article'
        id = sa.Column(sa.Integer, primary_key=True)

    rendered = str(
        sa.func.array_agg(Article.id, [1]).compile(dialect=postgresql.dialect())
    )
    assert rendered == (
        'coalesce(array_agg(article.id), CAST(ARRAY[%(param_1)s]'
        ' AS INTEGER[]))'
    )
0
Example 13
def test_types(self):
    """@compiles registers per-dialect compilation for a custom TypeEngine.

    Each dialect-specific visitor returns its own literal SQL fragment;
    assert_compile verifies the right one is chosen per dialect.
    """
    class MyType(TypeEngine):
        pass

    # Distinct function names: the original defined visit_type twice,
    # so the second def shadowed the first (flake8 F811).  The @compiles
    # registry keys on (MyType, dialect name), not the function name,
    # so renaming is behavior-neutral.
    @compiles(MyType, 'sqlite')
    def visit_type_sqlite(type, compiler, **kw):
        return "SQLITE_FOO"

    @compiles(MyType, 'postgresql')
    def visit_type_postgresql(type, compiler, **kw):
        return "POSTGRES_FOO"

    from sqlalchemy.dialects.sqlite import base as sqlite
    from sqlalchemy.dialects.postgresql import base as postgresql

    self.assert_compile(
        MyType(),
        "SQLITE_FOO",
        dialect=sqlite.dialect()
    )
    self.assert_compile(
        MyType(),
        "POSTGRES_FOO",
        dialect=postgresql.dialect()
    )
0
Example 14
def test_functions(self):
    """@compiles overrides a FunctionElement per dialect with a default fallback.

    The bare @compiles registration is the default rendering; the
    'postgresql' registration takes precedence on that dialect.
    """
    from sqlalchemy.dialects import postgresql

    class MyUtcFunction(FunctionElement):
        pass

    # Distinct function names: the original defined visit_myfunc twice,
    # shadowing the first definition (flake8 F811).  @compiles keys on
    # the dialect name, not the function name, so renaming is safe.
    @compiles(MyUtcFunction)
    def visit_myfunc_default(element, compiler, **kw):
        return "utcnow()"

    @compiles(MyUtcFunction, 'postgresql')
    def visit_myfunc_postgresql(element, compiler, **kw):
        return "timezone('utc', current_timestamp)"

    self.assert_compile(
        MyUtcFunction(),
        "utcnow()",
        use_default_dialect=True
    )
    self.assert_compile(
        MyUtcFunction(),
        "timezone('utc', current_timestamp)",
        dialect=postgresql.dialect()
    )
0
Example 15
Project: sqlalchemy Source File: test_insert.py
def test_unconsumed_names_multi_values_dict(self):
    """Multi-row insert() values naming an unknown column raises CompileError."""
    table1 = self.tables.mytable
    rows = [
        {'myid': 3, 'name': 'jack', 'unknowncol': 'oops'},
        {'myid': 4, 'name': 'someone', 'unknowncol': 'oops'},
    ]
    stmt = insert(table1, values=rows)
    assert_raises_message(
        exc.CompileError,
        'Unconsumed column names: unknowncol',
        stmt.compile,
        dialect=postgresql.dialect(),
    )
0
Example 16
Project: sqlalchemy Source File: test_insert.py
def test_inline_default(self):
    """A SQL-function column default renders inline for rows that omit it."""
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, default=func.foobar()),
    )
    rows = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 'plainfoo'},
        {'id': 3, 'data': 'data3'},
    ]
    expected_params = {
        'id_m0': 1,
        'id_m1': 2,
        'id_m2': 3,
        'data_m0': 'data1',
        'data_m1': 'data2',
        'data_m2': 'data3',
        'foo_m1': 'plainfoo',
    }
    self.assert_compile(
        table.insert().values(rows),
        'INSERT INTO sometable (id, data, foo) VALUES '
        '(%(id_m0)s, %(data_m0)s, foobar()), '
        '(%(id_m1)s, %(data_m1)s, %(foo_m1)s), '
        '(%(id_m2)s, %(data_m2)s, foobar())',
        checkparams=expected_params,
        dialect=postgresql.dialect(),
    )
0
Example 17
Project: sqlalchemy Source File: test_insert.py
def test_python_scalar_default(self):
    """A Python scalar default creates late-evaluated binds for rows omitting it."""
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, default=10),
    )
    rows = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 15},
        {'id': 3, 'data': 'data3'},
    ]
    expected_params = {
        'id_m0': 1,
        'id_m1': 2,
        'id_m2': 3,
        'data_m0': 'data1',
        'data_m1': 'data2',
        'data_m2': 'data3',
        'foo': None,  # evaluated later
        'foo_m1': 15,
        'foo_m2': None,  # evaluated later
    }
    stmt = table.insert().values(rows)
    compiled = stmt.compile(dialect=postgresql.dialect())
    eq_(
        {name: bind.type._type_affinity
         for name, bind in compiled.binds.items()},
        {
            'foo': Integer, 'data_m2': String, 'id_m0': Integer,
            'id_m2': Integer, 'foo_m1': Integer, 'data_m1': String,
            'id_m1': Integer, 'foo_m2': Integer, 'data_m0': String,
        },
    )
    self.assert_compile(
        stmt,
        'INSERT INTO sometable (id, data, foo) VALUES '
        '(%(id_m0)s, %(data_m0)s, %(foo)s), '
        '(%(id_m1)s, %(data_m1)s, %(foo_m1)s), '
        '(%(id_m2)s, %(data_m2)s, %(foo_m2)s)',
        checkparams=expected_params,
        dialect=postgresql.dialect(),
    )
0
Example 18
Project: sqlalchemy Source File: test_insert.py
def test_python_fn_default(self):
    """A Python callable default creates late-evaluated binds for omitted rows."""
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, default=lambda: 10),
    )
    rows = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 15},
        {'id': 3, 'data': 'data3'},
    ]
    expected_params = {
        'id_m0': 1,
        'id_m1': 2,
        'id_m2': 3,
        'data_m0': 'data1',
        'data_m1': 'data2',
        'data_m2': 'data3',
        'foo': None,  # evaluated later
        'foo_m1': 15,
        'foo_m2': None,  # evaluated later
    }
    stmt = table.insert().values(rows)
    compiled = stmt.compile(dialect=postgresql.dialect())
    eq_(
        {name: bind.type._type_affinity
         for name, bind in compiled.binds.items()},
        {
            'foo': Integer, 'data_m2': String, 'id_m0': Integer,
            'id_m2': Integer, 'foo_m1': Integer, 'data_m1': String,
            'id_m1': Integer, 'foo_m2': Integer, 'data_m0': String,
        },
    )
    self.assert_compile(
        stmt,
        "INSERT INTO sometable (id, data, foo) VALUES "
        "(%(id_m0)s, %(data_m0)s, %(foo)s), "
        "(%(id_m1)s, %(data_m1)s, %(foo_m1)s), "
        "(%(id_m2)s, %(data_m2)s, %(foo_m2)s)",
        checkparams=expected_params,
        dialect=postgresql.dialect(),
    )
0
Example 19
Project: sqlalchemy Source File: test_insert.py
def test_sql_functions(self):
    """SQL-function values render inline per row; plain scalars become binds."""
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer),
    )
    rows = [
        {"id": 1, "data": "foo", "foo": func.foob()},
        {"id": 2, "data": "bar", "foo": func.foob()},
        {"id": 3, "data": "bar", "foo": func.bar()},
        {"id": 4, "data": "bar", "foo": 15},
        {"id": 5, "data": "bar", "foo": func.foob()},
    ]
    expected_params = {
        'id_m0': 1,
        'data_m0': 'foo',
        'id_m1': 2,
        'data_m1': 'bar',
        'id_m2': 3,
        'data_m2': 'bar',
        'id_m3': 4,
        'data_m3': 'bar',
        'foo_m3': 15,
        'id_m4': 5,
        'data_m4': 'bar',
    }
    self.assert_compile(
        table.insert().values(rows),
        "INSERT INTO sometable (id, data, foo) VALUES "
        "(%(id_m0)s, %(data_m0)s, foob()), "
        "(%(id_m1)s, %(data_m1)s, foob()), "
        "(%(id_m2)s, %(data_m2)s, bar()), "
        "(%(id_m3)s, %(data_m3)s, %(foo_m3)s), "
        "(%(id_m4)s, %(data_m4)s, foob())",
        checkparams=expected_params,
        dialect=postgresql.dialect(),
    )
0
Example 20
Project: sqlalchemy Source File: test_insert.py
def test_server_default(self):
    """A server_default column is omitted entirely from the rendered INSERT."""
    metadata = MetaData()
    table = Table(
        'sometable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String),
        Column('foo', Integer, server_default=func.foobar()),
    )
    rows = [
        {'id': 1, 'data': 'data1'},
        {'id': 2, 'data': 'data2', 'foo': 'plainfoo'},
        {'id': 3, 'data': 'data3'},
    ]
    expected_params = {
        'id_m0': 1,
        'id_m1': 2,
        'id_m2': 3,
        'data_m0': 'data1',
        'data_m1': 'data2',
        'data_m2': 'data3',
    }
    self.assert_compile(
        table.insert().values(rows),
        'INSERT INTO sometable (id, data) VALUES '
        '(%(id_m0)s, %(data_m0)s), '
        '(%(id_m1)s, %(data_m1)s), '
        '(%(id_m2)s, %(data_m2)s)',
        checkparams=expected_params,
        dialect=postgresql.dialect(),
    )
0
Example 21
Project: depsy Source File: jobs.py
def enqueue_jobs(cls,
                 method,
                 ids_q_or_list,
                 queue_number,
                 use_rq="rq",
                 chunk_size=10,
                 shortcut_fn=None
                 ):
    """
    Takes sqlalchemy query with (login, repo_name) IDs, runs fn on those repos.

    With use_rq == "rq", each chunk of IDs is enqueued on the RQ queue for
    queue_number; otherwise update_fn runs inline, optionally receiving
    precomputed shortcut data.  Returns True on completion, None when the
    query yields no rows.
    """
    shortcut_data = None
    if use_rq == "rq":
        empty_queue(queue_number)
        if shortcut_fn:
            raise ValueError("you can't use RQ with a shortcut_fn")
    else:
        if shortcut_fn:
            shortcut_data_start = time()
            print("Getting shortcut data...")
            shortcut_data = shortcut_fn()
            print("Got shortcut data in {}sec".format(
                elapsed(shortcut_data_start)
            ))

    chunk_size = int(chunk_size)

    start_time = time()
    new_loop_start_time = time()
    index = 0

    print("running this query: \n{}\n".format(
        ids_q_or_list.statement.compile(dialect=postgresql.dialect())
    ))
    row_list = ids_q_or_list.all()
    print("finished query in {}sec".format(elapsed(start_time)))

    # BUG FIX: Query.all() returns a (possibly empty) list, never None,
    # so the original `is None` test could never fire.  Test emptiness.
    if not row_list:
        print("no IDs, all done.")
        return None

    object_ids = [row[0] for row in row_list]
    num_jobs = len(object_ids)
    print("adding {} jobs to queue...".format(num_jobs))

    # iterate through chunks of IDs like [[id1, id2], [id3, id4], ... ]
    object_ids_chunk = []
    for object_ids_chunk in chunks(object_ids, chunk_size):
        update_fn_args = [cls, method, object_ids_chunk]

        if use_rq == "rq":
            job = ti_queues[queue_number].enqueue_call(
                func=update_fn,
                args=update_fn_args,
                timeout=60 * 10,
                result_ttl=0  # number of seconds
            )
            job.meta["object_ids_chunk"] = object_ids_chunk
            job.save()
        else:
            update_fn_args.append(shortcut_data)
            update_fn(*update_fn_args)

        # periodic progress report (every 1000 chunks)
        if index % 1000 == 0 and index != 0:
            print("added {} jobs to queue in {}sec total, {}sec this loop".format(
                index,
                elapsed(start_time),
                elapsed(new_loop_start_time)
            ))
            new_loop_start_time = time()
        index += 1

    print("last object added to the queue was {}".format(list(object_ids_chunk)))
    db.session.remove()  # close connection nicely
    return True
0
Example 22
Project: impactstory-tng Source File: jobs.py
def enqueue_jobs(cls,
                 method,
                 ids_q_or_list,
                 queue_number,
                 use_rq=True,
                 chunk_size=25,
                 shortcut_fn=None
                 ):
    """
    Takes sqlalchemy query with IDs, runs fn on those repos.

    Chunks of IDs are either enqueued on RQ (use_rq=True) or processed
    inline via update_fn, with per-chunk throughput and ETA reporting.
    Returns True on completion, None when the query yields no rows.
    """
    shortcut_data = None
    if use_rq:
        empty_queue(queue_number)
        if shortcut_fn:
            raise ValueError("you can't use RQ with a shortcut_fn")
    else:
        if shortcut_fn:
            shortcut_data_start = time()
            print("Getting shortcut data...")
            shortcut_data = shortcut_fn()
            print("Got shortcut data in {}sec".format(
                elapsed(shortcut_data_start)
            ))

    chunk_size = int(chunk_size)

    start_time = time()
    new_loop_start_time = time()
    index = 0

    print("running this query: \n{}\n".format(
        ids_q_or_list.statement.compile(dialect=postgresql.dialect())
    ))
    row_list = ids_q_or_list.all()
    print("finished query in {}sec".format(elapsed(start_time)))

    # BUG FIX: Query.all() returns a (possibly empty) list, never None,
    # so the original `is None` test could never fire.  Test emptiness.
    if not row_list:
        print("no IDs, all done.")
        return None

    object_ids = [row[0] for row in row_list]
    num_jobs = len(object_ids)
    print("adding {} jobs to queue...".format(num_jobs))

    # iterate through chunks of IDs like [[id1, id2], [id3, id4], ... ]
    object_ids_chunk = []
    for object_ids_chunk in chunks(object_ids, chunk_size):
        update_fn_args = [cls, method, object_ids_chunk]

        if use_rq:
            job = ti_queues[queue_number].enqueue_call(
                func=update_fn,
                args=update_fn_args,
                timeout=60 * 10,
                result_ttl=0  # number of seconds
            )
            job.meta["object_ids_chunk"] = object_ids_chunk
            job.save()
        else:
            update_fn_args.append(shortcut_data)
            update_fn(*update_fn_args, index=index)

        if True:  # index % 10 == 0 and index != 0:
            num_jobs_remaining = num_jobs - (index * chunk_size)
            try:
                jobs_per_hour_this_chunk = chunk_size / float(
                    elapsed(new_loop_start_time) / 3600)
                predicted_mins_to_finish = round(
                    (num_jobs_remaining / float(jobs_per_hour_this_chunk)) * 60,
                    1
                )
                print("\n\nWe're doing {} jobs per hour. At this rate, done in {}min".format(
                    int(jobs_per_hour_this_chunk),
                    predicted_mins_to_finish
                ))
                # floor division preserves the original Python 2 integer
                # chunk-count result under Python 3
                print("(finished chunk {} of {} chunks in {}sec total, {}sec this loop)\n".format(
                    index,
                    num_jobs // chunk_size,
                    elapsed(start_time),
                    elapsed(new_loop_start_time)
                ))
            except ZeroDivisionError:
                print(u"not printing status because divide by zero")
            new_loop_start_time = time()
        index += 1

    print("last chunk of ids: {}".format(list(object_ids_chunk)))
    db.session.remove()  # close connection nicely
    return True
0
Example 23
Project: sqlalchemy-json-api Source File: test_select.py
@pytest.mark.parametrize(
    ('fields', 'result'),
    (
        (
            {'articles': ['name_upper']},
            {
                'data': [{
                    'type': 'articles',
                    'id': '1',
                    'attributes': {
                        'name_upper': 'SOME ARTICLE'
                    }
                }]
            }
        ),
    )
)
def test_hybrid_property_inclusion_uses_clause_adaptation(
    self,
    query_builder,
    session,
    article_cls,
    fields,
    result
):
    """Hybrid-property fields are clause-adapted against the aliased from_obj."""
    query = query_builder.select(
        article_cls,
        fields=fields,
        from_obj=session.query(article_cls)
    )
    rendered = str(query.compile(dialect=sa.dialects.postgresql.dialect()))
    assert 'upper(anon_2.name)' in rendered
0
Example 24
Project: sqlalchemy-utils Source File: test_quote.py
def test_quote_with_preserved_keyword(self, engine, connection, session):
    """quote() double-quotes a reserved identifier for every executor type."""
    for executor in (connection, session, engine, postgresql.dialect()):
        assert quote(executor, 'order') == '"order"'
0
Example 25
def test_compiler_with_postgresql(self):
    """row_to_json over a raw text expression compiles to the PG function call."""
    expression = row_to_json(sa.text('article.*'))
    rendered = str(expression.compile(dialect=postgresql.dialect()))
    assert rendered == 'row_to_json(article.*)'
0
Example 26
def test_compiler_with_postgresql(self):
    """array_agg over a raw text expression compiles to the PG aggregate call."""
    expression = sa.func.array_agg(sa.text('u.name'))
    rendered = str(expression.compile(dialect=postgresql.dialect()))
    assert rendered == 'array_agg(u.name)'