
Commit 47c9f55

Upgrade SQLAlchemy to version 2.0

1 parent 327070d

File tree

15 files changed: +117 -98 lines

web/requirements.txt

Lines changed: 2 additions & 2 deletions
@@ -7,8 +7,8 @@
 Authlib<2
 requests<3 # Required by Authlib. Not installed automatically for some reason.
 lxml<6
-sqlalchemy<2
-alembic<2
+sqlalchemy~=2.0
+alembic~=1.5
 portalocker<4
 psutil<8
 multiprocess<0.71

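The pins move from hard upper bounds to PEP 440 compatible-release specifiers: sqlalchemy~=2.0 admits any 2.x release (>=2.0, <3.0) and alembic~=1.5 any 1.x release from 1.5 onwards (>=1.5, <2.0). A quick way to sanity-check what a specifier admits, using the packaging library (not a dependency touched by this commit, shown purely for illustration):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet("~=2.0")      # compatible release: >=2.0, <3.0
print(Version("2.0.31") in spec)  # True
print(Version("1.4.52") in spec)  # False
print(Version("3.0.0") in spec)   # False
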
web/requirements_py/db_pg8000/requirements.txt

Lines changed: 3 additions & 3 deletions
@@ -1,7 +1,7 @@
 lxml<6
-sqlalchemy<2
-alembic<2
-pg8000<=1.31.4
+sqlalchemy~=2.0
+alembic~=1.5
+pg8000~=1.31
 psutil<8
 portalocker<4

web/requirements_py/db_psycopg2/requirements.txt

Lines changed: 3 additions & 3 deletions
@@ -1,7 +1,7 @@
 lxml<6
-sqlalchemy<2
-alembic<2
-psycopg2-binary<=2.9.10
+sqlalchemy~=2.0
+alembic~=1.5
+psycopg2-binary~=2.9
 psutil<8
 portalocker<4

web/server/codechecker_server/api/product_server.py

Lines changed: 4 additions & 3 deletions
@@ -13,6 +13,7 @@
 import os
 import random
 
+from sqlalchemy import text
 from sqlalchemy.sql.expression import and_
 
 from sqlalchemy import create_engine, exc
@@ -354,7 +355,7 @@ def __create_product_database(self, product):
         db_pass = convert.from_b64(product_info.password_b64)
         db_name = product_info.database
 
-        engine_url = URL(
+        engine_url = URL.create(
             drivername=db_engine,
             username=db_user,
             password=db_pass,
@@ -365,9 +366,9 @@ def __create_product_database(self, product):
         engine = create_engine(engine_url)
         try:
             with engine.connect() as conn:
-                conn.execute("commit")
+                conn.execute(text("commit"))
                 LOG.info("Creating database '%s'", db_name)
-                conn.execute(f"CREATE DATABASE {db_name}")
+                conn.execute(text(f"CREATE DATABASE {db_name}"))
                 conn.close()
         except exc.ProgrammingError as e:
             LOG.error("ProgrammingError occurred: %s", str(e))

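SQLAlchemy 2.0 removes the implicit handling of plain strings passed to Connection.execute(), which is why the raw statements above are wrapped in text(), and the URL constructor is replaced by the URL.create() factory. A minimal sketch of the text() usage against an in-memory SQLite engine (the statement is illustrative; the CREATE DATABASE calls in the commit need a live PostgreSQL server):

from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.connect() as conn:
    # conn.execute("SELECT 1") raises in 2.0; textual SQL must be wrapped.
    result = conn.execute(text("SELECT 1"))
    print(result.scalar())  # 1
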
web/server/codechecker_server/api/report_server.py

Lines changed: 15 additions & 15 deletions
@@ -265,10 +265,10 @@ def process_report_filter(
     if report_filter.cleanupPlanNames:
         OR = []
         for cleanup_plan_name in report_filter.cleanupPlanNames:
-            q = select([CleanupPlanReportHash.bug_hash]) \
+            q = select(CleanupPlanReportHash.bug_hash) \
                 .where(
                     CleanupPlanReportHash.cleanup_plan_id.in_(
-                        select([CleanupPlan.id])
+                        select(CleanupPlan.id)
                         .where(CleanupPlan.name == cleanup_plan_name)
                         .distinct()
                     )) \
@@ -477,12 +477,12 @@ def get_include_skip_queries(
 
     To get the include and skip lists use the 'get_component_values' function.
     """
-    include_q = select([File.id]) \
+    include_q = select(File.id) \
         .where(or_(*[
             File.filepath.like(conv(fp)) for fp in include])) \
         .distinct()
 
-    skip_q = select([File.id]) \
+    skip_q = select(File.id) \
         .where(or_(*[
             File.filepath.like(conv(fp)) for fp in skip])) \
         .distinct()
@@ -1367,7 +1367,7 @@ def get_is_enabled_case(subquery):
         ))
 
     return case(
-        [(detection_status_filters, False)],
+        (detection_status_filters, False),
         else_=True
     )
 
@@ -1393,7 +1393,7 @@ def get_is_opened_case(subquery):
             review_status_str, review_statuses)))
     ]
     return case(
-        [(and_(*detection_and_review_status_filters), True)],
+        (and_(*detection_and_review_status_filters), True),
         else_=False
    )
 
@@ -1850,9 +1850,9 @@ def getDiffResultsHash(self, run_ids, report_hashes, diff_type,
                 base_hashes, run_ids, tag_ids)
 
             if self._product.driver_name == 'postgresql':
-                new_hashes = select([
+                new_hashes = select(
                     func.unnest(cast(report_hashes, ARRAY(String)))
-                    .label('bug_id')]) \
+                    .label('bug_id')) \
                     .except_(base_hashes).alias('new_bugs')
                 return [res[0] for res in session.query(new_hashes)]
             else:
@@ -1865,10 +1865,10 @@ def getDiffResultsHash(self, run_ids, report_hashes, diff_type,
                 for chunk in util.chunks(
                         iter(report_hashes), SQLITE_MAX_COMPOUND_SELECT):
                     new_hashes_query = union_all(*[
-                        select([bindparam('bug_id' + str(i), h)
-                               .label('bug_id')])
+                        select(bindparam('bug_id' + str(i), h)
+                               .label('bug_id'))
                         for i, h in enumerate(chunk)])
-                    q = select([new_hashes_query.subquery()]) \
+                    q = select(new_hashes_query.subquery()) \
                         .except_(base_hashes)
                     new_hashes.extend([
                         res[0] for res in session.query(q.subquery())])
@@ -1983,10 +1983,10 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
 
             annotation_cols = OrderedDict()
             for col in annotation_keys:
-                annotation_cols[col] = func.max(sqlalchemy.case([(
+                annotation_cols[col] = func.max(sqlalchemy.case((
                     ReportAnnotations.key == col,
                     cast(ReportAnnotations.value,
-                         report_annotation_types[col]["db"]))])) \
+                         report_annotation_types[col]["db"])))) \
                     .label(f"annotation_{col}")
 
             if report_filter.isUnique:
@@ -3960,7 +3960,7 @@ def getMissingContentHashes(self, file_hashes):
         with DBSession(self._Session) as session:
 
             q = session.query(FileContent) \
-                .options(sqlalchemy.orm.load_only('content_hash')) \
+                .options(sqlalchemy.orm.load_only(FileContent.content_hash)) \
                 .filter(FileContent.content_hash.in_(file_hashes))
 
             return list(set(file_hashes) -
@@ -3977,7 +3977,7 @@ def getMissingContentHashesForBlameInfo(self, file_hashes):
         with DBSession(self._Session) as session:
 
             q = session.query(FileContent) \
-                .options(sqlalchemy.orm.load_only('content_hash')) \
+                .options(sqlalchemy.orm.load_only(FileContent.content_hash)) \
                 .filter(FileContent.content_hash.in_(file_hashes)) \
                 .filter(FileContent.blame_info.isnot(None))
 

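The changes above follow the SQLAlchemy 2.0 calling conventions: select() and case() take their columns and WHEN-tuples positionally instead of inside a single list, and load_only() expects mapped attributes rather than column-name strings. A self-contained sketch of the new style; the Report model below is illustrative, not CodeChecker's actual schema:

from sqlalchemy import Column, Integer, String, case, create_engine, select
from sqlalchemy.orm import Session, declarative_base, load_only

Base = declarative_base()

class Report(Base):
    """Illustrative model, only for demonstrating the 2.0 call style."""
    __tablename__ = "reports"
    id = Column(Integer, primary_key=True)
    bug_hash = Column(String)
    status = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

# 2.0: columns are passed positionally, not wrapped in a list.
stmt = select(Report.bug_hash).where(Report.status == "new").distinct()

# 2.0: case() takes (condition, value) tuples positionally.
is_new = case((Report.status == "new", True), else_=False).label("is_new")

with Session(engine) as session:
    session.execute(stmt).scalars().all()
    session.execute(select(Report.bug_hash, is_new)).all()
    # 2.0: load_only() takes mapped attributes instead of string names.
    session.execute(select(Report).options(load_only(Report.bug_hash))).scalars().all()
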
web/server/codechecker_server/database/config_db_model.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@
 
 from sqlalchemy import Boolean, CHAR, Column, DateTime, Enum, ForeignKey, \
     Integer, MetaData, String, Text, UniqueConstraint
-from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import declarative_base
 from sqlalchemy.sql.expression import false
 
 from ..permissions import get_permissions

web/server/codechecker_server/database/database.py

Lines changed: 20 additions & 16 deletions
@@ -396,15 +396,16 @@ def create_engine(self):
             # FIXME: workaround for locking errors
             # FIXME: why is the connection used by multiple threads
             # is that a problem ??? do we need some extra locking???
-            engine = sqlalchemy.create_engine(self.get_connection_string(),
-                                              encoding='utf8',
-                                              connect_args={'timeout': 600,
-                                                            'check_same_thread': False},
-                                              poolclass=NullPool)
+            engine = sqlalchemy.create_engine(
+                self.get_connection_string(),
+                connect_args={'timeout': 600, 'check_same_thread': False},
+                poolclass=NullPool)
         else:
-            engine = sqlalchemy.create_engine(self.get_connection_string(),
-                                              encoding='utf8',
-                                              poolclass=NullPool)
+            engine = sqlalchemy.create_engine(
+                self.get_connection_string(),
+                client_encoding='utf8',
+                #connect_args={'client_encoding': 'UTF8'},
+                poolclass=NullPool)
 
         self._register_engine_hooks(engine)
         return engine
@@ -556,13 +557,14 @@ def _get_connection_string(self, database):
         extra_args = {}
         if driver == "psycopg2":
             extra_args = {'client_encoding': 'utf8'}
-        return str(URL('postgresql+' + driver,
-                       username=self.user,
-                       password=password,
-                       host=self.host,
-                       port=str(self.port),
-                       database=database,
-                       query=extra_args))
+        return URL.create(
+            drivername='postgresql+' + driver,
+            username=self.user,
+            password=password,
+            host=self.host,
+            port=str(self.port),
+            database=database,
+            query=extra_args).render_as_string(hide_password=False)
 
     def connect(self, init=False):
         """
@@ -653,7 +655,9 @@ def connect(self, init=False):
         return self.check_schema()
 
     def get_connection_string(self) -> str:
-        return str(URL('sqlite+pysqlite', None, None, None, None, self.dbpath))
+        return str(URL.create(
+            drivername='sqlite+pysqlite',
+            database=self.dbpath))
 
     def get_db_location(self):
         return self.dbpath

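create_engine() lost its encoding= keyword in SQLAlchemy 2.0 (the psycopg2 dialect takes client_encoding= instead, as the commit does above), and URL objects are now immutable: they are built with URL.create() and turned back into a plain string with render_as_string(), where hide_password=False keeps the real password in the result. A small sketch with placeholder connection values:

from sqlalchemy.engine import URL

# Placeholder connection parameters, for illustration only.
url = URL.create(
    drivername="postgresql+psycopg2",
    username="codechecker",
    password="secret",
    host="localhost",
    port=5432,
    database="codechecker_config",
    query={"client_encoding": "utf8"},
)

# render_as_string() masks the password by default; hide_password=False
# yields a string that sqlalchemy.create_engine() can consume directly.
print(url.render_as_string(hide_password=False))
# postgresql+psycopg2://codechecker:secret@localhost:5432/codechecker_config?client_encoding=utf8
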
web/server/codechecker_server/database/run_db_model.py

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
 
 from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer, \
     LargeBinary, MetaData, String, UniqueConstraint, Table, Text
-from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import declarative_base
 from sqlalchemy.orm import relationship
 from sqlalchemy.sql.expression import true, false
 

web/server/codechecker_server/migrations/config/env.py

Lines changed: 13 additions & 8 deletions
@@ -43,14 +43,7 @@ def run_migrations_online():
     In this scenario we need to create an Engine and associate a connection
     with the context.
     """
-    connectable = config.attributes.get('connection', None)
-    if connectable is None:
-        connectable = engine_from_config(
-            config.get_section(config.config_ini_section),
-            prefix='sqlalchemy.',
-            poolclass=pool.NullPool)
-
-    with connectable.connect() as connection:
+    def migrate(connection):
         context.configure(
             connection=connection,
             target_metadata=target_metadata
@@ -61,6 +54,18 @@ def run_migrations_online():
         with context.begin_transaction():
             context.run_migrations()
 
+    connection = config.attributes.get('connection', None)
+    if connection:
+        migrate(connection)
+    else:
+        connectable = engine_from_config(
+            config.get_section(config.config_ini_section),
+            prefix='sqlalchemy.',
+            poolclass=pool.NullPool)
+
+        with connectable.connect() as connection:
+            migrate(connection)
+
 
 if context.is_offline_mode():
     raise NotImplementedError(f"Offline '{schema}' migration is not possible!")

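Both env.py files are refactored the same way: the migration body moves into a local migrate(connection) helper, so env.py can either reuse a connection handed over through config.attributes or fall back to building its own engine. This matches Alembic's documented connection-sharing recipe for programmatic upgrades; a sketch of the calling side, with a placeholder database URL and config path:

from alembic import command
from alembic.config import Config
from sqlalchemy import create_engine

engine = create_engine("sqlite:///example.sqlite")  # placeholder database URL

with engine.connect() as connection:
    cfg = Config("alembic.ini")  # placeholder path to the migration config
    # env.py picks this up via config.attributes.get('connection', None).
    cfg.attributes["connection"] = connection
    command.upgrade(cfg, "head")
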
web/server/codechecker_server/migrations/report/env.py

Lines changed: 13 additions & 8 deletions
@@ -37,14 +37,7 @@ def run_migrations_online():
     In this scenario we need to create an Engine and associate a connection
     with the context.
     """
-    connectable = config.attributes.get('connection', None)
-    if connectable is None:
-        connectable = engine_from_config(
-            config.get_section(config.config_ini_section),
-            prefix='sqlalchemy.',
-            poolclass=pool.NullPool)
-
-    with connectable.connect() as connection:
+    def migrate(connection):
         context.configure(
             connection=connection,
             target_metadata=target_metadata
@@ -55,6 +48,18 @@ def run_migrations_online():
         with context.begin_transaction():
             context.run_migrations()
 
+    connection = config.attributes.get('connection', None)
+    if connection:
+        migrate(connection)
+    else:
+        connectable = engine_from_config(
+            config.get_section(config.config_ini_section),
+            prefix='sqlalchemy.',
+            poolclass=pool.NullPool)
+
+        with connectable.connect() as connection:
+            migrate(connection)
+
 
 if context.is_offline_mode():
     raise NotImplementedError(f"Offline '{schema}' migration is not possible!")
