Commit 66f6485

Merge pull request #246 switch main to v3 version
2 parents ee57513 + 12a4b71 commit 66f6485

102 files changed: +9405 −418 lines


.github/scripts/increment_version_test.py

Lines changed: 10 additions & 9 deletions
@@ -7,16 +7,17 @@
     [
         ("0.0.0", 'patch', False, "0.0.1"),
         ("0.0.1", 'patch', False, "0.0.2"),
-        ("0.0.1a1", 'patch', False, "0.0.1"),
-        ("0.0.0", 'patch', True, "0.0.1a1"),
-        ("0.0.1", 'patch', True, "0.0.2a1"),
-        ("0.0.2a1", 'patch', True, "0.0.2a2"),
+        ("0.0.1b1", 'patch', False, "0.0.1"),
+        ("0.0.0", 'patch', True, "0.0.1b1"),
+        ("0.0.1", 'patch', True, "0.0.2b1"),
+        ("0.0.2b1", 'patch', True, "0.0.2b2"),
         ("0.0.1", 'minor', False, "0.1.0"),
-        ("0.0.1a1", 'minor', False, "0.1.0"),
-        ("0.1.0a1", 'minor', False, "0.1.0"),
-        ("0.1.0", 'minor', True, "0.2.0a1"),
-        ("0.1.0a1", 'minor', True, "0.1.0a2"),
-        ("0.1.1a1", 'minor', True, "0.2.0a1"),
+        ("0.0.1b1", 'minor', False, "0.1.0"),
+        ("0.1.0b1", 'minor', False, "0.1.0"),
+        ("0.1.0", 'minor', True, "0.2.0b1"),
+        ("0.1.0b1", 'minor', True, "0.1.0b2"),
+        ("0.1.1b1", 'minor', True, "0.2.0b1"),
+        ("3.0.0b1", 'patch', True, "3.0.0b2"),
     ]
 )
 def test_increment_version(source, inc_type, with_beta, result):
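For context, this hunk switches the pre-release suffix from "a" (alpha) to "b" (beta) in the cases that exercise the version-bump helper. The sketch below is a minimal increment function that satisfies the updated table; it is an illustration written against these test cases, not the repository's actual increment_version.py, and it assumes three-part versions plus the packaging library.

# Hedged sketch: a version bump consistent with the parametrized cases above.
# Not the repository's implementation; assumes X.Y.Z versions and `packaging`.
from packaging.version import Version


def increment_version(source: str, inc_type: str, with_beta: bool) -> str:
    v = Version(source)
    major, minor, patch = v.release  # assumes three release components, as in the tests

    if v.is_prerelease:
        # A pre-release already "reserves" its base version: finalize it
        # (or bump its beta counter) instead of moving to the next version.
        if inc_type == "patch" or (inc_type == "minor" and patch == 0):
            if with_beta:
                return f"{major}.{minor}.{patch}b{v.pre[1] + 1}"
            return f"{major}.{minor}.{patch}"

    if inc_type == "patch":
        patch += 1
    elif inc_type == "minor":
        minor, patch = minor + 1, 0

    return f"{major}.{minor}.{patch}" + ("b1" if with_beta else "")

Running the parametrized test above against this sketch reproduces all thirteen expected results, including the new "3.0.0b1" -> "3.0.0b2" case added for the v3 branch.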

.github/workflows/tests.yaml

Lines changed: 13 additions & 4 deletions
@@ -11,24 +11,33 @@ jobs:
     runs-on: ubuntu-latest
 
     concurrency:
-      group: unit-${{ github.ref }}-${{ matrix.environment }}-${{ matrix.python-version }}
+      group: unit-${{ github.ref }}-${{ matrix.environment }}-${{ matrix.python-version }}-${{ matrix.folder }}
       cancel-in-progress: true
 
     strategy:
+      fail-fast: false
       max-parallel: 4
       matrix:
         python-version: [3.8]
         environment: [py, py-tls, py-proto3, py-tls-proto3]
+        folder: [ydb, tests]
+        exclude:
+          - environment: py-tls
+            folder: ydb
+          - environment: py-tls-proto3
+            folder: ydb
 
     steps:
       - uses: actions/checkout@v1
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v2
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Install dependencies
+
+      - name: Install tox
         run: |
           python -m pip install --upgrade pip
           pip install tox==4.2.6
-      - name: Test with tox
-        run: tox -e ${{ matrix.environment }}
+
+      - name: Run unit tests
+        run: tox -e ${{ matrix.environment }} -- ${{ matrix.folder }}

AUTHORS

Lines changed: 2 additions & 1 deletion
@@ -1,4 +1,5 @@
-The following authors have created the source code of "YDB Python SDK"
+The following authors have created the source code of "Yandex Database Python SDK"
 published and distributed by YANDEX LLC as the owner:
 
 Vitalii Gridnev <[email protected]>
+Timofey Koolin <[email protected]>

CHANGELOG.md

Lines changed: 51 additions & 0 deletions
@@ -1,3 +1,54 @@
+## 3.0.1b11 ##
+* Fixed global_allow_split_transactions
+* Added reader.receive_message() method
+* Swap topic_path and consumer arguments in topic_client.reader method
+
+## 3.0.1b10 ##
+* fixed sqlalchemy get_columns method with not null columns
+* fixed requirements.txt
+
+## 3.0.1b9 ##
+* Add function for global revert broken behaviour: ydb.global_allow_truncated_result, ydb.global_allow_split_transactions
+* Change argument names from deny_split_transactions to allow_split_transactions (with reverse value
+* Fixed check retriable for idempotent error
+* Reader codecs
+* Read one message
+* fixed sqlalchemy get_columns method with not null columns
+
+## 3.0.1b8 ##
+* Fixed exception while create ResultSet with None table_settings
+
+## 3.0.1b7 ##
+* BROKEN CHANGE: deny any action in transaction after commit/rollback
+* BROKEN CHANGE: raise exception for truncated response by default
+* Compatible protobaf detection for arcadia
+* Add codecs support for topic writer
+
+## 3.0.1b6 ##
+* BROKEN CHANGES: remove writer.write(mess1, mess2) variant, use list instead: writer.write([mess1, mess2])
+* BROKEN CHANGES: change names of public method in topic client
+* BROKEN CHANGES: rename parameter producer_and_message_group_id to producer_id
+* producer_id is optional now
+
+## 3.0.1b5 ##
+* Remove six package from code and dependencies (remove support python2)
+* Use anonymous credentials by default instead of iam metadata (use ydb.driver.credentials_from_env_variables for creds by env var)
+* Close grpc streams while closing readers/writers
+* Add control plane operations for topic api: create, drop
+* Add six package to requirements
+
+## 3.0.1b4 ##
+* Initial implementation of topic reader
+
+## 3.0.1b3 ##
+* Fix error of check retriable error for idempotent operations (error exist since 2.12.1)
+
+## 3.0.1b2 ##
+* Add initial topic writer
+
+## 3.0.1b1 ##
+* start 3.0 beta branch
+
 ## 2.13.4 ##
 * fixed sqlalchemy get_columns method with not null columns

docker-compose-tls.yml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 version: "3.9"
 services:
   ydb:
-    image: cr.yandex/yc/yandex-docker-local-ydb:latest
+    image: cr.yandex/yc/yandex-docker-local-ydb@sha256:b569c23d6854564ec4d970bda86cddcf5b11c7c6362df62beb8ba8eafb8d54fd
     restart: always
     ports:
       - 2136:2136

docker-compose.yml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 version: "3.9"
 services:
   ydb:
-    image: cr.yandex/yc/yandex-docker-local-ydb:latest
+    image: cr.yandex/yc/yandex-docker-local-ydb@sha256:b569c23d6854564ec4d970bda86cddcf5b11c7c6362df62beb8ba8eafb8d54fd
     restart: always
     ports:
       - 2136:2136
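Pinning the local YDB image by digest keeps local runs and CI on a known-good container instead of whatever :latest currently points to. A minimal connection check against that container might look like the sketch below; the endpoint and database values mirror the defaults used elsewhere in this commit (grpc://localhost:2136, /local) and are assumptions about your local setup, not part of the compose change itself.

# Hedged sketch: smoke-test a connection to the local YDB container started
# from the compose files above. Endpoint/database mirror the example defaults;
# adjust them if your compose setup differs.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/local")
try:
    driver.wait(timeout=5)  # raises if the local container is not reachable
    print("connected to local ydb")
finally:
    driver.stop()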
Lines changed: 229 additions & 0 deletions
@@ -0,0 +1,229 @@

import datetime
import logging
import argparse
import sqlalchemy as sa
from sqlalchemy import orm, exc, sql
from sqlalchemy import Table, Column, Integer, String, Float, TIMESTAMP
from ydb._sqlalchemy import register_dialect

from fill_tables import fill_all_tables, to_days
from models import Base, Series, Episodes


def describe_table(engine, name):
    inspect = sa.inspect(engine)
    print(f"describe table {name}:")
    for col in inspect.get_columns(name):
        print(f"\t{col['name']}: {col['type']}")


def simple_select(conn):
    stm = sa.select(Series).where(Series.series_id == 1)
    res = conn.execute(stm)
    print(res.one())


def simple_insert(conn):
    stm = Episodes.__table__.insert().values(
        series_id=3, season_id=6, episode_id=1, title="TBD"
    )
    conn.execute(stm)


def test_types(conn):
    types_tb = Table(
        "test_types",
        Base.metadata,
        Column("id", Integer, primary_key=True),
        Column("str", String),
        Column("num", Float),
        Column("dt", TIMESTAMP),
    )
    types_tb.drop(bind=conn.engine, checkfirst=True)
    types_tb.create(bind=conn.engine, checkfirst=True)

    stm = types_tb.insert().values(
        id=1,
        str=b"Hello World!",
        num=3.1415,
        dt=datetime.datetime.now(),
    )
    conn.execute(stm)

    # GROUP BY
    stm = sa.select(types_tb.c.str, sa.func.max(types_tb.c.num)).group_by(
        types_tb.c.str
    )
    rs = conn.execute(stm)
    for x in rs:
        print(x)


def run_example_orm(engine):
    Base.metadata.bind = engine
    Base.metadata.drop_all()
    Base.metadata.create_all()

    session = orm.sessionmaker(bind=engine)()

    rs = session.query(Episodes).all()
    for e in rs:
        print(f"{e.episode_id}: {e.title}")

    fill_all_tables(session.connection())

    try:
        session.add_all(
            [
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=1,
                    title="Minimum Viable Product",
                    air_date=to_days("2014-04-06"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=2,
                    title="The Cap Table",
                    air_date=to_days("2014-04-13"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=3,
                    title="Articles of Incorporation",
                    air_date=to_days("2014-04-20"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=4,
                    title="Fiduciary Duties",
                    air_date=to_days("2014-04-27"),
                ),
                Episodes(
                    series_id=2,
                    season_id=1,
                    episode_id=5,
                    title="Signaling Risk",
                    air_date=to_days("2014-05-04"),
                ),
            ]
        )
        session.commit()
    except exc.DatabaseError:
        print("Episodes already added!")
        session.rollback()

    rs = session.query(Episodes).all()
    for e in rs:
        print(f"{e.episode_id}: {e.title}")

    rs = session.query(Episodes).filter(Episodes.title == "abc??").all()
    for e in rs:
        print(e.title)

    print("Episodes count:", session.query(Episodes).count())

    max_episode = session.query(sql.expression.func.max(Episodes.episode_id)).scalar()
    print("Maximum episodes id:", max_episode)

    session.add(
        Episodes(
            series_id=2,
            season_id=1,
            episode_id=max_episode + 1,
            title="Signaling Risk",
            air_date=to_days("2014-05-04"),
        )
    )

    print("Episodes count:", session.query(Episodes).count())


def run_example_core(engine):
    with engine.connect() as conn:
        # raw sql
        rs = conn.execute("SELECT 1 AS value")
        print(rs.fetchone()["value"])

        fill_all_tables(conn)

        for t in "series seasons episodes".split():
            describe_table(engine, t)

        tb = sa.Table("episodes", sa.MetaData(engine), autoload=True)
        stm = (
            sa.select([tb.c.title])
            .where(sa.and_(tb.c.series_id == 1, tb.c.season_id == 3))
            .where(tb.c.title.like("%"))
            .order_by(sa.asc(tb.c.title))
            # TODO: limit isn't working now
            # .limit(3)
        )
        rs = conn.execute(stm)
        print(rs.fetchall())

        simple_select(conn)

        simple_insert(conn)

        # simple join
        stm = sa.select(
            [Episodes.__table__.join(Series, Episodes.series_id == Series.series_id)]
        ).where(sa.and_(Series.series_id == 1, Episodes.season_id == 1))
        rs = conn.execute(stm)
        for row in rs:
            print(f"{row.series_title}({row.episode_id}): {row.title}")

        rs = conn.execute(sa.select(Episodes).where(Episodes.series_id == 3))
        print(rs.fetchall())

        # count
        cnt = conn.execute(sa.func.count(Episodes.episode_id)).scalar()
        print("Episodes cnt:", cnt)

        # simple delete
        conn.execute(sa.delete(Episodes).where(Episodes.title == "TBD"))
        cnt = conn.execute(sa.func.count(Episodes.episode_id)).scalar()
        print("Episodes cnt:", cnt)

        test_types(conn)


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="""\033[92mYandex.Database examples sqlalchemy usage.\x1b[0m\n""",
    )
    parser.add_argument(
        "-d",
        "--database",
        help="Name of the database to use",
        default="/local",
    )
    parser.add_argument(
        "-e",
        "--endpoint",
        help="Endpoint url to use",
        default="grpc://localhost:2136",
    )

    args = parser.parse_args()
    register_dialect()
    engine = sa.create_engine(
        "yql:///ydb/",
        connect_args={"database": args.database, "endpoint": args.endpoint},
    )

    logging.basicConfig(level=logging.INFO)
    logging.getLogger("_sqlalchemy.engine").setLevel(logging.INFO)

    run_example_core(engine)
    # run_example_orm(engine)


if __name__ == "__main__":
    main()
