diff options
| author | Mike Bayer <mike_mp@zzzcomputing.com> | 2023-01-24 11:05:12 -0500 |
|---|---|---|
| committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2023-01-25 19:42:14 -0500 |
| commit | 8a32f367175871500723c5ebfc0f1af1564d3478 (patch) | |
| tree | 90ab26593282bf30a6ac0a3b002493ebb8cb8e4f /examples | |
| parent | d426d3bbad1d3e4a0b80e83c4423dea055609c15 (diff) | |
| download | sqlalchemy-8a32f367175871500723c5ebfc0f1af1564d3478.tar.gz | |
add set_shard_id() loader option for horizontal shard
Added new option to horizontal sharding API
:class:`_horizontal.set_shard_id` which sets the effective shard identifier
to query against, for both the primary query and for all secondary
loaders, including relationship eager loaders as well as relationship and
column lazy loaders.
Modernize sharding examples with new-style mappings, add new asyncio example.
Fixes: #7226
Fixes: #7028
Change-Id: Ie69248060c305e8de04f75a529949777944ad511
Diffstat (limited to 'examples')
| -rw-r--r-- | examples/sharding/asyncio.py | 351 | ||||
| -rw-r--r-- | examples/sharding/separate_databases.py | 165 | ||||
| -rw-r--r-- | examples/sharding/separate_schema_translates.py | 192 | ||||
| -rw-r--r-- | examples/sharding/separate_tables.py | 165 |
4 files changed, 643 insertions, 230 deletions
diff --git a/examples/sharding/asyncio.py b/examples/sharding/asyncio.py new file mode 100644 index 000000000..a66689a5b --- /dev/null +++ b/examples/sharding/asyncio.py @@ -0,0 +1,351 @@ +"""Illustrates sharding API used with asyncio. + +For the sync version of this example, see separate_databases.py. + +Most of the code here is copied from separate_databases.py and works +in exactly the same way. The main change is how the +``async_sessionmaker`` is configured, and as is specific to this example +the routine that generates new primary keys. + +""" +from __future__ import annotations + +import asyncio +import datetime + +from sqlalchemy import Column +from sqlalchemy import ForeignKey +from sqlalchemy import inspect +from sqlalchemy import Integer +from sqlalchemy import select +from sqlalchemy import Table +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.ext.horizontal_shard import set_shard_id +from sqlalchemy.ext.horizontal_shard import ShardedSession +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import immediateload +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import relationship +from sqlalchemy.sql import operators +from sqlalchemy.sql import visitors + + +echo = True +db1 = create_async_engine("sqlite+aiosqlite://", echo=echo) +db2 = create_async_engine("sqlite+aiosqlite://", echo=echo) +db3 = create_async_engine("sqlite+aiosqlite://", echo=echo) +db4 = create_async_engine("sqlite+aiosqlite://", echo=echo) + + +# for asyncio, the ShardedSession class is passed +# via sync_session_class. The shards themselves are used within +# implicit-awaited internals, so we use the sync_engine Engine objects +# in the shards dictionary. 
+Session = async_sessionmaker( + sync_session_class=ShardedSession, + expire_on_commit=False, + shards={ + "north_america": db1.sync_engine, + "asia": db2.sync_engine, + "europe": db3.sync_engine, + "south_america": db4.sync_engine, + }, +) + + +# mappings and tables +class Base(DeclarativeBase): + pass + + +# we need a way to create identifiers which are unique across all databases. +# one easy way would be to just use a composite primary key, where one value +# is the shard id. but here, we'll show something more "generic", an id +# generation function. we'll use a simplistic "id table" stored in database +# #1. Any other method will do just as well; UUID, hilo, application-specific, +# etc. + +ids = Table("ids", Base.metadata, Column("nextid", Integer, nullable=False)) + + +def id_generator(ctx): + # id_generator is run within a "synchronous" context, where + # we use an implicit-await API that will convert back to explicit await + # calls when it reaches the driver. + with db1.sync_engine.begin() as conn: + nextid = conn.scalar(ids.select().with_for_update()) + conn.execute(ids.update().values({ids.c.nextid: ids.c.nextid + 1})) + return nextid + + +# table setup. we'll store a lead table of continents/cities, and a secondary +# table storing locations. a particular row will be placed in the database +# whose shard id corresponds to the 'continent'. in this setup, secondary rows +# in 'weather_reports' will be placed in the same DB as that of the parent, but +# this can be changed if you're willing to write more complex sharding +# functions. 
+ + +class WeatherLocation(Base): + __tablename__ = "weather_locations" + + id: Mapped[int] = mapped_column(primary_key=True, default=id_generator) + continent: Mapped[str] + city: Mapped[str] + + reports: Mapped[list[Report]] = relationship(back_populates="location") + + def __init__(self, continent: str, city: str): + self.continent = continent + self.city = city + + +class Report(Base): + __tablename__ = "weather_reports" + + id: Mapped[int] = mapped_column(primary_key=True) + location_id: Mapped[int] = mapped_column( + ForeignKey("weather_locations.id") + ) + temperature: Mapped[float] + report_time: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.now + ) + + location: Mapped[WeatherLocation] = relationship(back_populates="reports") + + def __init__(self, temperature: float): + self.temperature = temperature + + +# step 5. define sharding functions. + +# we'll use a straight mapping of a particular set of "country" +# attributes to shard id. +shard_lookup = { + "North America": "north_america", + "Asia": "asia", + "Europe": "europe", + "South America": "south_america", +} + + +def shard_chooser(mapper, instance, clause=None): + """shard chooser. + + looks at the given instance and returns a shard id + note that we need to define conditions for + the WeatherLocation class, as well as our secondary Report class which will + point back to its WeatherLocation via its 'location' attribute. + + """ + if isinstance(instance, WeatherLocation): + return shard_lookup[instance.continent] + else: + return shard_chooser(mapper, instance.location) + + +def identity_chooser(mapper, primary_key, *, lazy_loaded_from, **kw): + """identity chooser. + + given a primary key, returns a list of shards + to search. here, we don't have any particular information from a + pk so we just return all shard ids. often, you'd want to do some + kind of round-robin strategy here so that requests are evenly + distributed among DBs. 
+ + """ + if lazy_loaded_from: + # if we are in a lazy load, we can look at the parent object + # and limit our search to that same shard, assuming that's how we've + # set things up. + return [lazy_loaded_from.identity_token] + else: + return ["north_america", "asia", "europe", "south_america"] + + +def execute_chooser(context): + """statement execution chooser. + + this also returns a list of shard ids, which can just be all of them. but + here we'll search into the execution context in order to try to narrow down + the list of shards to SELECT. + + """ + ids = [] + + # we'll grab continent names as we find them + # and convert to shard ids + for column, operator, value in _get_select_comparisons(context.statement): + # "shares_lineage()" returns True if both columns refer to the same + # statement column, adjusting for any annotations present. + # (an annotation is an internal clone of a Column object + # and occur when using ORM-mapped attributes like + # "WeatherLocation.continent"). A simpler comparison, though less + # accurate, would be "column.key == 'continent'". + if column.shares_lineage(WeatherLocation.__table__.c.continent): + if operator == operators.eq: + ids.append(shard_lookup[value]) + elif operator == operators.in_op: + ids.extend(shard_lookup[v] for v in value) + + if len(ids) == 0: + return ["north_america", "asia", "europe", "south_america"] + else: + return ids + + +def _get_select_comparisons(statement): + """Search a Select or Query object for binary expressions. + + Returns expressions which match a Column against one or more + literal values as a list of tuples of the form + (column, operator, values). "values" is a single value + or tuple of values depending on the operator. + + """ + binds = {} + clauses = set() + comparisons = [] + + def visit_bindparam(bind): + # visit a bind parameter. 
+ + value = bind.effective_value + binds[bind] = value + + def visit_column(column): + clauses.add(column) + + def visit_binary(binary): + if binary.left in clauses and binary.right in binds: + comparisons.append( + (binary.left, binary.operator, binds[binary.right]) + ) + + elif binary.left in binds and binary.right in clauses: + comparisons.append( + (binary.right, binary.operator, binds[binary.left]) + ) + + # here we will traverse through the query's criterion, searching + # for SQL constructs. We will place simple column comparisons + # into a list. + if statement.whereclause is not None: + visitors.traverse( + statement.whereclause, + {}, + { + "bindparam": visit_bindparam, + "binary": visit_binary, + "column": visit_column, + }, + ) + return comparisons + + +# further configure create_session to use these functions +Session.configure( + shard_chooser=shard_chooser, + identity_chooser=identity_chooser, + execute_chooser=execute_chooser, +) + + +async def setup(): + # create tables + for db in (db1, db2, db3, db4): + async with db.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + # establish initial "id" in db1 + async with db1.begin() as conn: + await conn.execute(ids.insert(), {"nextid": 1}) + + +async def main(): + await setup() + + # save and load objects! 
+ + tokyo = WeatherLocation("Asia", "Tokyo") + newyork = WeatherLocation("North America", "New York") + toronto = WeatherLocation("North America", "Toronto") + london = WeatherLocation("Europe", "London") + dublin = WeatherLocation("Europe", "Dublin") + brasilia = WeatherLocation("South America", "Brasila") + quito = WeatherLocation("South America", "Quito") + + tokyo.reports.append(Report(80.0)) + newyork.reports.append(Report(75)) + quito.reports.append(Report(85)) + + async with Session() as sess: + + sess.add_all( + [tokyo, newyork, toronto, london, dublin, brasilia, quito] + ) + + await sess.commit() + + t = await sess.get( + WeatherLocation, + tokyo.id, + options=[immediateload(WeatherLocation.reports)], + ) + assert t.city == tokyo.city + assert t.reports[0].temperature == 80.0 + + # select across shards + asia_and_europe = ( + await sess.execute( + select(WeatherLocation).filter( + WeatherLocation.continent.in_(["Europe", "Asia"]) + ) + ) + ).scalars() + + assert {c.city for c in asia_and_europe} == { + "Tokyo", + "London", + "Dublin", + } + + # optionally set a shard id for the query and all related loaders + north_american_cities_w_t = ( + await sess.execute( + select(WeatherLocation) + .filter(WeatherLocation.city.startswith("T")) + .options(set_shard_id("north_america")) + ) + ).scalars() + + # Tokyo not included since not in the north_america shard + assert {c.city for c in north_american_cities_w_t} == { + "Toronto", + } + + # the Report class uses a simple integer primary key. So across two + # databases, a primary key will be repeated. The "identity_token" + # tracks in memory that these two identical primary keys are local to + # different shards. 
+ newyork_report = newyork.reports[0] + tokyo_report = tokyo.reports[0] + + assert inspect(newyork_report).identity_key == ( + Report, + (1,), + "north_america", + ) + assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + + # the token representing the originating shard is also available + # directly + assert inspect(newyork_report).identity_token == "north_america" + assert inspect(tokyo_report).identity_token == "asia" + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/sharding/separate_databases.py b/examples/sharding/separate_databases.py index fe92fd3ba..65364773b 100644 --- a/examples/sharding/separate_databases.py +++ b/examples/sharding/separate_databases.py @@ -1,19 +1,20 @@ """Illustrates sharding using distinct SQLite databases.""" +from __future__ import annotations import datetime from sqlalchemy import Column from sqlalchemy import create_engine -from sqlalchemy import DateTime -from sqlalchemy import Float from sqlalchemy import ForeignKey from sqlalchemy import inspect from sqlalchemy import Integer from sqlalchemy import select -from sqlalchemy import String from sqlalchemy import Table -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.horizontal_shard import set_shard_id from sqlalchemy.ext.horizontal_shard import ShardedSession +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.orm import sessionmaker from sqlalchemy.sql import operators @@ -41,7 +42,9 @@ Session = sessionmaker( # mappings and tables -Base = declarative_base() +class Base(DeclarativeBase): + pass + # we need a way to create identifiers which are unique across all databases. 
# one easy way would be to just use a composite primary key, where one value @@ -72,13 +75,13 @@ def id_generator(ctx): class WeatherLocation(Base): __tablename__ = "weather_locations" - id = Column(Integer, primary_key=True, default=id_generator) - continent = Column(String(30), nullable=False) - city = Column(String(50), nullable=False) + id: Mapped[int] = mapped_column(primary_key=True, default=id_generator) + continent: Mapped[str] + city: Mapped[str] - reports = relationship("Report", backref="location") + reports: Mapped[list[Report]] = relationship(back_populates="location") - def __init__(self, continent, city): + def __init__(self, continent: str, city: str): self.continent = continent self.city = city @@ -86,29 +89,22 @@ class WeatherLocation(Base): class Report(Base): __tablename__ = "weather_reports" - id = Column(Integer, primary_key=True) - location_id = Column( - "location_id", Integer, ForeignKey("weather_locations.id") + id: Mapped[int] = mapped_column(primary_key=True) + location_id: Mapped[int] = mapped_column( + ForeignKey("weather_locations.id") ) - temperature = Column("temperature", Float) - report_time = Column( - "report_time", DateTime, default=datetime.datetime.now + temperature: Mapped[float] + report_time: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.now ) - def __init__(self, temperature): - self.temperature = temperature - + location: Mapped[WeatherLocation] = relationship(back_populates="reports") -# create tables -for db in (db1, db2, db3, db4): - Base.metadata.create_all(db) - -# establish initial "id" in db1 -with db1.begin() as conn: - conn.execute(ids.insert(), {"nextid": 1}) + def __init__(self, temperature: float): + self.temperature = temperature -# step 5. define sharding functions. +# define sharding functions. # we'll use a straight mapping of a particular set of "country" # attributes to shard id. 
@@ -241,61 +237,90 @@ Session.configure( execute_chooser=execute_chooser, ) -# save and load objects! -tokyo = WeatherLocation("Asia", "Tokyo") -newyork = WeatherLocation("North America", "New York") -toronto = WeatherLocation("North America", "Toronto") -london = WeatherLocation("Europe", "London") -dublin = WeatherLocation("Europe", "Dublin") -brasilia = WeatherLocation("South America", "Brasila") -quito = WeatherLocation("South America", "Quito") +def setup(): + # create tables + for db in (db1, db2, db3, db4): + Base.metadata.create_all(db) -tokyo.reports.append(Report(80.0)) -newyork.reports.append(Report(75)) -quito.reports.append(Report(85)) + # establish initial "id" in db1 + with db1.begin() as conn: + conn.execute(ids.insert(), {"nextid": 1}) -with Session() as sess: - sess.add_all([tokyo, newyork, toronto, london, dublin, brasilia, quito]) +def main(): + setup() - sess.commit() + # save and load objects! - t = sess.get(WeatherLocation, tokyo.id) - assert t.city == tokyo.city - assert t.reports[0].temperature == 80.0 + tokyo = WeatherLocation("Asia", "Tokyo") + newyork = WeatherLocation("North America", "New York") + toronto = WeatherLocation("North America", "Toronto") + london = WeatherLocation("Europe", "London") + dublin = WeatherLocation("Europe", "Dublin") + brasilia = WeatherLocation("South America", "Brasila") + quito = WeatherLocation("South America", "Quito") - north_american_cities = sess.execute( - select(WeatherLocation).filter( - WeatherLocation.continent == "North America" - ) - ).scalars() + tokyo.reports.append(Report(80.0)) + newyork.reports.append(Report(75)) + quito.reports.append(Report(85)) - assert {c.city for c in north_american_cities} == {"New York", "Toronto"} + with Session() as sess: - asia_and_europe = sess.execute( - select(WeatherLocation).filter( - WeatherLocation.continent.in_(["Europe", "Asia"]) + sess.add_all( + [tokyo, newyork, toronto, london, dublin, brasilia, quito] ) - ).scalars() - assert {c.city for c in 
asia_and_europe} == {"Tokyo", "London", "Dublin"} + sess.commit() - # the Report class uses a simple integer primary key. So across two - # databases, a primary key will be repeated. The "identity_token" tracks - # in memory that these two identical primary keys are local to different - # databases. - newyork_report = newyork.reports[0] - tokyo_report = tokyo.reports[0] + t = sess.get(WeatherLocation, tokyo.id) + assert t.city == tokyo.city + assert t.reports[0].temperature == 80.0 - assert inspect(newyork_report).identity_key == ( - Report, - (1,), - "north_america", - ) - assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + # select across shards + asia_and_europe = sess.execute( + select(WeatherLocation).filter( + WeatherLocation.continent.in_(["Europe", "Asia"]) + ) + ).scalars() + + assert {c.city for c in asia_and_europe} == { + "Tokyo", + "London", + "Dublin", + } + + # optionally set a shard id for the query and all related loaders + north_american_cities_w_t = sess.execute( + select(WeatherLocation) + .filter(WeatherLocation.city.startswith("T")) + .options(set_shard_id("north_america")) + ).scalars() + + # Tokyo not included since not in the north_america shard + assert {c.city for c in north_american_cities_w_t} == { + "Toronto", + } + + # the Report class uses a simple integer primary key. So across two + # databases, a primary key will be repeated. The "identity_token" + # tracks in memory that these two identical primary keys are local to + # different shards. 
+ newyork_report = newyork.reports[0] + tokyo_report = tokyo.reports[0] + + assert inspect(newyork_report).identity_key == ( + Report, + (1,), + "north_america", + ) + assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + + # the token representing the originating shard is also available + # directly + assert inspect(newyork_report).identity_token == "north_america" + assert inspect(tokyo_report).identity_token == "asia" - # the token representing the originating shard is also available directly - assert inspect(newyork_report).identity_token == "north_america" - assert inspect(tokyo_report).identity_token == "asia" +if __name__ == "__main__": + main() diff --git a/examples/sharding/separate_schema_translates.py b/examples/sharding/separate_schema_translates.py index f7bdc6250..0b5b08e57 100644 --- a/examples/sharding/separate_schema_translates.py +++ b/examples/sharding/separate_schema_translates.py @@ -4,20 +4,20 @@ where a different "schema_translates_map" can be used for each shard. In this example we will set a "shard id" at all times. """ +from __future__ import annotations + import datetime import os -from sqlalchemy import Column from sqlalchemy import create_engine -from sqlalchemy import DateTime -from sqlalchemy import Float from sqlalchemy import ForeignKey from sqlalchemy import inspect -from sqlalchemy import Integer from sqlalchemy import select -from sqlalchemy import String -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.horizontal_shard import set_shard_id from sqlalchemy.ext.horizontal_shard import ShardedSession +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.orm import sessionmaker @@ -55,7 +55,8 @@ Session = sessionmaker( # mappings and tables -Base = declarative_base() +class Base(DeclarativeBase): + pass # table setup. 
we'll store a lead table of continents/cities, and a secondary @@ -69,13 +70,13 @@ Base = declarative_base() class WeatherLocation(Base): __tablename__ = "weather_locations" - id = Column(Integer, primary_key=True) - continent = Column(String(30), nullable=False) - city = Column(String(50), nullable=False) + id: Mapped[int] = mapped_column(primary_key=True) + continent: Mapped[str] + city: Mapped[str] - reports = relationship("Report", backref="location") + reports: Mapped[list[Report]] = relationship(back_populates="location") - def __init__(self, continent, city): + def __init__(self, continent: str, city: str): self.continent = continent self.city = city @@ -83,25 +84,22 @@ class WeatherLocation(Base): class Report(Base): __tablename__ = "weather_reports" - id = Column(Integer, primary_key=True) - location_id = Column( - "location_id", Integer, ForeignKey("weather_locations.id") + id: Mapped[int] = mapped_column(primary_key=True) + location_id: Mapped[int] = mapped_column( + ForeignKey("weather_locations.id") ) - temperature = Column("temperature", Float) - report_time = Column( - "report_time", DateTime, default=datetime.datetime.now + temperature: Mapped[float] + report_time: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.now ) - def __init__(self, temperature): - self.temperature = temperature - + location: Mapped[WeatherLocation] = relationship(back_populates="reports") -# create tables -for db in (db1, db2, db3, db4): - Base.metadata.create_all(db) + def __init__(self, temperature: float): + self.temperature = temperature -# step 5. define sharding functions. +# define sharding functions. # we'll use a straight mapping of a particular set of "country" # attributes to shard id. @@ -154,15 +152,11 @@ def execute_chooser(context): given an :class:`.ORMExecuteState` for a statement, return a list of shards we should consult. - As before, we want a "shard_id" execution option to be present. 
- Otherwise, this would be a lazy load from a parent object where we - will look for the previous token. - """ if context.lazy_loaded_from: return [context.lazy_loaded_from.identity_token] else: - return [context.execution_options["shard_id"]] + return ["north_america", "asia", "europe", "south_america"] # configure shard chooser @@ -172,70 +166,90 @@ Session.configure( execute_chooser=execute_chooser, ) -# save and load objects! -tokyo = WeatherLocation("Asia", "Tokyo") -newyork = WeatherLocation("North America", "New York") -toronto = WeatherLocation("North America", "Toronto") -london = WeatherLocation("Europe", "London") -dublin = WeatherLocation("Europe", "Dublin") -brasilia = WeatherLocation("South America", "Brasila") -quito = WeatherLocation("South America", "Quito") +def setup(): + # create tables + for db in (db1, db2, db3, db4): + Base.metadata.create_all(db) -tokyo.reports.append(Report(80.0)) -newyork.reports.append(Report(75)) -quito.reports.append(Report(85)) -with Session() as sess: +def main(): + setup() - sess.add_all([tokyo, newyork, toronto, london, dublin, brasilia, quito]) + # save and load objects! - sess.commit() + tokyo = WeatherLocation("Asia", "Tokyo") + newyork = WeatherLocation("North America", "New York") + toronto = WeatherLocation("North America", "Toronto") + london = WeatherLocation("Europe", "London") + dublin = WeatherLocation("Europe", "Dublin") + brasilia = WeatherLocation("South America", "Brasila") + quito = WeatherLocation("South America", "Quito") - t = sess.get( - WeatherLocation, - tokyo.id, - # for session.get(), we currently need to use identity_token. 
- # the horizontal sharding API does not yet pass through the - # execution options - identity_token="asia", - # future version - # execution_options={"shard_id": "asia"} - ) - assert t.city == tokyo.city - assert t.reports[0].temperature == 80.0 - - north_american_cities = sess.execute( - select(WeatherLocation).filter( - WeatherLocation.continent == "North America" - ), - execution_options={"shard_id": "north_america"}, - ).scalars() - - assert {c.city for c in north_american_cities} == {"New York", "Toronto"} - - europe = sess.execute( - select(WeatherLocation).filter(WeatherLocation.continent == "Europe"), - execution_options={"shard_id": "europe"}, - ).scalars() - - assert {c.city for c in europe} == {"London", "Dublin"} - - # the Report class uses a simple integer primary key. So across two - # databases, a primary key will be repeated. The "identity_token" tracks - # in memory that these two identical primary keys are local to different - # databases. - newyork_report = newyork.reports[0] - tokyo_report = tokyo.reports[0] - - assert inspect(newyork_report).identity_key == ( - Report, - (1,), - "north_america", - ) - assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + tokyo.reports.append(Report(80.0)) + newyork.reports.append(Report(75)) + quito.reports.append(Report(85)) + + with Session() as sess: + + sess.add_all( + [tokyo, newyork, toronto, london, dublin, brasilia, quito] + ) + + sess.commit() + + t = sess.get( + WeatherLocation, + tokyo.id, + identity_token="asia", + ) + assert t.city == tokyo.city + assert t.reports[0].temperature == 80.0 + + # select across shards + asia_and_europe = sess.execute( + select(WeatherLocation).filter( + WeatherLocation.continent.in_(["Europe", "Asia"]) + ) + ).scalars() + + assert {c.city for c in asia_and_europe} == { + "Tokyo", + "London", + "Dublin", + } + + # optionally set a shard id for the query and all related loaders + north_american_cities_w_t = sess.execute( + select(WeatherLocation) + 
.filter(WeatherLocation.city.startswith("T")) + .options(set_shard_id("north_america")) + ).scalars() + + # Tokyo not included since not in the north_america shard + assert {c.city for c in north_american_cities_w_t} == { + "Toronto", + } + + # the Report class uses a simple integer primary key. So across two + # databases, a primary key will be repeated. The "identity_token" + # tracks in memory that these two identical primary keys are local to + # different shards. + newyork_report = newyork.reports[0] + tokyo_report = tokyo.reports[0] + + assert inspect(newyork_report).identity_key == ( + Report, + (1,), + "north_america", + ) + assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + + # the token representing the originating shard is also available + # directly + assert inspect(newyork_report).identity_token == "north_america" + assert inspect(tokyo_report).identity_token == "asia" - # the token representing the originating shard is also available directly - assert inspect(newyork_report).identity_token == "north_america" - assert inspect(tokyo_report).identity_token == "asia" +if __name__ == "__main__": + main() diff --git a/examples/sharding/separate_tables.py b/examples/sharding/separate_tables.py index 97c6a07f6..98db3771f 100644 --- a/examples/sharding/separate_tables.py +++ b/examples/sharding/separate_tables.py @@ -1,27 +1,27 @@ """Illustrates sharding using a single SQLite database, that will however have multiple tables using a naming convention.""" +from __future__ import annotations import datetime from sqlalchemy import Column from sqlalchemy import create_engine -from sqlalchemy import DateTime from sqlalchemy import event -from sqlalchemy import Float from sqlalchemy import ForeignKey from sqlalchemy import inspect from sqlalchemy import Integer from sqlalchemy import select -from sqlalchemy import String from sqlalchemy import Table -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.ext.horizontal_shard 
import set_shard_id from sqlalchemy.ext.horizontal_shard import ShardedSession +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.orm import sessionmaker from sqlalchemy.sql import operators from sqlalchemy.sql import visitors - echo = True engine = create_engine("sqlite://", echo=echo) @@ -55,7 +55,9 @@ Session = sessionmaker( # mappings and tables -Base = declarative_base() +class Base(DeclarativeBase): + pass + # we need a way to create identifiers which are unique across all databases. # one easy way would be to just use a composite primary key, where one value @@ -86,13 +88,13 @@ def id_generator(ctx): class WeatherLocation(Base): __tablename__ = "_prefix__weather_locations" - id = Column(Integer, primary_key=True, default=id_generator) - continent = Column(String(30), nullable=False) - city = Column(String(50), nullable=False) + id: Mapped[int] = mapped_column(primary_key=True, default=id_generator) + continent: Mapped[str] + city: Mapped[str] - reports = relationship("Report", backref="location") + reports: Mapped[list[Report]] = relationship(back_populates="location") - def __init__(self, continent, city): + def __init__(self, continent: str, city: str): self.continent = continent self.city = city @@ -100,29 +102,22 @@ class WeatherLocation(Base): class Report(Base): __tablename__ = "_prefix__weather_reports" - id = Column(Integer, primary_key=True) - location_id = Column( - "location_id", Integer, ForeignKey("_prefix__weather_locations.id") + id: Mapped[int] = mapped_column(primary_key=True) + location_id: Mapped[int] = mapped_column( + ForeignKey("_prefix__weather_locations.id") ) - temperature = Column("temperature", Float) - report_time = Column( - "report_time", DateTime, default=datetime.datetime.now + temperature: Mapped[float] + report_time: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.now ) - def 
__init__(self, temperature): - self.temperature = temperature - + location: Mapped[WeatherLocation] = relationship(back_populates="reports") -# create tables -for db in (db1, db2, db3, db4): - Base.metadata.create_all(db) - -# establish initial "id" in db1 -with db1.begin() as conn: - conn.execute(ids.insert(), {"nextid": 1}) + def __init__(self, temperature: float): + self.temperature = temperature -# step 5. define sharding functions. +# define sharding functions. # we'll use a straight mapping of a particular set of "country" # attributes to shard id. @@ -255,61 +250,89 @@ Session.configure( execute_chooser=execute_chooser, ) -# save and load objects! -tokyo = WeatherLocation("Asia", "Tokyo") -newyork = WeatherLocation("North America", "New York") -toronto = WeatherLocation("North America", "Toronto") -london = WeatherLocation("Europe", "London") -dublin = WeatherLocation("Europe", "Dublin") -brasilia = WeatherLocation("South America", "Brasila") -quito = WeatherLocation("South America", "Quito") +def setup(): + # create tables + for db in (db1, db2, db3, db4): + Base.metadata.create_all(db) -tokyo.reports.append(Report(80.0)) -newyork.reports.append(Report(75)) -quito.reports.append(Report(85)) + # establish initial "id" in db1 + with db1.begin() as conn: + conn.execute(ids.insert(), {"nextid": 1}) -with Session() as sess: - sess.add_all([tokyo, newyork, toronto, london, dublin, brasilia, quito]) +def main(): + setup() - sess.commit() + # save and load objects! 
- t = sess.get(WeatherLocation, tokyo.id) - assert t.city == tokyo.city - assert t.reports[0].temperature == 80.0 + tokyo = WeatherLocation("Asia", "Tokyo") + newyork = WeatherLocation("North America", "New York") + toronto = WeatherLocation("North America", "Toronto") + london = WeatherLocation("Europe", "London") + dublin = WeatherLocation("Europe", "Dublin") + brasilia = WeatherLocation("South America", "Brasila") + quito = WeatherLocation("South America", "Quito") - north_american_cities = sess.execute( - select(WeatherLocation).filter( - WeatherLocation.continent == "North America" - ) - ).scalars() + tokyo.reports.append(Report(80.0)) + newyork.reports.append(Report(75)) + quito.reports.append(Report(85)) - assert {c.city for c in north_american_cities} == {"New York", "Toronto"} + with Session() as sess: - asia_and_europe = sess.execute( - select(WeatherLocation).filter( - WeatherLocation.continent.in_(["Europe", "Asia"]) + sess.add_all( + [tokyo, newyork, toronto, london, dublin, brasilia, quito] ) - ).scalars() - assert {c.city for c in asia_and_europe} == {"Tokyo", "London", "Dublin"} + sess.commit() - # the Report class uses a simple integer primary key. So across two - # databases, a primary key will be repeated. The "identity_token" tracks - # in memory that these two identical primary keys are local to different - # databases. 
- newyork_report = newyork.reports[0] - tokyo_report = tokyo.reports[0] + t = sess.get(WeatherLocation, tokyo.id) + assert t.city == tokyo.city + assert t.reports[0].temperature == 80.0 - assert inspect(newyork_report).identity_key == ( - Report, - (1,), - "north_america", - ) - assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + # optionally set a shard id for the query and all related loaders + north_american_cities_w_t = sess.execute( + select(WeatherLocation) + .filter(WeatherLocation.city.startswith("T")) + .options(set_shard_id("north_america")) + ).scalars() + + # Tokyo not included since not in the north_america shard + assert {c.city for c in north_american_cities_w_t} == { + "Toronto", + } + + asia_and_europe = sess.execute( + select(WeatherLocation).filter( + WeatherLocation.continent.in_(["Europe", "Asia"]) + ) + ).scalars() + + assert {c.city for c in asia_and_europe} == { + "Tokyo", + "London", + "Dublin", + } + + # the Report class uses a simple integer primary key. So across two + # databases, a primary key will be repeated. The "identity_token" + # tracks in memory that these two identical primary keys are local to + # different shards. + newyork_report = newyork.reports[0] + tokyo_report = tokyo.reports[0] + + assert inspect(newyork_report).identity_key == ( + Report, + (1,), + "north_america", + ) + assert inspect(tokyo_report).identity_key == (Report, (1,), "asia") + + # the token representing the originating shard is also available + # directly + assert inspect(newyork_report).identity_token == "north_america" + assert inspect(tokyo_report).identity_token == "asia" - # the token representing the originating shard is also available directly - assert inspect(newyork_report).identity_token == "north_america" - assert inspect(tokyo_report).identity_token == "asia" +if __name__ == "__main__": + main() |
