Using Python 3.5 and SQLAlchemy 1.0.14 (ORM).
I have a table of items declared as follows:

from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String)
    # other non relevant attributes
My Items can be of many different types, the type identifier being stored in type.
For a few of those object types, I need specific methods or attributes to be available.
To achieve that, I tried to use single table inheritance with SpecialisedItem subclasses of Item:
class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    # other non relevant attributes
    __mapper_args__ = {
        'polymorphic_on': type,
    }

class SpecialisedItem(Item):
    __mapper_args__ = {
        'polymorphic_identity': 'specialitem',
    }

    def specialised_method(self):
        return "I am special"
Now when I load my items, I'd want all specialised items (having type == 'specialitem') to be loaded as SpecialisedItem, while any other type value would result in the parent class Item being loaded.
That doesn't work: I get AssertionError: No such polymorphic_identity 'normal' is defined when loading the items.
I would like to avoid creating inherited classes that do nothing, just to cover all possible type values; instead, any "unmapped" type should fall back to the parent class Item.
Is there any way to achieve that effect?
Minimal test case for reference:
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    # other non relevant attributes
    __mapper_args__ = {
        'polymorphic_on': type,
    }

class SpecialisedItem(Item):
    __mapper_args__ = {
        'polymorphic_identity': 'special',
    }
    specialAttribute = Column(String)

    def specialised_method(self):
        return "I am special"

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()

session.add(Item(type='normal'))
session.add(Item(type='special'))
session.commit()

# loading only specialised items works
for item in session.query(Item).filter_by(type="special"):
    print(item.specialised_method())

# loading other items fails
for item in session.query(Item):
    print(item.type)
Thanks,
Guillaume
A mapping of “polymorphic identity” identifiers to Mapper instances is stored in the polymorphic_map dict. You can create a custom polymorphic_map that returns the parent class mapper for undefined polymorphic identities.
from collections import defaultdict

from sqlalchemy import event
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    # other non relevant attributes
    __mapper_args__ = {
        'polymorphic_on': type,
    }

class SpecialisedItem(Item):
    __mapper_args__ = {
        'polymorphic_identity': 'special',
    }
    specialAttribute = Column(String)

    def specialised_method(self):
        return "I am special"

# http://docs.sqlalchemy.org/en/rel_1_1/orm/events.html#sqlalchemy.orm.events.MapperEvents.mapper_configured
@event.listens_for(Item, 'mapper_configured')
def receive_mapper_configured(mapper, class_):
    # unknown polymorphic identities now resolve to the base Item mapper
    mapper.polymorphic_map = defaultdict(lambda: mapper, mapper.polymorphic_map)
    # to prevent 'incompatible polymorphic identity' warning, not mandatory
    mapper._validate_polymorphic_identity = None

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()

session.add(Item(type='normal'))
session.add(Item(type='special'))
session.commit()

# loading only specialised items works
for item in session.query(Item).filter_by(type="special"):
    print(item.specialised_method())

# loading other items no longer fails; unmapped types fall back to Item
for item in session.query(Item):
    print(item.type)
A reusable decorator solution, based on @r-m-n's answer. The custom class is also replaced with collections.defaultdict, which does the same thing.
from collections import defaultdict

from sqlalchemy import event

def receive_mapper_configured(mapper, class_):
    mapper.polymorphic_map = defaultdict(lambda: mapper, mapper.polymorphic_map)
    # to prevent 'incompatible polymorphic identity' warning, not necessary
    mapper._validate_polymorphic_identity = None

def polymorphic_fallback(mapper_klass):
    event.listens_for(mapper_klass, 'mapper_configured')(receive_mapper_configured)
    return mapper_klass
Then in your code you can just add this decorator to base classes:
@polymorphic_fallback
class Item(Base):
    ...

class SpecificItem(Item):
    ...
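To see the fallback in action end to end, here is a minimal, self-contained sketch; it simply combines the decorator above with the Item/SpecialisedItem mapping from the question. The session.expunge_all() call is only there to force the rows to be re-loaded from the database so the polymorphic loading is actually exercised:

from collections import defaultdict

from sqlalchemy import event
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String

def receive_mapper_configured(mapper, class_):
    # unknown polymorphic identities resolve to this (base) mapper
    mapper.polymorphic_map = defaultdict(lambda: mapper, mapper.polymorphic_map)
    # to prevent 'incompatible polymorphic identity' warning, not necessary
    mapper._validate_polymorphic_identity = None

def polymorphic_fallback(mapper_klass):
    event.listens_for(mapper_klass, 'mapper_configured')(receive_mapper_configured)
    return mapper_klass

Base = declarative_base()

@polymorphic_fallback
class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    type = Column(String, index=True)
    __mapper_args__ = {'polymorphic_on': type}

class SpecialisedItem(Item):
    __mapper_args__ = {'polymorphic_identity': 'special'}

    def specialised_method(self):
        return "I am special"

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Item(type='normal'))    # no subclass declares the identity 'normal'
session.add(Item(type='special'))
session.commit()
session.expunge_all()               # force the next query to re-load rows from the database

for item in session.query(Item):
    # prints "normal Item" and "special SpecialisedItem"
    print(item.type, type(item).__name__)

The 'special' row comes back as a SpecialisedItem instance, while the 'normal' row, whose identity has no mapped subclass, falls back to Item instead of raising the AssertionError.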