diff --git a/migrations/versions/storage/9662e80c6ac2_initial_migration.py b/migrations/versions/storage/9662e80c6ac2_initial_migration.py index 04a89a6..e63a8cf 100644 --- a/migrations/versions/storage/9662e80c6ac2_initial_migration.py +++ b/migrations/versions/storage/9662e80c6ac2_initial_migration.py @@ -1,58 +1,59 @@ """initial_migration Revision ID: 9662e80c6ac2 Revises: Create Date: 2025-03-11 16:45:10.460029 """ -from alembic import op import sqlalchemy as sa import sqlmodel +from alembic import op # revision identifiers, used by Alembic. revision = '9662e80c6ac2' down_revision = None branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table('watchers', sa.Column('id', sa.Integer(), nullable=False), sa.Column('presentity_uri', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False), sa.Column('watcher_username', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False), sa.Column('watcher_domain', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False), sa.Column('event', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False), sa.Column('status', sa.Integer(), nullable=False), sa.Column('reason', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=True), sa.Column('inserted_time', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('presentity_uri', 'watcher_username', 'watcher_domain', 'event', name='watcher_idx') ) op.create_table('xcap', sa.Column('id', sa.Integer(), nullable=False), sa.Column('subscriber_id', sa.Integer(), nullable=True), sa.Column('username', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False), sa.Column('domain', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False), sa.Column('doc', sa.LargeBinary(), nullable=False), sa.Column('doc_type', sa.Integer(), nullable=False), sa.Column('etag', sqlmodel.sql.sqltypes.AutoString(length=64), nullable=False), sa.Column('source', sa.Integer(), 
nullable=False), sa.Column('doc_uri', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False), sa.Column('port', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['subscriber_id'], ['subscriber.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('username', 'domain', 'doc_type', 'doc_uri', name='account_doc_type_idx') ) op.create_index('source_idx', 'xcap', ['source'], unique=False) op.create_index('xcap_subscriber_id_exists', 'xcap', ['subscriber_id'], unique=False) # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.drop_index('xcap_subscriber_id_exists', table_name='xcap') op.drop_index('source_idx', table_name='xcap') op.drop_table('xcap') op.drop_table('watchers') # ### end Alembic commands ### diff --git a/xcap/db/initialize.py b/xcap/db/initialize.py index c2953ad..c21ce82 100644 --- a/xcap/db/initialize.py +++ b/xcap/db/initialize.py @@ -1,91 +1,91 @@ import re from alembic import command from alembic.config import Config from alembic.util.exc import CommandError from application import log from sqlalchemy import create_engine from xcap.configuration import DatabaseConfig as XCAPDatabaseConfig from xcap.configuration import ServerConfig class DatabaseConfig(XCAPDatabaseConfig): authentication_db_uri = '' storage_db_uri = '' same_db = False alembic_cfg = Config("alembic.ini", 'storage_db') # Alembic configuration alembic_auth_cfg = Config("alembic.ini", 'auth_db') def retry_on_error(engine, cfg, e): error_message = str(e) log.info(f"Alembic CommandError: {error_message}") locations = cfg.get_main_option("version_locations") set_all_version_locations(cfg) match = re.search(r"Can't locate revision identified by '([\da-f]+)'", error_message) if match: missing_revision = match.group(1) log.info(f"Detected missing revision: {missing_revision}") try: command.downgrade(cfg, f'{missing_revision}@base') # Apply Alembic migrations log.info("Downgrade success") 
except CommandError as retry_error: log.warning(f"Downgrade failed: {retry_error}") return cfg.set_main_option("version_locations", locations) try: command.upgrade(cfg, "head") # Apply Alembic migrations except CommandError as e: log.warning(f"Migration failed: {e}") return def set_all_version_locations(cfg): storage_locations = alembic_cfg.get_main_option("version_locations") auth_locations = alembic_auth_cfg.get_main_option("version_locations") cfg.set_main_option("version_locations", f"{storage_locations}, {auth_locations}") def init_db(): same_db = False - if ServerConfig.backend not in ['Database', 'OpenSIPS'] and not DatabaseConfig.storage_db_uri or not DatabaseConfig.authentication_db_uri: + if ServerConfig.backend not in ['Database', 'OpenSIPS'] or not DatabaseConfig.storage_db_uri or not DatabaseConfig.authentication_db_uri: return if DatabaseConfig.authentication_db_uri == DatabaseConfig.storage_db_uri: same_db = True set_all_version_locations(alembic_auth_cfg) if DatabaseConfig.authentication_db_uri.startswith('sqlite'): auth_engine = create_engine(DatabaseConfig.authentication_db_uri, connect_args={"check_same_thread": False}) alembic_auth_cfg.set_main_option("sqlalchemy.url", DatabaseConfig.authentication_db_uri) # Path to migrations with auth_engine.connect(): if same_db: command.upgrade(alembic_auth_cfg, "heads") # Apply Alembic migrations log.info("Database initialized and migrations applied.") else: try: command.upgrade(alembic_auth_cfg, "head") # Apply Alembic migrations except CommandError as e: retry_on_error(auth_engine, alembic_auth_cfg, e) log.info("Authentication database initialized and migrations applied.") if not same_db and DatabaseConfig.storage_db_uri.startswith('sqlite'): engine = create_engine(DatabaseConfig.storage_db_uri, connect_args={"check_same_thread": False}) alembic_cfg.set_main_option("sqlalchemy.url", DatabaseConfig.storage_db_uri) # Path to migrations with engine.connect(): try: command.upgrade(alembic_cfg, "head") # Apply
Alembic migrations except CommandError as e: retry_on_error(engine, alembic_cfg, e) log.info("Storage database initialized and migrations applied.") # Main function to initialize and create tables if __name__ == "__main__": init_db() diff --git a/xcap/db/models/sipthor_db.py b/xcap/db/models/sipthor_db.py index 9b586ea..9a772b5 100644 --- a/xcap/db/models/sipthor_db.py +++ b/xcap/db/models/sipthor_db.py @@ -1,58 +1,60 @@ from typing import List, Optional from pydantic import BaseModel from sqlalchemy import JSON, Column from sqlalchemy.orm.attributes import flag_modified from sqlmodel import Field, Relationship, SQLModel class DataObject(BaseModel): class Config: # Allow extra fields in the data object and treat them as attributes extra = "allow" class SipAccountData(SQLModel, table=True): __tablename__ = 'sip_accounts_data' + __database__ = 'sipthor_db' id: int = Field(default=None, primary_key=True) account_id: int = Field(default=None, foreign_key="sip_accounts_meta.id") profile: Optional[dict] = Field(default=None, sa_column=Column(JSON)) account: "SipAccount" = Relationship(back_populates="data", sa_relationship_kwargs={"lazy": "joined"}, ) class SipAccount(SQLModel, table=True): __tablename__ = 'sip_accounts_meta' + __database__ = 'sipthor_db' id: int = Field(default=None, primary_key=True) username: str = Field(max_length=64) domain: str = Field(max_length=64) first_name: Optional[str] = Field(default=None, max_length=64) last_name: Optional[str] = Field(default=None, max_length=64) email: Optional[str] = Field(default=None, max_length=64) customer_id: int = Field(default=0) reseller_id: int = Field(default=0) owner_id: int = Field(default=0) change_date: Optional[str] = Field(default=None) # Relationships data: List[SipAccountData] = Relationship(back_populates="account", sa_relationship_kwargs={"lazy": "joined"}, # cascade='all, delete-orphan' ) def set_profile(self, value: dict): if not self.data: SipAccountData(account=self, profile=value) else: 
flag_modified(self.data[0], "profile") self.data[0].profile = value @property def profile(self) -> Optional[dict]: return self.data[0].profile if self.data else None @profile.setter def profile(self, value: dict): self.set_profile(value) diff --git a/xcap/db/models/storage_db.py b/xcap/db/models/storage_db.py index e6f5ab7..967cb6f 100644 --- a/xcap/db/models/storage_db.py +++ b/xcap/db/models/storage_db.py @@ -1,49 +1,49 @@ from datetime import datetime from typing import Optional -from sqlmodel import (Field, ForeignKey, Index, Relationship, SQLModel, - UniqueConstraint) +from sqlmodel import (Column, Field, ForeignKey, Index, Integer, Relationship, + SQLModel, UniqueConstraint) from xcap.configuration import DatabaseConfig class XCAP(SQLModel, table=True): __tablename__ = DatabaseConfig.xcap_table __database__ = 'storage_db' id: Optional[int] = Field(default=None, primary_key=True) - subscriber_id: Optional[int] = Field(default=None, sa_column=ForeignKey("subscriber.id", ondelete="CASCADE")) + subscriber_id: Optional[int] = Field(default=None, sa_column=Column(Integer, ForeignKey("subscriber.id", ondelete="CASCADE"))) username: str = Field(max_length=64) domain: str = Field(max_length=64) doc: bytes # Representing longblob as bytes doc_type: int etag: str = Field(max_length=64) source: int = Field(default=0) doc_uri: str = Field(max_length=255) port: int = Field(default=0) __table_args__ = ( UniqueConstraint("username", "domain", "doc_type", "doc_uri", name="account_doc_type_idx"), Index("xcap_subscriber_id_exists", "subscriber_id"), Index("source_idx", "source"), ) # subscriber: Optional["Subscriber"] = Relationship(back_populates="none", cascade="all, delete-orphan") class Watcher(SQLModel, table=True): __tablename__ = 'watchers' __database__ = 'storage_db' id: int = Field(default=None, primary_key=True) presentity_uri: str = Field(max_length=255) watcher_username: str = Field(max_length=64) watcher_domain: str = Field(max_length=64) event: str = 
Field(default="presence", max_length=64) status: int reason: Optional[str] = Field(default=None, max_length=64) inserted_time: int # Unique constraint for multiple columns __table_args__ = ( UniqueConstraint('presentity_uri', 'watcher_username', 'watcher_domain', 'event', name='watcher_idx'), )