aiida.storage.psql_dos package#
Module with implementation of the storage backend using PostgreSQL and the disk-objectstore.
Subpackages#
- aiida.storage.psql_dos.migrations package
- aiida.storage.psql_dos.models package
- Submodules
DbAuthInfo
DbAuthInfo.__init__()
DbAuthInfo.__mapper__
DbAuthInfo.__module__
DbAuthInfo.__str__()
DbAuthInfo.__table__
DbAuthInfo.__table_args__
DbAuthInfo.__tablename__
DbAuthInfo._metadata
DbAuthInfo._sa_class_manager
DbAuthInfo.aiidauser
DbAuthInfo.aiidauser_id
DbAuthInfo.auth_params
DbAuthInfo.dbcomputer
DbAuthInfo.dbcomputer_id
DbAuthInfo.enabled
DbAuthInfo.id
Model
get_orm_metadata()
instant_defaults_listener()
DbComment
DbComment.__init__()
DbComment.__mapper__
DbComment.__module__
DbComment.__str__()
DbComment.__table__
DbComment.__tablename__
DbComment._sa_class_manager
DbComment.content
DbComment.ctime
DbComment.dbnode
DbComment.dbnode_id
DbComment.id
DbComment.mtime
DbComment.user
DbComment.user_id
DbComment.uuid
DbComputer
DbComputer.__init__()
DbComputer.__mapper__
DbComputer.__module__
DbComputer.__str__()
DbComputer.__table__
DbComputer.__table_args__
DbComputer.__tablename__
DbComputer._metadata
DbComputer._sa_class_manager
DbComputer.description
DbComputer.hostname
DbComputer.id
DbComputer.label
DbComputer.pk
DbComputer.scheduler_type
DbComputer.transport_type
DbComputer.uuid
DbGroup
DbGroup.__init__()
DbGroup.__mapper__
DbGroup.__module__
DbGroup.__str__()
DbGroup.__table__
DbGroup.__table_args__
DbGroup.__tablename__
DbGroup._sa_class_manager
DbGroup.dbnodes
DbGroup.description
DbGroup.extras
DbGroup.id
DbGroup.label
DbGroup.pk
DbGroup.time
DbGroup.type_string
DbGroup.user
DbGroup.user_id
DbGroup.uuid
DbGroupNode
DbLog
DbLink
DbNode
DbNode.__init__()
DbNode.__mapper__
DbNode.__module__
DbNode.__str__()
DbNode.__table__
DbNode.__table_args__
DbNode.__tablename__
DbNode._sa_class_manager
DbNode.attributes
DbNode.ctime
DbNode.dbcomputer
DbNode.dbcomputer_id
DbNode.description
DbNode.extras
DbNode.get_simple_name()
DbNode.id
DbNode.inputs
DbNode.label
DbNode.mtime
DbNode.node_type
DbNode.outputs
DbNode.outputs_q
DbNode.pk
DbNode.process_type
DbNode.repository_metadata
DbNode.user
DbNode.user_id
DbNode.uuid
DbSetting
DbUser
- Submodules
- aiida.storage.psql_dos.orm package
- Subpackages
- Submodules
SqlaAuthInfo
SqlaAuthInfo.COMPUTER_CLASS
SqlaAuthInfo.MODEL_CLASS
SqlaAuthInfo.USER_CLASS
SqlaAuthInfo.__abstractmethods__
SqlaAuthInfo.__init__()
SqlaAuthInfo.__module__
SqlaAuthInfo.__orig_bases__
SqlaAuthInfo.__parameters__
SqlaAuthInfo._abc_impl
SqlaAuthInfo._model
SqlaAuthInfo.computer
SqlaAuthInfo.enabled
SqlaAuthInfo.get_auth_params()
SqlaAuthInfo.get_metadata()
SqlaAuthInfo.id
SqlaAuthInfo.is_stored
SqlaAuthInfo.set_auth_params()
SqlaAuthInfo.set_metadata()
SqlaAuthInfo.user
SqlaAuthInfoCollection
SqlaComment
SqlaComment.MODEL_CLASS
SqlaComment.USER_CLASS
SqlaComment.__abstractmethods__
SqlaComment.__init__()
SqlaComment.__module__
SqlaComment.__orig_bases__
SqlaComment.__parameters__
SqlaComment._abc_impl
SqlaComment._model
SqlaComment.content
SqlaComment.ctime
SqlaComment.mtime
SqlaComment.node
SqlaComment.set_content()
SqlaComment.set_mtime()
SqlaComment.set_user()
SqlaComment.store()
SqlaComment.user
SqlaComment.uuid
SqlaCommentCollection
SqlaComputer
SqlaComputer.MODEL_CLASS
SqlaComputer.__abstractmethods__
SqlaComputer.__init__()
SqlaComputer.__module__
SqlaComputer.__orig_bases__
SqlaComputer.__parameters__
SqlaComputer._abc_impl
SqlaComputer._model
SqlaComputer.copy()
SqlaComputer.description
SqlaComputer.get_metadata()
SqlaComputer.get_scheduler_type()
SqlaComputer.get_transport_type()
SqlaComputer.hostname
SqlaComputer.id
SqlaComputer.is_stored
SqlaComputer.label
SqlaComputer.pk
SqlaComputer.set_description()
SqlaComputer.set_hostname()
SqlaComputer.set_label()
SqlaComputer.set_metadata()
SqlaComputer.set_scheduler_type()
SqlaComputer.set_transport_type()
SqlaComputer.store()
SqlaComputer.uuid
SqlaComputerCollection
_()
get_backend_entity()
SqlaModelEntity
SqlaModelEntity.MODEL_CLASS
SqlaModelEntity.__annotations__
SqlaModelEntity.__dict__
SqlaModelEntity.__init__()
SqlaModelEntity.__module__
SqlaModelEntity.__orig_bases__
SqlaModelEntity.__parameters__
SqlaModelEntity.__weakref__
SqlaModelEntity._class_check()
SqlaModelEntity._flush_if_stored()
SqlaModelEntity._model
SqlaModelEntity.bare_model
SqlaModelEntity.from_dbmodel()
SqlaModelEntity.id
SqlaModelEntity.is_stored
SqlaModelEntity.model
SqlaModelEntity.store()
ExtrasMixin
ExtrasMixin.__annotations__
ExtrasMixin.__dict__
ExtrasMixin.__module__
ExtrasMixin.__weakref__
ExtrasMixin.bare_model
ExtrasMixin.clear_extras()
ExtrasMixin.delete_extra()
ExtrasMixin.delete_extra_many()
ExtrasMixin.extras
ExtrasMixin.extras_items()
ExtrasMixin.extras_keys()
ExtrasMixin.get_extra()
ExtrasMixin.is_stored
ExtrasMixin.model
ExtrasMixin.reset_extras()
ExtrasMixin.set_extra()
ExtrasMixin.set_extra_many()
SqlaGroup
SqlaGroup.GROUP_NODE_CLASS
SqlaGroup.MODEL_CLASS
SqlaGroup.NODE_CLASS
SqlaGroup.USER_CLASS
SqlaGroup.__abstractmethods__
SqlaGroup.__init__()
SqlaGroup.__module__
SqlaGroup.__orig_bases__
SqlaGroup.__parameters__
SqlaGroup._abc_impl
SqlaGroup._model
SqlaGroup.add_nodes()
SqlaGroup.clear()
SqlaGroup.count()
SqlaGroup.description
SqlaGroup.is_stored
SqlaGroup.label
SqlaGroup.nodes
SqlaGroup.pk
SqlaGroup.remove_nodes()
SqlaGroup.store()
SqlaGroup.type_string
SqlaGroup.user
SqlaGroup.uuid
SqlaGroupCollection
SqlaLog
SqlaLogCollection
SqlaNode
SqlaNode.COMPUTER_CLASS
SqlaNode.LINK_CLASS
SqlaNode.MODEL_CLASS
SqlaNode.USER_CLASS
SqlaNode.__abstractmethods__
SqlaNode.__init__()
SqlaNode.__module__
SqlaNode.__orig_bases__
SqlaNode.__parameters__
SqlaNode._abc_impl
SqlaNode._add_link()
SqlaNode._model
SqlaNode.add_incoming()
SqlaNode.attributes
SqlaNode.attributes_items()
SqlaNode.attributes_keys()
SqlaNode.clean_values()
SqlaNode.clear_attributes()
SqlaNode.clone()
SqlaNode.computer
SqlaNode.ctime
SqlaNode.delete_attribute()
SqlaNode.delete_attribute_many()
SqlaNode.description
SqlaNode.get_attribute()
SqlaNode.label
SqlaNode.mtime
SqlaNode.node_type
SqlaNode.process_type
SqlaNode.repository_metadata
SqlaNode.reset_attributes()
SqlaNode.set_attribute()
SqlaNode.set_attribute_many()
SqlaNode.store()
SqlaNode.user
SqlaNode.uuid
SqlaNodeCollection
SqlaUser
SqlaUserCollection
ModelWrapper
ModelWrapper.__dict__
ModelWrapper.__getattr__()
ModelWrapper.__init__()
ModelWrapper.__module__
ModelWrapper.__setattr__()
ModelWrapper.__weakref__
ModelWrapper._ensure_model_uptodate()
ModelWrapper._flush()
ModelWrapper._in_transaction()
ModelWrapper._is_model_field()
ModelWrapper._is_mutable_model_field()
ModelWrapper.is_saved()
ModelWrapper.save()
ModelWrapper.session
disable_expire_on_commit()
Submodules#
Simple wrapper around the alembic command line tool that first loads an AiiDA profile.
- class aiida.storage.psql_dos.alembic_cli.AlembicRunner[source]#
Bases:
object
Wrapper around the alembic command line tool that first loads an AiiDA profile.
- __dict__ = mappingproxy({'__module__': 'aiida.storage.psql_dos.alembic_cli', '__doc__': 'Wrapper around the alembic command line tool that first loads an AiiDA profile.', '__init__': <function AlembicRunner.__init__>, 'execute_alembic_command': <function AlembicRunner.execute_alembic_command>, '__dict__': <attribute '__dict__' of 'AlembicRunner' objects>, '__weakref__': <attribute '__weakref__' of 'AlembicRunner' objects>, '__annotations__': {'profile': 'Profile | None'}})#
- __module__ = 'aiida.storage.psql_dos.alembic_cli'#
- __weakref__#
list of weak references to the object
SqlAlchemy implementation of aiida.orm.implementation.backends.Backend.
- class aiida.storage.psql_dos.backend.PsqlDosBackend(profile: Profile)[source]#
Bases:
StorageBackend
An AiiDA storage backend that stores data in a PostgreSQL database and disk-objectstore repository.
Note, there were originally two such backends, sqlalchemy and django. The django backend was removed, to consolidate access to this storage.
- class Model(**data: Any)[source]#
Bases:
BaseModel
Model describing required information to configure an instance of the storage.
- __abstractmethods__ = frozenset({})#
- __annotations__ = {'__class_vars__': 'ClassVar[set[str]]', '__private_attributes__': 'ClassVar[Dict[str, ModelPrivateAttr]]', '__pydantic_complete__': 'ClassVar[bool]', '__pydantic_core_schema__': 'ClassVar[CoreSchema]', '__pydantic_custom_init__': 'ClassVar[bool]', '__pydantic_decorators__': 'ClassVar[_decorators.DecoratorInfos]', '__pydantic_extra__': 'dict[str, Any] | None', '__pydantic_fields_set__': 'set[str]', '__pydantic_generic_metadata__': 'ClassVar[_generics.PydanticGenericMetadata]', '__pydantic_parent_namespace__': 'ClassVar[Dict[str, Any] | None]', '__pydantic_post_init__': "ClassVar[None | Literal['model_post_init']]", '__pydantic_private__': 'dict[str, Any] | None', '__pydantic_root_model__': 'ClassVar[bool]', '__pydantic_serializer__': 'ClassVar[SchemaSerializer]', '__pydantic_validator__': 'ClassVar[SchemaValidator | PluggableSchemaValidator]', '__signature__': 'ClassVar[Signature]', 'database_engine': <class 'str'>, 'database_hostname': <class 'str'>, 'database_name': <class 'str'>, 'database_password': <class 'str'>, 'database_port': <class 'int'>, 'database_username': <class 'str'>, 'model_computed_fields': 'ClassVar[Dict[str, ComputedFieldInfo]]', 'model_config': 'ClassVar[ConfigDict]', 'model_fields': 'ClassVar[Dict[str, FieldInfo]]', 'repository_uri': <class 'str'>}#
- __dict__#
- __module__ = 'aiida.storage.psql_dos.backend'#
- __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] = {}#
Metadata about the private attributes of the model.
- __pydantic_complete__: ClassVar[bool] = True#
Whether model building is completed, or if there are still undefined fields.
- __pydantic_core_schema__: ClassVar[CoreSchema] = {'cls': <class 'aiida.storage.psql_dos.backend.PsqlDosBackend.Model'>, 'config': {'title': 'Model'}, 'custom_init': False, 'metadata': {'pydantic_js_annotation_functions': [], 'pydantic_js_functions': [functools.partial(<function modify_model_json_schema>, cls=<class 'aiida.storage.psql_dos.backend.PsqlDosBackend.Model'>, title=None), <bound method BaseModel.__get_pydantic_json_schema__ of <class 'aiida.storage.psql_dos.backend.PsqlDosBackend.Model'>>]}, 'ref': 'aiida.storage.psql_dos.backend.PsqlDosBackend.Model:93947034815552', 'root_model': False, 'schema': {'computed_fields': [], 'fields': {'database_engine': {'metadata': {'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'default': 'postgresql_psycopg', 'schema': {'type': 'str'}, 'type': 'default'}, 'type': 'model-field'}, 'database_hostname': {'metadata': {'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'default': 'localhost', 'schema': {'type': 'str'}, 'type': 'default'}, 'type': 'model-field'}, 'database_name': {'metadata': {'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'type': 'str'}, 'type': 'model-field'}, 'database_password': {'metadata': {'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'type': 'str'}, 'type': 'model-field'}, 'database_port': {'metadata': {'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'default': 5432, 'schema': {'type': 'int'}, 'type': 'default'}, 'type': 'model-field'}, 'database_username': {'metadata': 
{'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'type': 'str'}, 'type': 'model-field'}, 'repository_uri': {'metadata': {'pydantic_js_annotation_functions': [<function get_json_schema_update_func.<locals>.json_schema_update_func>], 'pydantic_js_functions': []}, 'schema': {'type': 'str'}, 'type': 'model-field'}}, 'model_name': 'Model', 'type': 'model-fields'}, 'type': 'model'}#
The core schema of the model.
- __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] = DecoratorInfos(validators={}, field_validators={}, root_validators={}, field_serializers={}, model_serializers={}, model_validators={}, computed_fields={})#
Metadata containing the decorators defined on the model. This replaces Model.__validators__ and Model.__root_validators__ from Pydantic V1.
- __pydantic_extra__: dict[str, Any] | None#
A dictionary containing extra values, if [extra][pydantic.config.ConfigDict.extra] is set to ‘allow’.
- __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata] = {'args': (), 'origin': None, 'parameters': ()}#
Metadata for generic models; contains data used for a similar purpose to __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
- __pydantic_parent_namespace__: ClassVar[Dict[str, Any] | None] = {'__doc__': 'An AiiDA storage backend that stores data in a PostgreSQL database and disk-objectstore repository.\n\n Note, there were originally two such backends, `sqlalchemy` and `django`.\n The `django` backend was removed, to consolidate access to this storage.\n ', '__module__': 'aiida.storage.psql_dos.backend', '__qualname__': 'PsqlDosBackend', 'subject': <pydantic._internal._mock_val_ser.MockValSer object>}#
Parent namespace of the model, used for automatic rebuilding of models.
- __pydantic_post_init__: ClassVar[None | Literal['model_post_init']] = None#
The name of the post-init method for the model, if defined.
- __pydantic_private__: dict[str, Any] | None#
Values of private attributes set on the model instance.
- __pydantic_serializer__: ClassVar[SchemaSerializer] = SchemaSerializer(serializer=Model( ModelSerializer { class: Py( 0x00005571bfa34c40, ), serializer: Fields( GeneralFieldsSerializer { fields: { "database_name": SerField { key_py: Py( 0x00007fc9abd84330, ), alias: None, alias_py: None, serializer: Some( Str( StrSerializer, ), ), required: true, }, "repository_uri": SerField { key_py: Py( 0x00007fc9abd843f0, ), alias: None, alias_py: None, serializer: Some( Str( StrSerializer, ), ), required: true, }, "database_hostname": SerField { key_py: Py( 0x00007fc9abd73cd0, ), alias: None, alias_py: None, serializer: Some( WithDefault( WithDefaultSerializer { default: Default( Py( 0x00007fc9b2f994b0, ), ), serializer: Str( StrSerializer, ), }, ), ), required: true, }, "database_username": SerField { key_py: Py( 0x00007fc9abd73d20, ), alias: None, alias_py: None, serializer: Some( Str( StrSerializer, ), ), required: true, }, "database_password": SerField { key_py: Py( 0x00007fc9abd73d70, ), alias: None, alias_py: None, serializer: Some( Str( StrSerializer, ), ), required: true, }, "database_engine": SerField { key_py: Py( 0x00007fc9abd840b0, ), alias: None, alias_py: None, serializer: Some( WithDefault( WithDefaultSerializer { default: Default( Py( 0x00007fc9abb0ee70, ), ), serializer: Str( StrSerializer, ), }, ), ), required: true, }, "database_port": SerField { key_py: Py( 0x00007fc9abd841b0, ), alias: None, alias_py: None, serializer: Some( WithDefault( WithDefaultSerializer { default: Default( Py( 0x00007fc9a945c7d0, ), ), serializer: Int( IntSerializer, ), }, ), ), required: true, }, }, computed_fields: Some( ComputedFields( [], ), ), mode: SimpleDict, extra_serializer: None, filter: SchemaFilter { include: None, exclude: None, }, required_fields: 7, }, ), has_extra: false, root_model: false, name: "Model", }, ), definitions=[])#
The pydantic-core SchemaSerializer used to dump instances of the model.
- __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator] = SchemaValidator(title="Model", validator=Model( ModelValidator { revalidate: Never, validator: ModelFields( ModelFieldsValidator { fields: [ Field { name: "database_engine", lookup_key: Simple { key: "database_engine", py_key: Py( 0x00007fc979e4ebf0, ), path: LookupPath( [ S( "database_engine", Py( 0x00007fc979e4e430, ), ), ], ), }, name_py: Py( 0x00007fc9abd840b0, ), validator: WithDefault( WithDefaultValidator { default: Default( Py( 0x00007fc9abb0ee70, ), ), on_error: Raise, validator: Str( StrValidator { strict: false, coerce_numbers_to_str: false, }, ), validate_default: false, copy_default: false, name: "default[str]", undefined: Py( 0x00007fc9abfb2f20, ), }, ), frozen: false, }, Field { name: "database_hostname", lookup_key: Simple { key: "database_hostname", py_key: Py( 0x00007fc979ec19d0, ), path: LookupPath( [ S( "database_hostname", Py( 0x00007fc979ff9390, ), ), ], ), }, name_py: Py( 0x00007fc9abd73cd0, ), validator: WithDefault( WithDefaultValidator { default: Default( Py( 0x00007fc9b2f994b0, ), ), on_error: Raise, validator: Str( StrValidator { strict: false, coerce_numbers_to_str: false, }, ), validate_default: false, copy_default: false, name: "default[str]", undefined: Py( 0x00007fc9abfb2f20, ), }, ), frozen: false, }, Field { name: "database_port", lookup_key: Simple { key: "database_port", py_key: Py( 0x00007fc979e4e630, ), path: LookupPath( [ S( "database_port", Py( 0x00007fc979e4df70, ), ), ], ), }, name_py: Py( 0x00007fc9abd841b0, ), validator: WithDefault( WithDefaultValidator { default: Default( Py( 0x00007fc9a945c7d0, ), ), on_error: Raise, validator: Int( IntValidator { strict: false, }, ), validate_default: false, copy_default: false, name: "default[int]", undefined: Py( 0x00007fc9abfb2f20, ), }, ), frozen: false, }, Field { name: "database_username", lookup_key: Simple { key: "database_username", py_key: Py( 0x00007fc979a150c0, ), path: LookupPath( [ S( 
"database_username", Py( 0x00007fc97a9ecc10, ), ), ], ), }, name_py: Py( 0x00007fc9abd73d20, ), validator: Str( StrValidator { strict: false, coerce_numbers_to_str: false, }, ), frozen: false, }, Field { name: "database_password", lookup_key: Simple { key: "database_password", py_key: Py( 0x00007fc97ac20350, ), path: LookupPath( [ S( "database_password", Py( 0x00007fc97ac212a0, ), ), ], ), }, name_py: Py( 0x00007fc9abd73d70, ), validator: Str( StrValidator { strict: false, coerce_numbers_to_str: false, }, ), frozen: false, }, Field { name: "database_name", lookup_key: Simple { key: "database_name", py_key: Py( 0x00007fc979d7f230, ), path: LookupPath( [ S( "database_name", Py( 0x00007fc979d7eaf0, ), ), ], ), }, name_py: Py( 0x00007fc9abd84330, ), validator: Str( StrValidator { strict: false, coerce_numbers_to_str: false, }, ), frozen: false, }, Field { name: "repository_uri", lookup_key: Simple { key: "repository_uri", py_key: Py( 0x00007fc979d7fe30, ), path: LookupPath( [ S( "repository_uri", Py( 0x00007fc979d7f8b0, ), ), ], ), }, name_py: Py( 0x00007fc9abd843f0, ), validator: Str( StrValidator { strict: false, coerce_numbers_to_str: false, }, ), frozen: false, }, ], model_name: "Model", extra_behavior: Ignore, extras_validator: None, strict: false, from_attributes: false, loc_by_alias: true, }, ), class: Py( 0x00005571bfa34c40, ), post_init: None, frozen: false, custom_init: false, root_model: false, undefined: Py( 0x00007fc9abfb2f20, ), name: "Model", }, ), definitions=[], cache_strings=True)#
The pydantic-core SchemaValidator used to validate instances of the model.
- __weakref__#
list of weak references to the object
- _abc_impl = <_abc._abc_data object>#
- model_computed_fields: ClassVar[Dict[str, ComputedFieldInfo]] = {}#
A dictionary of computed field names and their corresponding ComputedFieldInfo objects.
- model_config: ClassVar[ConfigDict] = {'defer_build': True}#
Configuration for the model, should be a dictionary conforming to [ConfigDict][pydantic.config.ConfigDict].
- model_fields: ClassVar[Dict[str, FieldInfo]] = {'database_engine': FieldInfo(annotation=str, required=False, default='postgresql_psycopg', title='PostgreSQL engine', description='The engine to use to connect to the database.'), 'database_hostname': FieldInfo(annotation=str, required=False, default='localhost', title='PostgreSQL hostname', description='The hostname of the PostgreSQL server.'), 'database_name': FieldInfo(annotation=str, required=True, title='PostgreSQL database name', description='The name of the database in the PostgreSQL server.'), 'database_password': FieldInfo(annotation=str, required=True, title='PostgreSQL password', description='The password with which to connect to the PostgreSQL server.'), 'database_port': FieldInfo(annotation=int, required=False, default=5432, title='PostgreSQL port', description='The port of the PostgreSQL server.'), 'database_username': FieldInfo(annotation=str, required=True, title='PostgreSQL username', description='The username with which to connect to the PostgreSQL server.'), 'repository_uri': FieldInfo(annotation=str, required=True, title='File repository URI', description='URI to the file repository.')}#
Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo] objects.
This replaces Model.__fields__ from Pydantic V1.
- __abstractmethods__ = frozenset({})#
- __init__(profile: Profile) None [source]#
Initialize the backend, for this profile.
- Raises:
aiida.common.exceptions.UnreachableStorage
if the storage cannot be accessed
- Raises:
aiida.common.exceptions.IncompatibleStorageSchema
if the profile’s storage schema is not at the latest version (and thus should be migrated)
- Raises:
aiida.common.exceptions.CorruptStorage
if the storage is internally inconsistent
- __module__ = 'aiida.storage.psql_dos.backend'#
- _abc_impl = <_abc._abc_data object>#
- _backup_storage(manager: BackupManager, path: Path, prev_backup: Path | None = None) None [source]#
Create a backup of the postgres database and disk-objectstore to the provided path.
- Parameters:
manager – BackupManager from backup_utils containing utilities such as for calling the rsync.
path – Path to where the backup will be created.
prev_backup – Path to the previous backup. Rsync calls will be hard-linked to this path, making the backup incremental and efficient.
- _clear() None [source]#
Clear the storage, removing all data.
Warning
This is a destructive operation, and should only be used for testing purposes.
- static _get_mapper_from_entity(entity_type: EntityTypes, with_pk: bool)[source]#
Return the Sqlalchemy mapper and fields corresponding to the given entity.
- Parameters:
with_pk – if True, the fields returned will include the primary key
- _initialise_session()[source]#
Initialise the SQLAlchemy session factory.
Only one session factory is ever associated with a given class instance, i.e. once the instance is closed, it cannot be reopened.
The session factory, returns a session that is bound to the current thread. Multi-thread support is currently required by the REST API. Although, in the future, we may want to move the multi-thread handling to higher in the AiiDA stack.
- property authinfos#
Return the collection of authorisation information objects
- bulk_insert(entity_type: EntityTypes, rows: List[dict], allow_defaults: bool = False) List[int] [source]#
Insert a list of entities into the database, directly into a backend transaction.
- Parameters:
entity_type – The type of the entity
rows – A list of dictionaries, containing all fields of the backend model, except the id field (a.k.a primary key), which will be generated dynamically
allow_defaults – If
False
, assert that each row contains all fields (except primary key(s)), otherwise, allow default values for missing fields.
- Raises:
IntegrityError
if the keys in a row are not a subset of the columns in the table
- Returns:
The list of generated primary keys for the entities
- bulk_update(entity_type: EntityTypes, rows: List[dict]) None [source]#
Update a list of entities in the database, directly with a backend transaction.
- Parameters:
entity_type – The type of the entity
rows – A list of dictionaries, containing fields of the backend model to update, and the id field (a.k.a primary key)
- Raises:
IntegrityError
if the keys in a row are not a subset of the columns in the table
- property comments#
Return the collection of comments
- property computers#
Return the collection of computers
- delete(delete_database_user: bool = False) None [source]#
Delete the storage and all the data.
- Parameters:
delete_database_user – Also delete the database user. This is
False
by default because the user may be used by other databases.
- delete_nodes_and_connections(pks_to_delete: Sequence[int]) None [source]#
Delete all nodes corresponding to pks in the input and any links to/from them.
This method is intended to be used within a transaction context.
- Parameters:
pks_to_delete – a sequence of node pks to delete
- Raises:
AssertionError
if a transaction is not active
- get_backend_entity(model: Model) BackendEntity [source]#
Return the backend entity that corresponds to the given Model instance
- Parameters:
model – the ORM model instance to promote to a backend instance
- Returns:
the backend entity corresponding to the given model
- get_global_variable(key: str) None | str | int | float [source]#
Return a global variable from the storage.
- Parameters:
key – the key of the setting
- Raises:
KeyError if the setting does not exist
- get_info(detailed: bool = False) dict [source]#
Return general information on the storage.
- Parameters:
detailed – flag to request more detailed information about the content of the storage.
- Returns:
a nested dict with the relevant information.
- get_repository() DiskObjectStoreRepositoryBackend [source]#
Return the object repository configured for this backend.
- get_unreferenced_keyset(check_consistency: bool = True) Set[str] [source]#
Returns the keyset of objects that exist in the repository but are not tracked by AiiDA.
This should be all the soft-deleted files.
- Parameters:
check_consistency – toggle for a check that raises if there are references in the database with no actual object in the underlying repository.
- Returns:
a set with all the objects in the underlying repository that are not referenced in the database.
- property groups#
Return the collection of groups
- classmethod initialise(profile: Profile, reset: bool = False) bool [source]#
Initialise the storage backend.
This is typically used once when a new storage backend is created. If this method returns without exceptions the storage backend is ready for use. If the backend already seems initialised, this method is a no-op.
- Parameters:
reset – If
true
, destroy the backend if it already exists including all of its data before recreating and initialising it. This is useful for example for test profiles that need to be reset before or after tests having run.
- Returns:
True
if the storage was initialised by the function call,
False
if it was already initialised.
- property logs#
Return the collection of logs
- maintain(full: bool = False, dry_run: bool = False, **kwargs) None [source]#
Perform maintenance tasks on the storage.
If full == True, then this method may attempt to block the profile associated with the storage to guarantee the safety of its procedures. This will not only prevent any other subsequent process from accessing that profile, but will also first check if there is already any process using it and raise if that is the case. The user will have to manually stop any processes that is currently accessing the profile themselves or wait for it to finish on its own.
- Parameters:
full – flag to perform operations that require to stop using the profile to be maintained.
dry_run – flag to only print the actions that would be taken without actually executing them.
- classmethod migrate(profile: Profile) None [source]#
Migrate the storage of a profile to the latest schema version.
If the schema version is already the latest version, this method does nothing. If the storage is uninitialised, this method will raise an exception.
- Raises:
aiida.common.exceptions.UnreachableStorage
if the storage cannot be accessed.
- Raises:
StorageMigrationError
if the storage is not initialised.
- migrator#
alias of
PsqlDosMigrator
- property nodes#
Return the collection of nodes
- set_global_variable(key: str, value: None | str | int | float, description: str | None = None, overwrite=True) None [source]#
Set a global variable in the storage.
- Parameters:
key – the key of the setting
value – the value of the setting
description – the description of the setting (optional)
overwrite – if True, overwrite the setting if it already exists
- Raises:
ValueError if the key already exists and overwrite is False
- transaction() Iterator[Session] [source]#
Open a transaction to be used as a context manager.
If there is an exception within the context then the changes will be rolled back and the state will be as before entering. Transactions can be nested.
- property users#
Return the collection of users
- aiida.storage.psql_dos.backend.get_filepath_container(profile: Profile) Path [source]#
Return the filepath of the disk-object store container.
Schema validation and migration utilities.
This code interacts directly with the database, outside of the ORM, taking a Profile as input for the connection configuration.
Important
This code should only be accessed via the storage backend class, not directly!
- class aiida.storage.psql_dos.migrator.PsqlDosMigrator(profile: Profile)[source]#
Bases:
object
Class for validating and migrating psql_dos storage instances.
Important
This class should only be accessed via the storage backend class (apart from for test purposes)
- __dict__ = mappingproxy({'__module__': 'aiida.storage.psql_dos.migrator', '__doc__': 'Class for validating and migrating `psql_dos` storage instances.\n\n .. important:: This class should only be accessed via the storage backend class (apart from for test purposes)\n ', 'alembic_version_tbl_name': 'alembic_version', 'django_version_table': <sqlalchemy.sql.selectable.TableClause at 0x7fc9a947c810; django_migrations>, '__init__': <function PsqlDosMigrator.__init__>, 'close': <function PsqlDosMigrator.close>, 'connection': <property object>, 'get_schema_versions': <classmethod(<function PsqlDosMigrator.get_schema_versions>)>, 'get_schema_version_head': <classmethod(<function PsqlDosMigrator.get_schema_version_head>)>, 'get_schema_version_profile': <function PsqlDosMigrator.get_schema_version_profile>, 'validate_storage': <function PsqlDosMigrator.validate_storage>, 'get_container': <function PsqlDosMigrator.get_container>, 'get_repository_uuid': <function PsqlDosMigrator.get_repository_uuid>, 'initialise': <function PsqlDosMigrator.initialise>, 'is_initialised': <property object>, 'is_repository_initialised': <property object>, 'is_database_initialised': <property object>, 'reset_repository': <function PsqlDosMigrator.reset_repository>, 'reset_database': <function PsqlDosMigrator.reset_database>, 'initialise_repository': <function PsqlDosMigrator.initialise_repository>, 'initialise_database': <function PsqlDosMigrator.initialise_database>, 'delete_all_tables': <function PsqlDosMigrator.delete_all_tables>, 'migrate': <function PsqlDosMigrator.migrate>, 'migrate_up': <function PsqlDosMigrator.migrate_up>, 'migrate_down': <function PsqlDosMigrator.migrate_down>, '_alembic_config': <staticmethod(<function PsqlDosMigrator._alembic_config>)>, '_alembic_script': <classmethod(<function PsqlDosMigrator._alembic_script>)>, '_alembic_connect': <function PsqlDosMigrator._alembic_connect>, '_migration_context': <function PsqlDosMigrator._migration_context>, 'session': <function 
PsqlDosMigrator.session>, 'get_current_table': <function PsqlDosMigrator.get_current_table>, '__dict__': <attribute '__dict__' of 'PsqlDosMigrator' objects>, '__weakref__': <attribute '__weakref__' of 'PsqlDosMigrator' objects>, '__annotations__': {}})#
- __module__ = 'aiida.storage.psql_dos.migrator'#
- __weakref__#
list of weak references to the object
- _alembic_connect() Iterator[Config] [source]#
Context manager to return an instance of an Alembic configuration.
The profile’s database connection is added in the attributes property, through which it can then also be retrieved, also in the env.py file, which is run when the database is migrated.
- _migration_context() Iterator[MigrationContext] [source]#
Context manager to return an instance of an Alembic migration context.
This migration context will have been configured with the current database connection, which allows this context to be used to inspect the contents of the database, such as the current revision.
- alembic_version_tbl_name = 'alembic_version'#
- property connection#
Return the connection to the database.
Will automatically create the engine and open a connection if not already opened in a previous call.
- Returns:
Open connection to the database.
- Raises:
aiida.common.exceptions.UnreachableStorage
if connecting to the database fails.
- delete_all_tables(*, exclude_tables: list[str] | None = None) None [source]#
Delete all tables of the current database schema.
The tables are determined dynamically through reflection of the current schema version. Any other tables in the database that are not part of the schema should remain unaffected.
- Parameters:
exclude_tables – Optional list of table names that should not be deleted.
- django_version_table = <sqlalchemy.sql.selectable.TableClause at 0x7fc9a947c810; django_migrations>#
- get_container() Container [source]#
Return the disk-object store container.
- Returns:
The disk-object store container configured for the repository path of the current profile.
- get_current_table(table_name: str) Any [source]#
Return a table instantiated at the correct migration.
Note that this is obtained by inspecting the database and not by looking into the models file. So, special methods possibly defined in the models files/classes are not present.
- get_repository_uuid() str [source]#
Return the UUID of the repository.
- Returns:
The repository UUID.
- Raises:
UnreachableStorage
if the UUID cannot be retrieved, which probably means that the repository is not initialised.
- classmethod get_schema_version_head() str [source]#
Return the head schema version for this storage, i.e. the latest schema this storage can be migrated to.
- get_schema_version_profile(check_legacy=False) str | None [source]#
Return the schema version of the backend instance for this profile.
Note, the version will be None if the database is empty or is a legacy django database.
- classmethod get_schema_versions() Dict[str, str] [source]#
Return all available schema versions (oldest to latest).
- Returns:
schema version -> description
- initialise(reset: bool = False) bool [source]#
Initialise the storage backend.
This is typically used once when a new storage backend is created. If this method returns without exceptions the storage backend is ready for use. If the backend already seems initialised, this method is a no-op.
- Parameters:
reset – If True, destroy the backend if it already exists, including all of its data, before recreating and initialising it. This is useful for example for test profiles that need to be reset before or after tests having run.
- Returns:
True if the storage was initialised by the function call, False if it was already initialised.
- initialise_database() None [source]#
Initialise the database.
This assumes that the database has no schema whatsoever and so the initial schema is created directly from the models at the current head version without migrating through all of them one by one.
- property is_database_initialised: bool#
Return whether the database is initialised.
This is the case if it contains the table that holds the schema version for alembic or Django.
- Returns:
True if the database is initialised, False otherwise.
- property is_initialised: bool#
Return whether the storage is initialised.
This is the case if both the database and the repository are initialised.
- Returns:
True if the storage is initialised, False otherwise.
- property is_repository_initialised: bool#
Return whether the repository is initialised.
- Returns:
True if the repository is initialised, False otherwise.
- migrate() None [source]#
Migrate the storage for this profile to the head version.
- Raises:
UnreachableStorage
if the storage cannot be accessed.
- Raises:
StorageMigrationError
if the storage is not initialised.
- migrate_down(version: str) None [source]#
Migrate the database down to a specific version.
- Parameters:
version – string with schema version to migrate to
- migrate_up(version: str) None [source]#
Migrate the database up to a specific version.
- Parameters:
version – string with schema version to migrate to
- reset_database() None [source]#
Reset the database by deleting all content from all tables.
This will also destroy the settings table and so in order to use it again, it will have to be reinitialised.
- reset_repository() None [source]#
Reset the repository by deleting all of its contents.
This will also destroy the configuration and so in order to use it again, it will have to be reinitialised.
- validate_storage() None [source]#
Validate that the storage for this profile is ready for use, checking:
That the database schema is at the head version, i.e. is compatible with the code API.
That the repository ID is equal to the UUID set in the database
- Raises:
aiida.common.exceptions.UnreachableStorage
if the storage cannot be connected to.
- Raises:
aiida.common.exceptions.IncompatibleStorageSchema
if the storage is not compatible with the code API.
- Raises:
aiida.common.exceptions.CorruptStorage
if the repository ID is not equal to the UUID set in the database.
Utility functions specific to the SqlAlchemy backend.
- class aiida.storage.psql_dos.utils.PsqlConfig[source]#
Bases:
TypedDict
Configuration to connect to a PostgreSQL database.
- __annotations__ = {'database_hostname': <class 'str'>, 'database_name': <class 'str'>, 'database_password': <class 'str'>, 'database_port': <class 'int'>, 'database_username': <class 'str'>, 'engine_kwargs': <class 'dict'>}#
- __dict__ = mappingproxy({'__module__': 'aiida.storage.psql_dos.utils', '__annotations__': {'database_hostname': <class 'str'>, 'database_port': <class 'int'>, 'database_username': <class 'str'>, 'database_password': <class 'str'>, 'database_name': <class 'str'>, 'engine_kwargs': <class 'dict'>}, '__doc__': 'Configuration to connect to a PostgreSQL database.', '__orig_bases__': (<function TypedDict>,), '__dict__': <attribute '__dict__' of 'PsqlConfig' objects>, '__weakref__': <attribute '__weakref__' of 'PsqlConfig' objects>, '__required_keys__': frozenset(), '__optional_keys__': frozenset({'engine_kwargs', 'database_name', 'database_port', 'database_hostname', 'database_username', 'database_password'}), '__total__': False})#
- __module__ = 'aiida.storage.psql_dos.utils'#
- __optional_keys__ = frozenset({'database_hostname', 'database_name', 'database_password', 'database_port', 'database_username', 'engine_kwargs'})#
- __orig_bases__ = (<function TypedDict>,)#
- __required_keys__ = frozenset({})#
- __total__ = False#
- __weakref__#
list of weak references to the object
- aiida.storage.psql_dos.utils.create_scoped_session_factory(engine, **kwargs)[source]#
Create scoped SQLAlchemy session factory
- aiida.storage.psql_dos.utils.create_sqlalchemy_engine(config: PsqlConfig)[source]#
Create SQLAlchemy engine (to be used for QueryBuilder queries)
- Parameters:
config – database connection configuration; its engine_kwargs entry will be passed on to sqlalchemy.create_engine. See https://docs.sqlalchemy.org/en/13/core/engines.html?highlight=create_engine#sqlalchemy.create_engine for more info.
- aiida.storage.psql_dos.utils.flag_modified(instance, key)[source]#
Wrapper around sqlalchemy.orm.attributes.flag_modified to correctly dereference utils.ModelWrapper
Since SqlAlchemy 1.2.12 (and maybe earlier, but not in 1.0.19) the flag_modified function will check that the key is actually present in the instance, or it will raise an exception. If we pass a model instance wrapped in the ModelWrapper, the call will raise an InvalidRequestError. In this function that wraps the flag_modified of SqlAlchemy, we dereference the model instance if the passed instance is actually wrapped in the ModelWrapper.