aiida.orm package#

Main module to expose all orm classes and methods

Subpackages#

Submodules#

Module for the AuthInfo ORM class.

class aiida.orm.authinfos.AuthInfo(computer: Computer, user: User, backend: StorageBackend | None = None)[source]#

Bases: Entity[BackendAuthInfo, AuthInfoCollection]

ORM class that models the authorization information that allows a User to connect to a Computer.

PROPERTY_WORKDIR = 'workdir'#
_CLS_COLLECTION#

alias of AuthInfoCollection

__abstractmethods__ = frozenset({})#
__init__(computer: Computer, user: User, backend: StorageBackend | None = None) None[source]#

Create an AuthInfo instance for the given computer and user.

Parameters:
  • computer – a Computer instance

  • user – a User instance

  • backend – the backend to use for the instance, or use the default backend if None

__module__ = 'aiida.orm.authinfos'#
__orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendAuthInfo'), aiida.orm.authinfos.AuthInfoCollection],)#
__parameters__ = ()#
__qb_fields__: Sequence[QbField] = [QbField('enabled', dtype=bool, is_attribute=False), QbDictField('auth_params', dtype=Dict[str, Any], is_attribute=False), QbDictField('metadata', dtype=Dict[str, Any], is_attribute=False), QbNumericField('computer_pk', dtype=int, is_attribute=False), QbNumericField('user_pk', dtype=int, is_attribute=False)]#
__str__() str[source]#

Return str(self).

_abc_impl = <_abc._abc_data object>#
property computer: Computer#

Return the computer associated with this instance.

property enabled: bool#

Return whether this instance is enabled.

Returns:

True if enabled, False otherwise

fields: QbFields = {'auth_params': 'QbDictField(auth_params) -> Dict[str, Any]',  'computer_pk': 'QbNumericField(computer_pk) -> int',  'enabled': 'QbField(enabled) -> bool',  'metadata': 'QbDictField(metadata) -> Dict[str, Any]',  'pk': 'QbNumericField(pk) -> int',  'user_pk': 'QbNumericField(user_pk) -> int'}#
get_auth_params() Dict[str, Any][source]#

Return the dictionary of authentication parameters

Returns:

a dictionary with authentication parameters

get_metadata() Dict[str, Any][source]#

Return the dictionary of metadata

Returns:

a dictionary with metadata

get_transport() Transport[source]#

Return a fully configured transport that can be used to connect to the computer set for this instance.

get_workdir() str[source]#

Return the working directory.

If no explicit work directory is set for this instance, the working directory of the computer will be returned.

Returns:

the working directory

set_auth_params(auth_params: Dict[str, Any]) None[source]#

Set the dictionary of authentication parameters

Parameters:

auth_params – a dictionary with authentication parameters

set_metadata(metadata: Dict[str, Any]) None[source]#

Set the dictionary of metadata

Parameters:

metadata – a dictionary with metadata

property user: User#

Return the user associated with this instance.
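For example, assuming a loaded profile and an existing computer labelled 'localhost' that is already configured for the default user, the corresponding AuthInfo and its transport can be obtained roughly as follows:

from aiida import load_profile, orm

load_profile()

computer = orm.load_computer('localhost')   # assumes a computer with this label exists
user = orm.User.collection.get_default()    # default user of the loaded profile
authinfo = computer.get_authinfo(user)      # raises NotExistent if not configured for this user

transport = authinfo.get_transport()
with transport:
    print(transport.whoami())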

class aiida.orm.authinfos.AuthInfoCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: Collection[AuthInfo]

The collection of AuthInfo entries.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.authinfos'#
__orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('AuthInfo')],)#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
static _entity_base_cls() Type[AuthInfo][source]#

The allowed entity class or subclasses thereof.

delete(pk: int) None[source]#

Delete an entry from the collection.

Parameters:

pk – the pk of the entry to delete

Module to manage the autogrouping functionality by verdi run.

class aiida.orm.autogroup.AutogroupManager(backend)[source]#

Bases: object

Class to automatically add all newly stored Nodes to an AutoGroup (whilst enabled).

This class should not be instantiated directly, but rather accessed through the backend storage instance.

The auto-grouping is checked by the Node.store() method which, if is_to_be_grouped is true, will store the node in the associated AutoGroup.

The exclude/include lists are lists of strings like: aiida.data:core.int, aiida.calculation:quantumespresso.pw, aiida.data:core.array.%, … i.e.: a string identifying the base class, followed by a colon and the path to the class as accepted by CalculationFactory/DataFactory. Each string can contain one or more wildcard characters %; in this case this is used in a like comparison with the QueryBuilder. Note that in this case you have to remember that _ means “any character” in the QueryBuilder, and you need to escape it if you mean a literal underscore.

Only one of the two (between exclude and include) can be set. If none of the two is set, everything is included.
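For example, assuming the storage backend of the loaded profile exposes this manager as its autogroup attribute, auto-grouping could be enabled roughly as follows (the include filter and label prefix are illustrative):

from aiida import load_profile
from aiida.manage import get_manager

load_profile()

storage = get_manager().get_profile_storage()
autogroup = storage.autogroup                    # assumed attribute exposing the AutogroupManager
autogroup.enable()
autogroup.set_include(['aiida.data:core.int'])   # only auto-group Int data nodes
autogroup.set_group_label_prefix('my-run')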

__dict__ = mappingproxy({'__module__': 'aiida.orm.autogroup', '__doc__': 'Class to automatically add all newly stored ``Node``s to an ``AutoGroup`` (whilst enabled).\n\n    This class should not be instantiated directly, but rather accessed through the backend storage instance.\n\n    The auto-grouping is checked by the ``Node.store()`` method which, if ``is_to_be_grouped`` is true,\n    will store the node in the associated ``AutoGroup``.\n\n    The exclude/include lists are lists of strings like:\n    ``aiida.data:core.int``, ``aiida.calculation:quantumespresso.pw``,\n    ``aiida.data:core.array.%``, ...\n    i.e.: a string identifying the base class, followed by a colon and the path to the class\n    as accepted by CalculationFactory/DataFactory.\n    Each string can contain one or more wildcard characters ``%``;\n    in this case this is used in a ``like`` comparison with the QueryBuilder.\n    Note that in this case you have to remember that ``_`` means "any character"\n    in the QueryBuilder, and you need to escape it if you mean a literal underscore.\n\n    Only one of the two (between exclude and include) can be set.\n    If none of the two is set, everything is included.\n    ', '__init__': <function AutogroupManager.__init__>, 'is_enabled': <property object>, 'enable': <function AutogroupManager.enable>, 'disable': <function AutogroupManager.disable>, 'get_exclude': <function AutogroupManager.get_exclude>, 'get_include': <function AutogroupManager.get_include>, 'get_group_label_prefix': <function AutogroupManager.get_group_label_prefix>, 'validate': <staticmethod(<function AutogroupManager.validate>)>, 'set_exclude': <function AutogroupManager.set_exclude>, 'set_include': <function AutogroupManager.set_include>, 'set_group_label_prefix': <function AutogroupManager.set_group_label_prefix>, '_matches': <staticmethod(<function AutogroupManager._matches>)>, 'is_to_be_grouped': <function AutogroupManager.is_to_be_grouped>, 'get_or_create_group': <function AutogroupManager.get_or_create_group>, '__dict__': <attribute '__dict__' of 'AutogroupManager' objects>, '__weakref__': <attribute '__weakref__' of 'AutogroupManager' objects>, '__annotations__': {'_exclude': 'list[str] | None', '_include': 'list[str] | None'}})#
__init__(backend)[source]#

Initialize the manager for the storage backend.

__module__ = 'aiida.orm.autogroup'#
__weakref__#

list of weak references to the object

static _matches(string, filter_string)[source]#

Check if ‘string’ matches the ‘filter_string’ (used for include and exclude filters).

If ‘filter_string’ does not contain any % sign, perform an exact match. Otherwise, match with a SQL-like query, where % means any character sequence, and _ means a single character (these characters can be escaped with a backslash).

Parameters:
  • string – the string to match.

  • filter_string – the filter string.

disable() None[source]#

Disable the auto-grouping.

enable() None[source]#

Enable the auto-grouping.

get_exclude() list[str] | None[source]#

Return the list of classes to exclude from autogrouping.

Returns None if no exclusion list has been set.

get_group_label_prefix() str[source]#

Get the prefix of the label of the group. If no group label prefix was set, it will set a default one by itself.

get_include() list[str] | None[source]#

Return the list of classes to include in the autogrouping.

Returns None if no inclusion list has been set.

get_or_create_group() AutoGroup[source]#

Return the current AutoGroup, or create one if none has been set yet.

This function implements a somewhat complex logic that is however needed to make sure that, even if verdi run is called at the same time multiple times, e.g. in a for loop in bash, there is never the risk that two verdi run Unix processes try to create the same group, with the same label, ending up in a crash of the code (see PR #3650).

Here, instead, we make sure that if this concurrency issue happens, one of the two will get an IntegrityError from the DB, and then recover by trying to create a group with a different label (with a numeric suffix appended), until it manages to create it.

property is_enabled: bool#

Return whether auto-grouping is enabled.

is_to_be_grouped(node) bool[source]#

Return whether the given node is to be auto-grouped according to enable state and include/exclude lists.

set_exclude(exclude: list[str] | str | None) None[source]#

Set the list of classes to exclude from the autogrouping.

Parameters:

exclude – a list of valid entry point strings (which might contain ‘%’ to be used as a string matched using SQL’s LIKE pattern-matching logic), or None to specify no exclude list.

set_group_label_prefix(label_prefix: str | None) None[source]#

Set the label of the group to be created (or use a default).

set_include(include: list[str] | str | None) None[source]#

Set the list of classes to include in the autogrouping.

Parameters:

include – a list of valid entry point strings (which might contain ‘%’ to be used as a string matched using SQL’s LIKE pattern-matching logic), or None to specify no include list.

static validate(strings: list[str] | None)[source]#

Validate the list of strings passed to set_include and set_exclude.

Comment objects and functions

class aiida.orm.comments.Comment(node: Node, user: User, content: str | None = None, backend: StorageBackend | None = None)[source]#

Bases: Entity[BackendComment, CommentCollection]

Base class to map a DbComment that represents a comment attached to a certain Node.

_CLS_COLLECTION#

alias of CommentCollection

__abstractmethods__ = frozenset({})#
__init__(node: Node, user: User, content: str | None = None, backend: StorageBackend | None = None)[source]#

Create a Comment for a given node and user

Parameters:
  • node – a Node instance

  • user – a User instance

  • content – the comment content

  • backend – the backend to use for the instance, or use the default backend if None

Returns:

a Comment object associated with the given node and user

__module__ = 'aiida.orm.comments'#
__orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendComment'), aiida.orm.comments.CommentCollection],)#
__parameters__ = ()#
__qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbNumericField('ctime', dtype=datetime, is_attribute=False), QbNumericField('mtime', dtype=datetime, is_attribute=False), QbStrField('content', dtype=str, is_attribute=False), QbNumericField('user_pk', dtype=int, is_attribute=False), QbNumericField('node_pk', dtype=int, is_attribute=False)]#
__str__() str[source]#

Return str(self).

_abc_impl = <_abc._abc_data object>#
property content: str#
property ctime: datetime#
fields: QbFields = {'content': 'QbStrField(content) -> str',  'ctime': 'QbNumericField(ctime) -> datetime',  'mtime': 'QbNumericField(mtime) -> datetime',  'node_pk': 'QbNumericField(node_pk) -> int',  'pk': 'QbNumericField(pk) -> int',  'user_pk': 'QbNumericField(user_pk) -> int',  'uuid': 'QbStrField(uuid) -> str'}#
property mtime: datetime#
property node: Node#
set_content(value: str) None[source]#
set_mtime(value: datetime) None[source]#
set_user(value: User) None[source]#
property user: User#
property uuid: str#

Return the UUID for this comment.

This identifier is unique across all entities types and backend instances.

Returns:

the entity uuid
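For example, assuming a loaded profile and an existing stored node, a comment can be attached using the constructor documented above (the node PK below is hypothetical):

from aiida import load_profile, orm
from aiida.orm.comments import Comment

load_profile()

node = orm.load_node(123)                  # hypothetical PK of an existing node
user = orm.User.collection.get_default()
comment = Comment(node=node, user=user, content='Calculation converged').store()
print(comment.uuid, comment.content)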

class aiida.orm.comments.CommentCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: Collection[Comment]

The collection of Comment entries.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.comments'#
__orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Comment')],)#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
static _entity_base_cls() Type[Comment][source]#

The allowed entity class or subclasses thereof.

delete(pk: int) None[source]#

Remove a Comment from the collection with the given id

Parameters:

pk – the id of the comment to delete

Raises:
  • TypeError – if pk is not an int

  • NotExistent – if a Comment with the given pk is not found

delete_all() None[source]#

Delete all Comments from the Collection

Raises:

IntegrityError – if all Comments could not be deleted

delete_many(filters: dict) List[int][source]#

Delete Comments from the Collection based on filters

Parameters:

filters – similar to QueryBuilder filter

Returns:

(former) PKs of the deleted Comments

Module for Computer entities

class aiida.orm.computers.Computer(label: str | None = None, hostname: str = '', description: str = '', transport_type: str = '', scheduler_type: str = '', workdir: str | None = None, backend: StorageBackend | None = None)[source]#

Bases: Entity[BackendComputer, ComputerCollection]

Computer entity.

PROPERTY_MINIMUM_SCHEDULER_POLL_INTERVAL = 'minimum_scheduler_poll_interval'#
PROPERTY_MINIMUM_SCHEDULER_POLL_INTERVAL__DEFAULT = 10.0#
PROPERTY_SHEBANG = 'shebang'#
PROPERTY_WORKDIR = 'workdir'#
_CLS_COLLECTION#

alias of ComputerCollection

__abstractmethods__ = frozenset({})#
__init__(label: str | None = None, hostname: str = '', description: str = '', transport_type: str = '', scheduler_type: str = '', workdir: str | None = None, backend: StorageBackend | None = None) None[source]#

Construct a new computer.
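For example (a sketch with illustrative entry point names and paths; the profile must be loaded first):

from aiida import load_profile, orm

load_profile()

computer = orm.Computer(
    label='my-cluster',                    # illustrative values
    hostname='cluster.example.com',
    description='Example SLURM cluster',
    transport_type='core.ssh',
    scheduler_type='core.slurm',
    workdir='/scratch/{username}/aiida/',
)
computer.store()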

__module__ = 'aiida.orm.computers'#
__orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendComputer'), aiida.orm.computers.ComputerCollection],)#
__parameters__ = ()#
__qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('label', dtype=str, is_attribute=False), QbStrField('description', dtype=str, is_attribute=False), QbStrField('hostname', dtype=str, is_attribute=False), QbStrField('transport_type', dtype=str, is_attribute=False), QbStrField('scheduler_type', dtype=str, is_attribute=False), QbDictField('metadata', dtype=Dict[str, Any], is_attribute=False)]#
__repr__()[source]#

Return repr(self).

__str__()[source]#

Return str(self).

_abc_impl = <_abc._abc_data object>#
classmethod _append_text_validator(append_text: str) None[source]#

Validates the append text string.

classmethod _default_mpiprocs_per_machine_validator(def_cpus_per_machine: int | None) None[source]#

Validates the default number of CPUs per machine (node)

classmethod _description_validator(description: str) None[source]#

Validates the description.

classmethod _hostname_validator(hostname: str) None[source]#

Validates the hostname.

classmethod _label_validator(label: str) None[source]#

Validates the label.

_logger = <Logger aiida.orm.computers (WARNING)>#
_mpirun_command_validator(mpirun_cmd: List[str] | Tuple[str, ...]) None[source]#

Validates the mpirun_command variable. MUST be called after properly checking for a valid scheduler.

classmethod _prepend_text_validator(prepend_text: str) None[source]#

Validates the prepend text string.

classmethod _scheduler_type_validator(scheduler_type: str) None[source]#

Validates the scheduler type string.

classmethod _transport_type_validator(transport_type: str) None[source]#

Validates the transport string.

classmethod _workdir_validator(workdir: str) None[source]#

Validates the work directory string.

configure(user: User | None = None, **kwargs: Any) AuthInfo[source]#

Configure a computer for a user with valid auth params passed via kwargs

Parameters:

user – the user to configure the computer for

Kwargs:

the configuration keywords with corresponding values

Returns:

the authinfo object for the configured user
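For example, for an already stored computer the configuration keywords are the authentication parameters of its transport plugin (the keys below are illustrative and depend on the transport):

from aiida import orm

user = orm.User.collection.get_default()
authinfo = computer.configure(       # `computer` is an already stored Computer instance
    user=user,
    username='jdoe',                 # illustrative parameters for an SSH-like transport
    port=22,
)
print(authinfo.enabled)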

copy() Computer[source]#

Return a copy of the current object to work with, not stored yet.

classmethod default_memory_per_machine_validator(def_memory_per_machine: int | None) None[source]#

Validates the default amount of memory (kB) per machine (node)

delete_property(name: str, raise_exception: bool = True) None[source]#

Delete a property from this computer

Parameters:
  • name – the name of the property

  • raise_exception – if True raise if the property does not exist, otherwise return None

property description: str#

Return the computer description.

Returns:

the description.

fields: QbFields = {'description': 'QbStrField(description) -> str',  'hostname': 'QbStrField(hostname) -> str',  'label': 'QbStrField(label) -> str',  'metadata': 'QbDictField(metadata) -> Dict[str, Any]',  'pk': 'QbNumericField(pk) -> int',  'scheduler_type': 'QbStrField(scheduler_type) -> str',  'transport_type': 'QbStrField(transport_type) -> str',  'uuid': 'QbStrField(uuid) -> str'}#
get_append_text() str[source]#
get_authinfo(user: User) AuthInfo[source]#

Return the aiida.orm.authinfos.AuthInfo instance for the given user on this computer, if the computer is configured for the given user.

Parameters:

user – a User instance.

Returns:

a AuthInfo instance

Raises:

aiida.common.NotExistent – if the computer is not configured for the given user.

get_configuration(user: User | None = None) Dict[str, Any][source]#

Get the configuration of the computer for the given user as a dictionary

Parameters:

user – the user to get the configuration for, otherwise the default user

get_default_memory_per_machine() int | None[source]#

Return the default amount of memory (kB) per machine (node) for this computer, or None if it was not set.

get_default_mpiprocs_per_machine() int | None[source]#

Return the default number of CPUs per machine (node) for this computer, or None if it was not set.

get_minimum_job_poll_interval() float[source]#

Get the minimum interval between subsequent requests to poll the scheduler for job status.

Note

If no value was ever set for this computer it will fall back on the default provided by the associated transport class in the DEFAULT_MINIMUM_JOB_POLL_INTERVAL attribute. If the computer doesn’t have a transport class, or it cannot be loaded, or it doesn’t provide a job poll interval default, then this will fall back on the PROPERTY_MINIMUM_SCHEDULER_POLL_INTERVAL__DEFAULT attribute of this class.

Returns:

The minimum interval (in seconds).

get_mpirun_command() List[str][source]#

Return the mpirun command. Must be a list of strings that will be joined with spaces when submitting.

A sensible default is provided that may be suitable in many cases.

get_prepend_text() str[source]#
get_property(name: str, *args: Any) Any[source]#

Get a property of this computer

Parameters:
  • name – the property name

  • args – additional arguments

Returns:

the property value

get_scheduler() Scheduler[source]#

Get a scheduler instance for this computer

get_shebang() str[source]#
get_transport(user: User | None = None) Transport[source]#

Return a Transport instance, configured with all correct parameters. The Transport is closed, meaning that if you want to run any operation with it you have to open it first (e.g., for an SSH transport, you have to open a connection). To do this you can call transport.open(), or simply run within a with statement:

transport = computer.get_transport()
with transport:
    print(transport.whoami())
Parameters:

user – if None, try to obtain a transport for the default user. Otherwise, pass a valid User.

Returns:

a (closed) Transport, already configured with the connection parameters to the supercomputer, as set up with verdi computer configure for the user specified via the user parameter.

get_transport_class() Type[Transport][source]#

Get the transport class for this computer. Can be used to instantiate a transport instance.

get_use_double_quotes() bool[source]#

Return whether the command line parameters of this computer should be escaped with double quotes.

Returns:

True to escape with double quotes, False otherwise (the default).

get_workdir() str[source]#

Get the working directory for this computer.

Returns:

The currently configured working directory

property hostname: str#

Return the computer hostname.

Returns:

the hostname.

property is_configured: bool#

Return whether the computer is configured for the current default user.

Returns:

Boolean, True if the computer is configured for the current default user, False otherwise.

is_user_configured(user: User) bool[source]#

Is the user configured on this computer?

Parameters:

user – the user to check

Returns:

True if configured, False otherwise

is_user_enabled(user: User) bool[source]#

Is the given user enabled to run on this computer?

Parameters:

user – the user to check

Returns:

True if enabled, False otherwise

property label: str#

Return the computer label.

Returns:

the label.

property logger: Logger#
property metadata: Dict[str, Any]#

Return the computer metadata.

Returns:

the metadata.

property scheduler_type: str#

Return the computer scheduler type.

Returns:

the scheduler type.

set_append_text(val: str) None[source]#
set_default_memory_per_machine(def_memory_per_machine: int | None) None[source]#

Set the default amount of memory (kB) per machine (node) for this computer. Accepts None if you do not want to set this value.

set_default_mpiprocs_per_machine(def_cpus_per_machine: int | None) None[source]#

Set the default number of CPUs per machine (node) for this computer. Accepts None if you do not want to set this value.

set_minimum_job_poll_interval(interval: float) None[source]#

Set the minimum interval between subsequent requests to update the list of jobs currently running on this computer.

Parameters:

interval – The minimum interval in seconds

set_mpirun_command(val: List[str] | Tuple[str, ...]) None[source]#

Set the mpirun command. It must be a list of strings (you can use string.split() if you have a single, space-separated string).

set_prepend_text(val: str) None[source]#
set_property(name: str, value: Any) None[source]#

Set a property on this computer

Parameters:
  • name – the property name

  • value – the new value

set_shebang(val: str) None[source]#
Parameters:

val (str) – A valid shebang line

set_use_double_quotes(val: bool) None[source]#

Set whether the command line parameters of this computer should be escaped with double quotes.

Parameters:

val – True to escape with double quotes, False otherwise.

set_workdir(val: str) None[source]#
store() Computer[source]#

Store the computer in the DB.

Unlike nodes, a computer can be re-stored if its properties are to be changed (e.g. a new mpirun command).

property transport_type: str#

Return the computer transport type.

Returns:

the transport_type.

property uuid: str#

Return the UUID for this computer.

This identifier is unique across all entities types and backend instances.

Returns:

the entity uuid

validate() None[source]#

Check if the attributes and files retrieved from the DB are valid. Raise a ValidationError if something is wrong.

Must be able to work even before storing: therefore, use the get_attr and similar methods that automatically read either from the DB or from the internal attribute cache.

For the base class, this is always valid. Subclasses will reimplement this. In the subclass, always call the super().validate() method first!

class aiida.orm.computers.ComputerCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: Collection[Computer]

The collection of Computer entries.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.computers'#
__orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Computer')],)#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
static _entity_base_cls() Type[Computer][source]#

The allowed entity class or subclasses thereof.

delete(pk: int) None[source]#

Delete the computer with the given id

get_or_create(label: str | None = None, **kwargs) Tuple[bool, Computer][source]#

Try to retrieve a Computer from the DB with the given arguments; create (and store) a new Computer if such a Computer was not present yet.

Parameters:

label – computer label

Returns:

(created, computer), where created is a boolean indicating whether the computer had to be created and computer is the computer instance (new or existing, in any case already stored)
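For example (following the return annotation, the boolean flag is the first element of the returned tuple; the configuration values are illustrative):

from aiida import load_profile, orm

load_profile()

created, computer = orm.Computer.collection.get_or_create(
    label='localhost',
    hostname='localhost',
    description='Local workstation',
    transport_type='core.local',
    scheduler_type='core.direct',
    workdir='/tmp/aiida_run/',
)
print(created, computer.label)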

list_labels() List[str][source]#

Return a list with all the labels of the computers in the DB.

Module for converting backend entities into frontend ORM entities

class aiida.orm.convert.ConvertIterator(backend_iterator)[source]#

Bases: Iterator, Sized

Iterator that converts backend entities into frontend ORM entities as needed

See aiida.orm.Group.nodes() for an example.

__abstractmethods__ = frozenset({})#
__dict__ = mappingproxy({'__module__': 'aiida.orm.convert', '__doc__': 'Iterator that converts backend entities into frontend ORM entities as needed\n\n    See :func:`aiida.orm.Group.nodes` for an example.\n    ', '__init__': <function ConvertIterator.__init__>, '_genfunction': <function ConvertIterator._genfunction>, '__iter__': <function ConvertIterator.__iter__>, '__len__': <function ConvertIterator.__len__>, '__getitem__': <function ConvertIterator.__getitem__>, '__next__': <function ConvertIterator.__next__>, '__dict__': <attribute '__dict__' of 'ConvertIterator' objects>, '__weakref__': <attribute '__weakref__' of 'ConvertIterator' objects>, '__abstractmethods__': frozenset(), '_abc_impl': <_abc._abc_data object>, '__annotations__': {}})#
__getitem__(value)[source]#
__init__(backend_iterator)[source]#
__iter__()[source]#
__len__()[source]#
__module__ = 'aiida.orm.convert'#
__next__()[source]#

Return the next item from the iterator. When exhausted, raise StopIteration

__weakref__#

list of weak references to the object

_abc_impl = <_abc._abc_data object>#
_genfunction()[source]#
aiida.orm.convert._(backend_entity)[source]#
aiida.orm.convert.get_orm_entity(backend_entity)[source]#
aiida.orm.convert.get_orm_entity(backend_entity: Mapping)
aiida.orm.convert.get_orm_entity(backend_entity: tuple)
aiida.orm.convert.get_orm_entity(backend_entity: list)
aiida.orm.convert.get_orm_entity(backend_entity: BackendGroup)
aiida.orm.convert.get_orm_entity(backend_entity: BackendComputer)
aiida.orm.convert.get_orm_entity(backend_entity: BackendUser)
aiida.orm.convert.get_orm_entity(backend_entity: BackendAuthInfo)
aiida.orm.convert.get_orm_entity(backend_entity: BackendLog)
aiida.orm.convert.get_orm_entity(backend_entity: BackendComment)
aiida.orm.convert.get_orm_entity(backend_entity: BackendNode)

Module for all common top level AiiDA entity classes and methods

class aiida.orm.entities.Collection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: ABC, Generic[EntityType]

Container class that represents the collection of objects of a particular entity type.

__abstractmethods__ = frozenset({'_entity_base_cls'})#
__call__(backend: StorageBackend) CollectionType[source]#

Get or create a cached collection using a new backend.

__dict__ = mappingproxy({'__module__': 'aiida.orm.entities', '__doc__': 'Container class that represents the collection of objects of a particular entity type.', '_entity_base_cls': <staticmethod(<function Collection._entity_base_cls>)>, 'get_cached': <classmethod(<functools._lru_cache_wrapper object>)>, '__init__': <function Collection.__init__>, '__call__': <function Collection.__call__>, 'entity_type': <property object>, 'backend': <property object>, 'query': <function Collection.query>, 'get': <function Collection.get>, 'find': <function Collection.find>, 'all': <function Collection.all>, 'count': <function Collection.count>, '__orig_bases__': (<class 'abc.ABC'>, typing.Generic[~EntityType]), '__dict__': <attribute '__dict__' of 'Collection' objects>, '__weakref__': <attribute '__weakref__' of 'Collection' objects>, '__parameters__': (~EntityType,), '__abstractmethods__': frozenset({'_entity_base_cls'}), '_abc_impl': <_abc._abc_data object>, '__annotations__': {}})#
__init__(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None) None[source]#

Construct a new entity collection.

Parameters:
  • entity_class – the entity type e.g. User, Computer, etc

  • backend – the backend instance to get the collection for, or use the default

__module__ = 'aiida.orm.entities'#
__orig_bases__ = (<class 'abc.ABC'>, typing.Generic[~EntityType])#
__parameters__ = (~EntityType,)#
__weakref__#

list of weak references to the object

_abc_impl = <_abc._abc_data object>#
abstract static _entity_base_cls() Type[EntityType][source]#

The allowed entity class or subclasses thereof.

all() List[EntityType][source]#

Get all entities in this collection.

Returns:

A list of all entities

property backend: StorageBackend#

Return the backend.

count(filters: 'FilterType' | None = None) int[source]#

Count entities in this collection according to criteria.

Parameters:

filters – the keyword value pair filters to match

Returns:

The number of entities found using the supplied criteria

property entity_type: Type[EntityType]#

The entity type for this instance.

find(filters: 'FilterType' | None = None, order_by: 'OrderByType' | None = None, limit: int | None = None) List[EntityType][source]#

Find collection entries matching the filter criteria.

Parameters:
  • filters – the keyword value pair filters to match

  • order_by – a list of (key, direction) pairs specifying the sort order

  • limit – the maximum number of results to return

Returns:

a list of resulting matches

get(**filters: Any) EntityType[source]#

Get a single collection entry that matches the filter criteria.

Parameters:

filters – the filters identifying the object to get

Returns:

the entry

classmethod get_cached(entity_class: Type[EntityType], backend: StorageBackend)[source]#

Get the cached collection instance for the given entity class and backend.

Parameters:

backend – the backend instance to get the collection for

query(filters: 'FilterType' | None = None, order_by: 'OrderByType' | None = None, project: list[str] | str | None = None, limit: int | None = None, offset: int | None = None, subclassing: bool = True) QueryBuilder[source]#

Get a query builder for the objects of this collection.

Parameters:
  • filters – the keyword value pair filters to match

  • order_by – a list of (key, direction) pairs specifying the sort order

  • project – Optional projections.

  • limit – the maximum number of results to return

  • offset – number of initial results to be skipped

  • subclassing – whether to match subclasses of the type as well.
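For example, the collection of computers could be queried roughly as follows (the label pattern is illustrative):

from aiida import load_profile, orm

load_profile()

builder = orm.Computer.collection.query(
    filters={'label': {'like': 'gpu%'}},   # illustrative filter
    project=['label', 'hostname'],
    limit=10,
)
for label, hostname in builder.all():
    print(label, hostname)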

class aiida.orm.entities.Entity(backend_entity: BackendEntityType)[source]#

Bases: ABC, Generic[BackendEntityType, CollectionType]

An AiiDA entity

_CLS_COLLECTION#

alias of Collection

__abstractmethods__ = frozenset({})#
__annotations__ = {'_CLS_COLLECTION': 'Type[CollectionType]', '__qb_fields__': 'Sequence[QbField]', 'fields': 'QbFields'}#
__dict__ = mappingproxy({'__module__': 'aiida.orm.entities', '__annotations__': {'_CLS_COLLECTION': 'Type[CollectionType]', 'fields': 'QbFields', '__qb_fields__': 'Sequence[QbField]'}, '__doc__': 'An AiiDA entity', '_CLS_COLLECTION': <class 'aiida.orm.entities.Collection'>, 'fields': {'pk': 'QbNumericField(pk) -> int'}, '__qb_fields__': [QbNumericField('pk', dtype=int, is_attribute=False)], 'objects': <aiida.common.lang.classproperty object>, 'collection': <aiida.common.lang.classproperty object>, 'get_collection': <classmethod(<function Entity.get_collection>)>, 'get': <classmethod(<function Entity.get>)>, '__init__': <function Entity.__init__>, '__getstate__': <function Entity.__getstate__>, 'initialize': <function super_check.<locals>.wrapper>, 'id': <property object>, 'pk': <property object>, 'store': <function Entity.store>, 'is_stored': <property object>, 'backend': <property object>, 'backend_entity': <property object>, '__orig_bases__': (<class 'abc.ABC'>, typing.Generic[~BackendEntityType, ~CollectionType]), '__dict__': <attribute '__dict__' of 'Entity' objects>, '__weakref__': <attribute '__weakref__' of 'Entity' objects>, '__parameters__': (~BackendEntityType, ~CollectionType), '__abstractmethods__': frozenset(), '_abc_impl': <_abc._abc_data object>})#
__getstate__()[source]#

Prevent an ORM entity instance from being pickled.

__init__(backend_entity: BackendEntityType) None[source]#
Parameters:

backend_entity – the backend model supporting this entity

__module__ = 'aiida.orm.entities'#
__orig_bases__ = (<class 'abc.ABC'>, typing.Generic[~BackendEntityType, ~CollectionType])#
__parameters__ = (~BackendEntityType, ~CollectionType)#
__qb_fields__: Sequence[QbField] = [QbNumericField('pk', dtype=int, is_attribute=False)]#
__weakref__#

list of weak references to the object

_abc_impl = <_abc._abc_data object>#
property backend: StorageBackend#

Get the backend for this entity

property backend_entity: BackendEntityType#

Get the implementing class for this object

collection#

A class that, when used as a decorator, works as if the two decorators @property and @classmethod were applied together (i.e., the object works as a property, both for the class and for any of its instances, and is called with the class cls rather than with the instance as its first argument).

fields: QbFields = {'pk': 'QbNumericField(pk) -> int'}#
classmethod get(**kwargs)[source]#

Get an entity of the collection matching the given filters.

classmethod get_collection(backend: StorageBackend)[source]#

Get a collection for objects of this type for a given backend.

Note

Use the collection class property instead if the currently loaded backend or backend of the default profile should be used.

Parameters:

backend – The backend of the collection to use.

Returns:

A collection object that can be used to access entities of this type.

property id: int | None#

Return the id for this entity.

This identifier is guaranteed to be unique amongst entities of the same type for a single backend instance.

Returns:

the entity’s id

initialize(*args: Any, **kwargs: Any) None#
property is_stored: bool#

Return whether the entity is stored.

objects#

A class that, when used as a decorator, works as if the two decorators @property and @classmethod were applied together (i.e., the object works as a property, both for the class and for any of its instances, and is called with the class cls rather than with the instance as its first argument).

property pk: int | None#

Return the primary key for this entity.

This identifier is guaranteed to be unique amongst entities of the same type for a single backend instance.

Returns:

the entity’s primary key

store() EntityType[source]#

Store the entity.

class aiida.orm.entities.EntityTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)[source]#

Bases: Enum

Enum for referring to ORM entities in a backend-agnostic manner.

AUTHINFO = 'authinfo'#
COMMENT = 'comment'#
COMPUTER = 'computer'#
GROUP = 'group'#
GROUP_NODE = 'group_node'#
LOG = 'log'#
NODE = 'node'#
USER = 'user'#
__module__ = 'aiida.orm.entities'#
aiida.orm.entities.from_backend_entity(cls: Type[EntityType], backend_entity: BackendEntityType) EntityType[source]#

Construct an entity from a backend entity instance

Parameters:

backend_entity – the backend entity

Returns:

an AiiDA entity instance

Interface to the extras of a node instance.

class aiida.orm.extras.EntityExtras(entity: Node | Group)[source]#

Bases: object

Interface to the extras of a node or group instance.

Extras are a JSONable dictionary, stored on each entity, allowing for arbitrary data to be stored by users.

Extras are mutable, even after storing the entity, and as such are not deemed a core part of the provenance graph.
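For example, assuming the extras interface is reached through the entity's base namespace (e.g. node.base.extras in recent AiiDA versions), it can be used roughly as follows (keys and values are illustrative):

from aiida import load_profile, orm

load_profile()

node = orm.Int(5).store()
extras = node.base.extras                 # EntityExtras interface of the node
extras.set('project', 'demo')             # illustrative key/value pairs
extras.set_many({'tag': 'test', 'priority': 1})
print(extras.get('project'), 'tag' in extras)
print(extras.all)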

__contains__(key: str) bool[source]#

Check if the extras contain the given key.

__dict__ = mappingproxy({'__module__': 'aiida.orm.extras', '__doc__': 'Interface to the extras of a node or group instance.\n\n    Extras are a JSONable dictionary, stored on each entity,\n    allowing for arbitrary data to be stored by users.\n\n    Extras are mutable, even after storing the entity,\n    and as such are not deemed a core part of the provenance graph.\n    ', '__init__': <function EntityExtras.__init__>, '__contains__': <function EntityExtras.__contains__>, 'all': <property object>, 'get': <function EntityExtras.get>, 'get_many': <function EntityExtras.get_many>, 'set': <function EntityExtras.set>, 'set_many': <function EntityExtras.set_many>, 'reset': <function EntityExtras.reset>, 'delete': <function EntityExtras.delete>, 'delete_many': <function EntityExtras.delete_many>, 'clear': <function EntityExtras.clear>, 'items': <function EntityExtras.items>, 'keys': <function EntityExtras.keys>, '__dict__': <attribute '__dict__' of 'EntityExtras' objects>, '__weakref__': <attribute '__weakref__' of 'EntityExtras' objects>, '__annotations__': {}})#
__init__(entity: Node | Group) None[source]#

Initialize the interface.

__module__ = 'aiida.orm.extras'#
__weakref__#

list of weak references to the object

property all: Dict[str, Any]#

Return the complete extras dictionary.

Warning

While the entity is unstored, this will return references of the extras on the database model, meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned extras will be a deep copy and mutations of the database extras will have to go through the appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys or some values, use the iterators extras_keys and extras_items, or the getters get_extra and get_extra_many instead.

Returns:

the extras as a dictionary

clear() None[source]#

Delete all extras.

delete(key: str) None[source]#

Delete an extra.

Parameters:

key – name of the extra

Raises:

AttributeError – if the extra does not exist

delete_many(keys: List[str]) None[source]#

Delete multiple extras.

Parameters:

keys – names of the extras to delete

Raises:

AttributeError – if at least one of the extra does not exist

get(key: str, default: Any = ()) Any[source]#

Return the value of an extra.

Warning

While the entity is unstored, this will return a reference of the extra on the database model, meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned extra will be a deep copy and mutations of the database extras will have to go through the appropriate set methods.

Parameters:
  • key – name of the extra

  • default – return this value instead of raising if the attribute does not exist

Returns:

the value of the extra

Raises:

AttributeError – if the extra does not exist and no default is specified

get_many(keys: List[str]) List[Any][source]#

Return the values of multiple extras.

Warning

While the entity is unstored, this will return references of the extras on the database model, meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned extras will be a deep copy and mutations of the database extras will have to go through the appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys or some values, use the iterators extras_keys and extras_items, or the getters get_extra and get_extra_many instead.

Parameters:

keys – a list of extra names

Returns:

a list of extra values

Raises:

AttributeError – if at least one extra does not exist

items() Iterable[Tuple[str, Any]][source]#

Return an iterator over the extras.

Returns:

an iterator with extra key value pairs

keys() Iterable[str][source]#

Return an iterator over the extra keys.

Returns:

an iterator with extra keys

reset(extras: Dict[str, Any]) None[source]#

Reset the extras.

Note

This will completely clear any existing extras and replace them with the new dictionary.

Parameters:

extras – a dictionary with the extras to set

Raises:

aiida.common.ValidationError – if any of the keys are invalid, i.e. contain periods

set(key: str, value: Any) None[source]#

Set an extra to the given value.

Parameters:
  • key – name of the extra

  • value – value of the extra

Raises:

aiida.common.ValidationError – if the key is invalid, i.e. contains periods

set_many(extras: Dict[str, Any]) None[source]#

Set multiple extras.

Note

This will override any existing extras that are present in the new dictionary.

Parameters:

extras – a dictionary with the extras to set

Raises:

aiida.common.ValidationError – if any of the keys are invalid, i.e. contain periods

Module which provides decorators for AiiDA ORM entity -> DB field mappings.

class aiida.orm.fields.EntityFieldMeta(name, bases, namespace, /, **kwargs)[source]#

Bases: ABCMeta

A metaclass for entity fields, which adds a fields class attribute.

__annotations__ = {}#
__init__(name, bases, classdict)[source]#
__module__ = 'aiida.orm.fields'#
class aiida.orm.fields.QbArrayField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#

Bases: QbField

An array (list) flavor of QbField.

__dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'An array (`list`) flavor of `QbField`.', 'contains': <function QbArrayField.contains>, 'of_length': <function QbArrayField.of_length>, 'longer': <function QbArrayField.longer>, 'shorter': <function QbArrayField.shorter>, '__dict__': <attribute '__dict__' of 'QbArrayField' objects>, '__weakref__': <attribute '__weakref__' of 'QbArrayField' objects>, '__annotations__': {}})#
__module__ = 'aiida.orm.fields'#
__weakref__#

list of weak references to the object

_backend_key#
_doc#
_dtype#
_is_attribute#
_is_subscriptable#
_key#
contains(value)[source]#

Return a filter for only values containing these items

longer(value: int)[source]#

Return a filter for only array values longer than this length.

of_length(value: int)[source]#

Return a filter for only array values of this length.

shorter(value: int)[source]#

Return a filter for only array values shorter than this length.

class aiida.orm.fields.QbAttrField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#

Bases: QbNumericField, QbArrayField, QbStrField, QbDictField

A generic flavor of QbField covering all operations.

__annotations__ = {}#
__module__ = 'aiida.orm.fields'#
_backend_key#
_doc#
_dtype#
_is_attribute#
_is_subscriptable#
_key#
of_type(value)[source]#

Return a filter for only values of this type.

class aiida.orm.fields.QbDictField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#

Bases: QbField

A dictionary (dict) flavor of QbField.

__annotations__ = {}#
__dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A dictionary (`dict`) flavor of `QbField`.', 'has_key': <function QbDictField.has_key>, '__getitem__': <function QbDictField.__getitem__>, '__dict__': <attribute '__dict__' of 'QbDictField' objects>, '__weakref__': <attribute '__weakref__' of 'QbDictField' objects>, '__annotations__': {}})#
__getitem__(key: str) QbAttrField[source]#

Return a new QbField with a nested key.

__module__ = 'aiida.orm.fields'#
__weakref__#

list of weak references to the object

_backend_key#
_doc#
_dtype#
_is_attribute#
_is_subscriptable#
_key#
has_key(value)[source]#

Return a filter for only values with these keys
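For example, the subscriptable extras field of Group can be used to filter on a nested key, assuming the resulting filter object is passed directly to the QueryBuilder (it exposes an items() view for this purpose); the key and value below are illustrative:

from aiida import load_profile, orm

load_profile()

filters = orm.Group.fields.extras['category'] == 'production'   # illustrative extra key/value
labels = orm.QueryBuilder().append(orm.Group, filters=filters, project='label').all(flat=True)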

class aiida.orm.fields.QbField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#

Bases: object

A field of an ORM entity, accessible via the QueryBuilder

__annotations__ = {}#
__eq__(value)[source]#

Return self==value.

__hash__()[source]#

Return hash(self).

__init__(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False) None[source]#

Initialise an ORM entity field, accessible via the QueryBuilder

Parameters:
  • key – The key of the field on the ORM entity

  • alias – The alias in the storage backend for the key, if not equal to key

  • dtype – The data type of the field. If None, the field is of variable type.

  • doc – A docstring for the field

  • is_attribute – If True, the backend_key property will prepend “attributes.” to the field name

  • is_subscriptable – If True, a new field can be created by field["subkey"]

__module__ = 'aiida.orm.fields'#
__ne__(value)[source]#

Return self!=value.

__repr__() str[source]#

Return repr(self).

__slots__ = ('_key', '_backend_key', '_doc', '_dtype', '_is_attribute', '_is_subscriptable')#
__str__() str[source]#

Return str(self).

_backend_key#
_doc#
_dtype#
_get_dtype_as_str() str[source]#

Return field type as processed string.

>>> None -> ?
>>> str -> str
>>> typing.Optional[str] -> Optional[str]
>>> typing.Dict[typing.List[str]] -> Dict[List[str]]
_is_attribute#
_is_subscriptable#
_key#
property annotation: Any | None#

Return the full type annotation.

property backend_key: str#
property doc: str#
property dtype: Any | None#

Return the primitive root type.

in_(value: Iterable[Any])[source]#

Return a filter for only values in the list

property is_attribute: bool#
property is_subscriptable: bool#
property key: str#
class aiida.orm.fields.QbFieldArguments[source]#

Bases: TypedDict

__annotations__ = {'alias': typing.Optional[str], 'doc': <class 'str'>, 'dtype': typing.Optional[typing.Any], 'is_attribute': <class 'bool'>, 'is_subscriptable': <class 'bool'>, 'key': <class 'str'>}#
__dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__annotations__': {'key': <class 'str'>, 'alias': typing.Optional[str], 'dtype': typing.Optional[typing.Any], 'doc': <class 'str'>, 'is_attribute': <class 'bool'>, 'is_subscriptable': <class 'bool'>}, '__orig_bases__': (<function TypedDict>,), '__dict__': <attribute '__dict__' of 'QbFieldArguments' objects>, '__weakref__': <attribute '__weakref__' of 'QbFieldArguments' objects>, '__doc__': None, '__required_keys__': frozenset({'is_subscriptable', 'dtype', 'doc', 'key', 'alias', 'is_attribute'}), '__optional_keys__': frozenset(), '__total__': True})#
__module__ = 'aiida.orm.fields'#
__optional_keys__ = frozenset({})#
__orig_bases__ = (<function TypedDict>,)#
__required_keys__ = frozenset({'alias', 'doc', 'dtype', 'is_attribute', 'is_subscriptable', 'key'})#
__total__ = True#
__weakref__#

list of weak references to the object

alias: str | None#
doc: str#
dtype: Any | None#
is_attribute: bool#
is_subscriptable: bool#
key: str#
class aiida.orm.fields.QbFieldFilters(filters: Sequence[Tuple[QbField, str, Any]] | dict)[source]#

Bases: object

A representation of a list of fields and their comparators.

_(filters)[source]#
__and__(other: QbFieldFilters) QbFieldFilters[source]#

a & b -> {‘and’: [a.filters, b.filters]}.

__contains__(key: str) bool[source]#
__eq__(other: object) bool[source]#

a == b checks if a.filters == b.filters.

__getitem__(key: str) Any[source]#
__hash__ = None#
__init__(filters: Sequence[Tuple[QbField, str, Any]] | dict)[source]#
__invert__() QbFieldFilters[source]#

~(a > b) -> a !> b; ~(a !> b) -> a > b

__module__ = 'aiida.orm.fields'#
__or__(other: QbFieldFilters) QbFieldFilters[source]#

a | b -> {‘or’: [a.filters, b.filters]}.

__repr__() str[source]#

Return repr(self).

__slots__ = ('filters',)#
_resolve_redundancy(other: QbFieldFilters, logical: str) QbFieldFilters | None[source]#

Resolve redundant filters and nested logical operators.

add_filters(filters: dict)[source]#
add_filters(filters: tuple)
add_filters(filters: list)
as_dict() Dict[str, Any][source]#

Return the filters dictionary.

filters: Dict[str, Any]#
items()[source]#

Return an items view of the filters for use in the QueryBuilder.

class aiida.orm.fields.QbFields(fields: Dict[str, QbField] | None = None)[source]#

Bases: object

A readonly class for mapping attributes to database fields of an AiiDA entity.

__contains__(key: str) bool[source]#

Return whether the field key exists

__dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A readonly class for mapping attributes to database fields of an AiiDA entity.', '__isabstractmethod__': False, '__init__': <function QbFields.__init__>, '__repr__': <function QbFields.__repr__>, '__str__': <function QbFields.__str__>, '__getitem__': <function QbFields.__getitem__>, '__getattr__': <function QbFields.__getattr__>, '__contains__': <function QbFields.__contains__>, '__len__': <function QbFields.__len__>, '__iter__': <function QbFields.__iter__>, '__dir__': <function QbFields.__dir__>, '_dict': <property object>, '__dict__': <attribute '__dict__' of 'QbFields' objects>, '__weakref__': <attribute '__weakref__' of 'QbFields' objects>, '__annotations__': {}})#
__dir__()[source]#

Return keys for tab completion.

__getattr__(key: str) QbField[source]#

Return a QbField by key.

__getitem__(key: str) QbField[source]#

Return a QbField by key.

__init__(fields: Dict[str, QbField] | None = None)[source]#
__isabstractmethod__ = False#
__iter__()[source]#

Iterate through the field keys

__len__() int[source]#

Return the number of fields

__module__ = 'aiida.orm.fields'#
__repr__() str[source]#

Return repr(self).

__str__() str[source]#

Return str(self).

__weakref__#

list of weak references to the object

property _dict#

Return a copy of the internal mapping

class aiida.orm.fields.QbNumericField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#

Bases: QbField

A numeric (int, float, datetime) flavor of QbField.

__annotations__ = {}#
__dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A numeric (`int`, `float`, `datetime`) flavor of `QbField`.', '__lt__': <function QbNumericField.__lt__>, '__le__': <function QbNumericField.__le__>, '__gt__': <function QbNumericField.__gt__>, '__ge__': <function QbNumericField.__ge__>, '__dict__': <attribute '__dict__' of 'QbNumericField' objects>, '__weakref__': <attribute '__weakref__' of 'QbNumericField' objects>, '__annotations__': {}})#
__ge__(value)[source]#

Return self>=value.

__gt__(value)[source]#

Return self>value.

__le__(value)[source]#

Return self<=value.

__lt__(value)[source]#

Return self<value.

__module__ = 'aiida.orm.fields'#
__weakref__#

list of weak references to the object

_backend_key#
_doc#
_dtype#
_is_attribute#
_is_subscriptable#
_key#
class aiida.orm.fields.QbStrField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#

Bases: QbField

A string (str) flavor of QbField.

__annotations__ = {}#
__dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A string (`str`) flavor of `QbField`.', 'like': <function QbStrField.like>, 'ilike': <function QbStrField.ilike>, '__dict__': <attribute '__dict__' of 'QbStrField' objects>, '__weakref__': <attribute '__weakref__' of 'QbStrField' objects>, '__annotations__': {}})#
__module__ = 'aiida.orm.fields'#
__weakref__#

list of weak references to the object

_backend_key#
_doc#
_dtype#
_is_attribute#
_is_subscriptable#
_key#
ilike(value: str)[source]#

Return a filter for only string values matching the (case-insensitive) wildcard string.

  • The percent sign (%) represents zero, one, or multiple characters

  • The underscore sign (_) represents a single character

like(value: str)[source]#

Return a filter for only string values matching the wildcard string.

  • The percent sign (%) represents zero, one, or multiple characters

  • The underscore sign (_) represents a single character
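For example, string and numeric field filters can be combined with & and | (see QbFieldFilters) and passed to the QueryBuilder, assuming it accepts such filter objects directly; the pattern and threshold below are illustrative:

from aiida import load_profile, orm

load_profile()

filters = orm.Computer.fields.label.like('gpu%') & (orm.Computer.fields.pk > 10)
labels = orm.QueryBuilder().append(orm.Computer, filters=filters, project='label').all(flat=True)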

aiida.orm.fields.add_field(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False) QbField[source]#

Add a dtype-dependent QbField representation of a field.

Parameters:
  • key – The key of the field on the ORM entity

  • alias – The alias in the storage backend for the key, if not equal to key

  • dtype – The data type of the field. If None, the field is of variable type.

  • doc – A docstring for the field

  • is_attribute – If True, the backend_key property will prepend “attributes.” to the field name

  • is_subscriptable – If True, a new field can be created by field["subkey"]

aiida.orm.fields.extract_root_type(dtype: Any) Any[source]#

Recursively search for the primitive root type.

>>> extract_root_type(List[str]) -> list
>>> extract_root_type(Optional[List[str]]) -> list

AiiDA Group entities

class aiida.orm.groups.AutoGroup(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#

Bases: Group

Group used to contain the nodes generated while autogrouping is enabled.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.groups'#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
fields: QbFields = {'description': 'QbStrField(description) -> str',  'extras': 'QbDictField(extras.*) -> Dict[str, Any]',  'label': 'QbStrField(label) -> str',  'pk': 'QbNumericField(pk) -> int',  'time': 'QbStrField(time) -> str',  'type_string': 'QbStrField(type_string) -> str',  'user_pk': 'QbNumericField(user_pk) -> int',  'uuid': 'QbStrField(uuid) -> str'}#
class aiida.orm.groups.Group(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#

Bases: Entity[BackendGroup, GroupCollection]

An AiiDA ORM implementation of group of nodes.

_CLS_COLLECTION#

alias of GroupCollection

__abstractmethods__ = frozenset({})#
__annotations__ = {'_CLS_COLLECTION': 'Type[CollectionType]', '_Group__type_string': typing.ClassVar[typing.Optional[str]], '__qb_fields__': 'Sequence[QbField]', '__type_string': 'ClassVar[Optional[str]]', 'fields': 'QbFields'}#
__getattr__(name: str) Any[source]#

This method is called when an extras is not found in the instance.

It allows for the handling of deprecated mixin methods.

__init__(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#

Create a new group. Either pass a dbgroup parameter, to reload a group from the DB (and then, no further parameters are allowed), or pass the parameters for the Group creation.

Parameters:
  • label – The group label, required on creation

  • description – The group description (by default, an empty string)

  • user – The owner of the group (by default, the automatic user)

  • type_string – a string identifying the type of group (by default, an empty string, indicating a user-defined group)

__module__ = 'aiida.orm.groups'#
__orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendGroup'), aiida.orm.groups.GroupCollection],)#
__parameters__ = ()#
__qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('type_string', dtype=str, is_attribute=False), QbStrField('label', dtype=str, is_attribute=False), QbStrField('description', dtype=str, is_attribute=False), QbStrField('time', dtype=str, is_attribute=False), QbDictField('extras', dtype=Dict[str, Any], is_attribute=False, is_subscriptable=True), QbNumericField('user_pk', dtype=int, is_attribute=False)]#
__repr__() str[source]#

Return repr(self).

__str__() str[source]#

Return str(self).

__type_string: ClassVar[str | None] = 'core'#
_abc_impl = <_abc._abc_data object>#
_deprecated_extra_methods = {'clear_extras': 'clear', 'delete_extra': 'delete', 'delete_extra_many': 'delete_many', 'extras': 'all', 'extras_items': 'items', 'extras_keys': 'keys', 'get_extra': 'get', 'get_extra_many': 'get_many', 'reset_extras': 'reset', 'set_extra': 'set', 'set_extra_many': 'set_many'}#
_type_string = 'core'#
add_nodes(nodes: Node | Sequence[Node]) None[source]#

Add a node or a set of nodes to the group.

Note:

all the nodes and the group itself have to be stored.

Parameters:

nodes – a single Node or a list of Nodes
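
As an illustrative sketch (not part of the API reference): adding stored nodes to a stored group could look as follows, where the group label and the use of Int nodes are placeholders:

from aiida.orm import Group, Int

# Both the group and the nodes must be stored before calling add_nodes.
group = Group(label='my-results').store()
nodes = [Int(value).store() for value in range(3)]

group.add_nodes(nodes)            # add several nodes at once
group.add_nodes(Int(42).store())  # or a single node
print(group.count())              # -> 4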

property base: GroupBase#

Return the group base namespace.

clear() None[source]#

Remove all the nodes from this group.

count() int[source]#

Return the number of entities in this group.

Returns:

integer number of entities contained within the group

property description: str#
Returns:

the description of the group as a string

entry_point = EntryPoint(name='core', value='aiida.orm.groups:Group', group='aiida.groups')#
fields: QbFields = {'description': 'QbStrField(description) -> str',  'extras': 'QbDictField(extras.*) -> Dict[str, Any]',  'label': 'QbStrField(label) -> str',  'pk': 'QbNumericField(pk) -> int',  'time': 'QbStrField(time) -> str',  'type_string': 'QbStrField(type_string) -> str',  'user_pk': 'QbNumericField(user_pk) -> int',  'uuid': 'QbStrField(uuid) -> str'}#
property is_empty: bool#

Return whether the group is empty, i.e. it does not contain any nodes.

Returns:

True if it contains no nodes, False otherwise

is_user_defined() bool[source]#
Returns:

True if the group is user defined, False otherwise

property label: str#
Returns:

the label of the group as a string

property nodes: ConvertIterator#

Return a generator/iterator that iterates over all nodes and returns the respective AiiDA subclasses of Node; it also allows asking for the number of nodes in the group using len().

remove_nodes(nodes: Node | Sequence[Node]) None[source]#

Remove a node or a set of nodes from the group.

Note:

all the nodes and the group itself have to be stored.

Parameters:

nodes – a single Node or a list of Nodes

store() SelfType[source]#

Verify that the group is allowed to be stored, which is the case as long as type_string is set.

property type_string: str#
Returns:

the string defining the type of the group

property user: User#
Returns:

the user associated with this group

property uuid: str#

Return the UUID for this group.

This identifier is unique across all entity types and backend instances.

Returns:

the entity uuid

class aiida.orm.groups.GroupBase(group: Group)[source]#

Bases: object

A namespace for group related functionality, that is not directly related to its user-facing properties.

__dict__ = mappingproxy({'__module__': 'aiida.orm.groups', '__doc__': 'A namespace for group related functionality, that is not directly related to its user-facing properties.', '__init__': <function GroupBase.__init__>, 'extras': <functools.cached_property object>, '__dict__': <attribute '__dict__' of 'GroupBase' objects>, '__weakref__': <attribute '__weakref__' of 'GroupBase' objects>, '__annotations__': {'_group': "'Group'"}})#
__init__(group: Group) None[source]#

Construct a new instance of the base namespace.

__module__ = 'aiida.orm.groups'#
__weakref__#

list of weak references to the object

property extras: EntityExtras#

Return the extras of this group.

class aiida.orm.groups.GroupCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: Collection[Group]

Collection of Groups

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.groups'#
__orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Group')],)#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
static _entity_base_cls() Type[Group][source]#

The allowed entity class or subclasses thereof.

delete(pk: int) None[source]#

Delete a group

Parameters:

pk – the id of the group to delete

get_or_create(label: str | None = None, **kwargs) Tuple[Group, bool][source]#

Try to retrieve a group from the DB with the given arguments; create (and store) a new group if such a group was not present yet.

Parameters:

label – group label

Returns:

(group, created) where group is the group (new or existing, in any case already stored) and created is a boolean saying whether the group was created
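
A minimal usage sketch, assuming Group.collection resolves to the GroupCollection of the loaded profile (the label is a placeholder):

from aiida.orm import Group

group, created = Group.collection.get_or_create(label='my-results')
print(group.pk, group.label, created)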

class aiida.orm.groups.ImportGroup(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#

Bases: Group

Group to be used to contain all nodes from an export archive that has been imported.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.groups'#
__parameters__ = ()#
__type_string: ClassVar[str | None]#
_abc_impl = <_abc._abc_data object>#
fields: QbFields = {'description': 'QbStrField(description) -> str',  'extras': 'QbDictField(extras.*) -> Dict[str, Any]',  'label': 'QbStrField(label) -> str',  'pk': 'QbNumericField(pk) -> int',  'time': 'QbStrField(time) -> str',  'type_string': 'QbStrField(type_string) -> str',  'user_pk': 'QbNumericField(user_pk) -> int',  'uuid': 'QbStrField(uuid) -> str'}#
class aiida.orm.groups.UpfFamily(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#

Bases: Group

Group that represents a pseudo potential family containing UpfData nodes.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.groups'#
__parameters__ = ()#
__type_string: ClassVar[str | None]#
_abc_impl = <_abc._abc_data object>#
fields: QbFields = {'description': 'QbStrField(description) -> str',  'extras': 'QbDictField(extras.*) -> Dict[str, Any]',  'label': 'QbStrField(label) -> str',  'pk': 'QbNumericField(pk) -> int',  'time': 'QbStrField(time) -> str',  'type_string': 'QbStrField(type_string) -> str',  'user_pk': 'QbNumericField(user_pk) -> int',  'uuid': 'QbStrField(uuid) -> str'}#
aiida.orm.groups.load_group_class(type_string: str) Type[Group][source]#

Load the sub class of Group that corresponds to the given type_string.

Note

will fall back on aiida.orm.groups.Group if type_string cannot be resolved to a loadable entry point.

Parameters:

type_string – the entry point name of the Group sub class

Returns:

sub class of Group registered through an entry point
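
A short sketch of dynamic loading; only the 'core' entry point name is confirmed by the entry_point attribute of Group above, and the fallback behaviour follows the note on unresolved type strings:

from aiida.orm.groups import load_group_class

group_cls = load_group_class('core')          # resolves to Group
group = group_cls(label='dynamically-created')

# An unregistered type string falls back to Group, as noted above.
fallback_cls = load_group_class('some.unregistered.type')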

Module for orm logging abstract classes

class aiida.orm.logs.Log(time: datetime, loggername: str, levelname: str, dbnode_id: int, message: str = '', metadata: Dict[str, Any] | None = None, backend: StorageBackend | None = None)[source]#

Bases: Entity[BackendLog, LogCollection]

An AiiDA Log entity. Corresponds to a logged message against a particular AiiDA node.

_CLS_COLLECTION#

alias of LogCollection

__abstractmethods__ = frozenset({})#
__init__(time: datetime, loggername: str, levelname: str, dbnode_id: int, message: str = '', metadata: Dict[str, Any] | None = None, backend: StorageBackend | None = None)[source]#

Construct a new log

Parameters:
  • time – time

  • loggername – name of logger

  • levelname – name of log level

  • dbnode_id – id of database node

  • message – log message

  • metadata – metadata

  • backend – database backend

__module__ = 'aiida.orm.logs'#
__orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendLog'), aiida.orm.logs.LogCollection],)#
__parameters__ = ()#
__qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('loggername', dtype=str, is_attribute=False), QbStrField('levelname', dtype=str, is_attribute=False), QbStrField('message', dtype=str, is_attribute=False), QbNumericField('time', dtype=datetime, is_attribute=False), QbDictField('metadata', dtype=Dict[str, Any], is_attribute=False), QbNumericField('node_pk', dtype=int, is_attribute=False)]#
_abc_impl = <_abc._abc_data object>#
property dbnode_id: int#

Get the id of the object that created the log entry

Returns:

The id of the object that created the log entry

fields: QbFields = {'levelname': 'QbStrField(levelname) -> str',  'loggername': 'QbStrField(loggername) -> str',  'message': 'QbStrField(message) -> str',  'metadata': 'QbDictField(metadata) -> Dict[str, Any]',  'node_pk': 'QbNumericField(node_pk) -> int',  'pk': 'QbNumericField(pk) -> int',  'time': 'QbNumericField(time) -> datetime',  'uuid': 'QbStrField(uuid) -> str'}#
property levelname: str#

The name of the log level

Returns:

The entry log level name

property loggername: str#

The name of the logger that created this entry

Returns:

The entry loggername

property message: str#

Get the message corresponding to the entry

Returns:

The entry message

property metadata: Dict[str, Any]#

Get the metadata corresponding to the entry

Returns:

The entry metadata

property time: datetime#

Get the time corresponding to the entry

Returns:

The entry timestamp

property uuid: str#

Return the UUID for this log.

This identifier is unique across all entity types and backend instances.

Returns:

the entity uuid

class aiida.orm.logs.LogCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: Collection[Log]

This class represents the collection of logs and can be used to create and retrieve logs.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.logs'#
__orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Log')],)#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
static _entity_base_cls() Type[Log][source]#

The allowed entity class or subclasses thereof.

create_entry_from_record(record: LogRecord) Log | None[source]#

Helper function to create a log entry from a record created by the python logging library

Parameters:

record – The record created by the logging module

Returns:

A stored log instance

delete(pk: int) None[source]#

Remove a Log entry from the collection with the given id

Parameters:

pk – id of the Log to delete

Raises:

NotExistent – if Log with ID pk is not found

delete_all() None[source]#

Delete all Logs in the collection

Raises:

IntegrityError – if all Logs could not be deleted

delete_many(filters: FilterType) List[int][source]#

Delete Logs based on filters

Parameters:

filters – filters to pass to the QueryBuilder

Returns:

(former) PKs of the deleted Logs

Raises:
get_logs_for(entity: Node, order_by: OrderByType | None = None) List[Log][source]#

Get all the log messages for a given node and optionally sort

Parameters:
  • entity – the entity to get logs for

  • order_by – a list of (key, direction) pairs specifying the sort order

Returns:

the list of log entries

aiida.orm.logs.OrderSpecifier(field, direction)[source]#
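
For illustration, a hedged sketch combining OrderSpecifier with LogCollection.get_logs_for; it assumes Log.collection gives the LogCollection of the loaded profile, that 'time' is a valid field name and 'desc' a valid direction, and that pk 1234 is a placeholder for a stored node:

from aiida.orm import Log, load_node
from aiida.orm.logs import OrderSpecifier

node = load_node(1234)
logs = Log.collection.get_logs_for(node, order_by=[OrderSpecifier('time', 'desc')])
for entry in logs:
    print(entry.time, entry.levelname, entry.message)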

The QueryBuilder: A class that allows you to query the AiiDA database, independent of the backend. Note that the backend implementation is enforced and handled with a composition model! QueryBuilder() is the frontend class that the user can use. It inherits from object and contains backend-specific functionality. Backend-specific functionality is provided by the implementation classes.

These inherit from aiida.orm.implementation.querybuilder.BackendQueryBuilder(), an interface class which enforces the implementation of its defined methods. An instance of one of the implementation classes becomes a member of the QueryBuilder() instance when instantiated by the user.

class aiida.orm.querybuilder.Classifier(ormclass_type_string: str, process_type_string: str | None = None)[source]#

Bases: NamedTuple

A classifier for an entity.

__annotations__ = {'ormclass_type_string': ForwardRef('str'), 'process_type_string': ForwardRef('Optional[str]')}#
__getnewargs__()#

Return self as a plain tuple. Used by copy and pickle.

__match_args__ = ('ormclass_type_string', 'process_type_string')#
__module__ = 'aiida.orm.querybuilder'#
static __new__(_cls, ormclass_type_string: str, process_type_string: str | None = None)#

Create new instance of Classifier(ormclass_type_string, process_type_string)

__orig_bases__ = (<function NamedTuple>,)#
__repr__()#

Return a nicely formatted representation string

__slots__ = ()#
_asdict()#

Return a new dict which maps field names to their values.

_field_defaults = {'process_type_string': None}#
_fields = ('ormclass_type_string', 'process_type_string')#
classmethod _make(iterable)#

Make a new Classifier object from a sequence or iterable

_replace(**kwds)#

Return a new Classifier object replacing specified fields with new values

ormclass_type_string: str#

Alias for field number 0

process_type_string: str | None#

Alias for field number 1

class aiida.orm.querybuilder.QueryBuilder(backend: 'StorageBackend' | None = None, *, debug: bool | None = None, path: Sequence[str | Dict[str, Any] | EntityClsType] | None = (), filters: Dict[str, FilterType] | None = None, project: Dict[str, ProjectType] | None = None, limit: int | None = None, offset: int | None = None, order_by: OrderByType | None = None, distinct: bool = False, project_map: Dict[str, Dict[str, str]] | None = None)[source]#

Bases: object

The class to query the AiiDA database.

Usage:

from aiida.orm import Node
from aiida.orm.querybuilder import QueryBuilder

qb = QueryBuilder()
# Querying nodes:
qb.append(Node)
# Retrieving the results:
results = qb.all()

_EDGE_TAG_DELIM = '--'#
_VALID_PROJECTION_KEYS = ('func', 'cast')#
__deepcopy__(memo) QueryBuilder[source]#

Create deep copy of the instance.

__dict__ = mappingproxy({'__module__': 'aiida.orm.querybuilder', '__doc__': 'The class to query the AiiDA database.\n\n    Usage::\n\n        from aiida.orm.querybuilder import QueryBuilder\n        qb = QueryBuilder()\n        # Querying nodes:\n        qb.append(Node)\n        # retrieving the results:\n        results = qb.all()\n\n    ', '_EDGE_TAG_DELIM': '--', '_VALID_PROJECTION_KEYS': ('func', 'cast'), '__init__': <function QueryBuilder.__init__>, 'backend': <property object>, 'as_dict': <function QueryBuilder.as_dict>, 'queryhelp': <property object>, 'from_dict': <classmethod(<function QueryBuilder.from_dict>)>, '__repr__': <function QueryBuilder.__repr__>, '__str__': <function QueryBuilder.__str__>, '__deepcopy__': <function QueryBuilder.__deepcopy__>, 'get_used_tags': <function QueryBuilder.get_used_tags>, '_get_unique_tag': <function QueryBuilder._get_unique_tag>, 'append': <function QueryBuilder.append>, '_init_project_map': <function QueryBuilder._init_project_map>, 'order_by': <function QueryBuilder.order_by>, 'add_filter': <function QueryBuilder.add_filter>, '_process_filters': <staticmethod(<function QueryBuilder._process_filters>)>, '_add_node_type_filter': <function QueryBuilder._add_node_type_filter>, '_add_process_type_filter': <function QueryBuilder._add_process_type_filter>, '_add_group_type_filter': <function QueryBuilder._add_group_type_filter>, 'add_projection': <function QueryBuilder.add_projection>, 'set_debug': <function QueryBuilder.set_debug>, 'debug': <function QueryBuilder.debug>, 'limit': <function QueryBuilder.limit>, 'offset': <function QueryBuilder.offset>, 'distinct': <function QueryBuilder.distinct>, 'inputs': <function QueryBuilder.inputs>, 'outputs': <function QueryBuilder.outputs>, 'children': <function QueryBuilder.children>, 'parents': <function QueryBuilder.parents>, 'as_sql': <function QueryBuilder.as_sql>, 'analyze_query': <function QueryBuilder.analyze_query>, '_get_aiida_entity_res': <staticmethod(<function QueryBuilder._get_aiida_entity_res>)>, 'first': <function QueryBuilder.first>, 'count': <function QueryBuilder.count>, 'iterall': <function QueryBuilder.iterall>, 'iterdict': <function QueryBuilder.iterdict>, 'all': <function QueryBuilder.all>, 'one': <function QueryBuilder.one>, 'dict': <function QueryBuilder.dict>, '__dict__': <attribute '__dict__' of 'QueryBuilder' objects>, '__weakref__': <attribute '__weakref__' of 'QueryBuilder' objects>, '__annotations__': {'_impl': 'BackendQueryBuilder', '_path': 'List[PathItemType]', '_filters': 'Dict[str, Dict[str, Any]]', '_projections': 'Dict[str, List[Dict[str, Dict[str, Any]]]]', '_project_map': 'Dict[str, Dict[str, str]]', '_order_by': 'List[Dict[str, List[Dict[str, Dict[str, str]]]]]', '_limit': 'Optional[int]', '_offset': 'Optional[int]', '_distinct': 'bool'}})#
__init__(backend: 'StorageBackend' | None = None, *, debug: bool | None = None, path: Sequence[str | Dict[str, Any] | EntityClsType] | None = (), filters: Dict[str, FilterType] | None = None, project: Dict[str, ProjectType] | None = None, limit: int | None = None, offset: int | None = None, order_by: OrderByType | None = None, distinct: bool = False, project_map: Dict[str, Dict[str, str]] | None = None) None[source]#

Instantiate a QueryBuilder instance.

Which backend is used is decided here, based on the backend settings taken from the user profile; this cannot currently be overridden by the user. A minimal constructor sketch follows the parameter list below.

Parameters:
  • debug – Turn on debug mode. This feature prints information on the screen about the stages of the QueryBuilder. Does not affect results.

  • path – A list of the vertices to traverse. Leave empty if you plan on using the method QueryBuilder.append().

  • filters – The filters to apply. You can specify the filters here, when appending to the query using QueryBuilder.append() or even later using QueryBuilder.add_filter(). The latter gives API details.

  • project – The projections to apply. You can specify the projections here, when appending to the query using QueryBuilder.append() or even later using QueryBuilder.add_projection(). The latter gives API details.

  • limit – Limit the number of rows to this number. Check QueryBuilder.limit() for more information.

  • offset – Set an offset for the results returned. Details in QueryBuilder.offset().

  • order_by – How to order the results. Like the two parameters above, this can also be set at a later stage; check QueryBuilder.order_by() for more information.

  • distinct – Whether to return de-duplicated rows

  • project_map – A mapping of the projection input-keys to the output-keys of dict/iterdict
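
As a minimal constructor sketch: filters, projections and ordering are keyed by tag, so they are usually easier to add via append(); the query below is only a placeholder:

from aiida.orm import Node, QueryBuilder

# Path elements may be ORM classes; this is equivalent to qb.append(Node).
qb = QueryBuilder(path=[Node], limit=10, distinct=True)
results = qb.all(flat=True)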

__module__ = 'aiida.orm.querybuilder'#
__repr__() str[source]#

Return an unambiguous string representation of the instance.

__str__() str[source]#

Return a readable string representation of the instance.

__weakref__#

list of weak references to the object

_add_group_type_filter(tagspec: str, classifiers: List[Classifier], subclassing: bool) None[source]#

Add a filter based on group type.

Parameters:
  • tagspec – The tag, which has to exist already as a key in self._filters

  • classifiers – a dictionary with classifiers

  • subclassing – if True, allow for subclasses of the ormclass

_add_node_type_filter(tagspec: str, classifiers: List[Classifier], subclassing: bool)[source]#

Add a filter based on node type.

Parameters:
  • tagspec – The tag, which has to exist already as a key in self._filters

  • classifiers – a dictionary with classifiers

  • subclassing – if True, allow for subclasses of the ormclass

_add_process_type_filter(tagspec: str, classifiers: List[Classifier], subclassing: bool) None[source]#

Add a filter based on process type.

Parameters:
  • tagspec – The tag, which has to exist already as a key in self._filters

  • classifiers – a dictionary with classifiers

  • subclassing – if True, allow for subclasses of the process type

Note: This function handles the case when process_type_string is None.

static _get_aiida_entity_res(value) Any[source]#

Convert a projected query result to front end class if it is an instance of a BackendEntity.

Values that are not an BackendEntity instance will be returned unaltered

Parameters:

value – a projected query result to convert

Returns:

the converted value

_get_unique_tag(classifiers: List[Classifier]) str[source]#

Using the function get_tag_from_type, I get a tag. I increment an index that is appended to that tag until I have an unused tag. This function is called in QueryBuilder.append() when no tag is given.

Parameters:

classifiers (dict) –

Classifiers, containing the string that defines the type of the AiiDA ORM class. For subclasses of Node this is the Node._plugin_type_string, for others they are as returned by QueryBuilder._get_ormclass().

Can also be a list of dictionaries, when multiple classes are passed to QueryBuilder.append

Returns:

A tag as a string (it is a single string also when passing multiple classes).

_init_project_map(project_map: Dict[str, Dict[str, str]]) None[source]#

Set the project map.

Note, this is a private method, since the user should not override what is set by projected QbFields.

Parameters:

project_map (dict) – The project map.

static _process_filters(filters: Dict[str, Any] | QbFieldFilters) Dict[str, Any][source]#

Process filters.

add_filter(tagspec: str | EntityClsType, filter_spec: FilterType) QueryBuilder[source]#

Adding a filter to my filters.

Parameters:
  • tagspec – A tag string or an ORM class which maps to an existing tag

  • filter_spec – The specifications for the filter, has to be a dictionary

Usage:

qb = QueryBuilder()          # Instantiating the QueryBuilder instance
qb.append(Node, tag='node')  # Appending a Node
# Let's add some filters:
qb.add_filter('node', {'id': {'>': 12}})
# Two filters together:
qb.add_filter('node', {'label': 'foo', 'uuid': {'like': 'ab%'}})
# Now overriding the first filter that was set:
qb.add_filter('node', {'id': 13})

add_projection(tag_spec: str | EntityClsType, projection_spec: ProjectType) None[source]#

Adds a projection

Parameters:
  • tag_spec – A tag string or an ORM class which maps to an existing tag

  • projection_spec – The specification for the projection. A projection is a list of dictionaries, with each dictionary containing key-value pairs where the key is a database entity (e.g. a column or an attribute) and the value is (optional) additional information on how to process this database entity.

If the given projection_spec is not a list, it will be expanded to a list. If the list items are not dictionaries but strings (i.e. no additional processing of the projected results is desired), they will be expanded to dictionaries.

Usage:

qb = QueryBuilder()
qb.append(StructureData, tag='struc')

# Will project the uuid and the kinds
qb.add_projection('struc', ['uuid', 'attributes.kinds'])

The above example will project the uuid and the kinds-attribute of all matching structures. There are 2 (so far) special keys.

The single star * will project the ORM-instance:

qb = QueryBuilder()
qb.append(StructureData, tag='struc')
# Will project the ORM instance
qb.add_projection('struc', '*')
print(type(qb.first()[0]))
# >>> aiida.orm.nodes.data.structure.StructureData

The double star ** projects all possible projections of this entity:

QueryBuilder().append(StructureData, tag='s', project='**').limit(1).dict()[0]['s'].keys()

# >>> 'user_id, description, ctime, label, extras, mtime, id, attributes, dbcomputer_id, type, uuid'

Be aware that the result of ** depends on the backend implementation.

all(batch_size: int | None = None, flat: bool = False) List[List[Any]] | List[Any][source]#

Executes the full query with the order of the rows as returned by the backend.

The order inside each row is given by the order of the vertices in the path and the order of the projections for each vertex in the path.

Parameters:
  • batch_size – the size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter. Leave the default None if speed is not critical or if you don’t know what you’re doing.

  • flat – return the result as a flat list of projected entities without sub lists.

Returns:

a list of lists of all projected entities.
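
A small sketch contrasting the nested default with the flattened form (the query is a placeholder with a single projection):

from aiida.orm import Node, QueryBuilder

qb = QueryBuilder()
qb.append(Node, project=['uuid'])

rows = qb.all()            # [['<uuid-1>'], ['<uuid-2>'], ...] one inner list per row
uuids = qb.all(flat=True)  # ['<uuid-1>', '<uuid-2>', ...]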

analyze_query(execute: bool = True, verbose: bool = False) str[source]#

Return the query plan, i.e. a list of SQL statements that will be executed.

See: https://www.postgresql.org/docs/11/sql-explain.html

Params execute:

Carry out the command and show actual run times and other statistics.

Params verbose:

Display additional information regarding the plan.

append(cls: EntityClsType | Sequence[EntityClsType] | None = None, entity_type: str | Sequence[str] | None = None, tag: str | None = None, filters: FilterType | None = None, project: ProjectType | None = None, subclassing: bool = True, edge_tag: str | None = None, edge_filters: FilterType | None = None, edge_project: ProjectType | None = None, outerjoin: bool = False, joining_keyword: str | None = None, joining_value: Any | None = None, orm_base: str | None = None, **kwargs: Any) QueryBuilder[source]#

Any iterative procedure to build the path for a graph query needs to invoke this method to append to the path.

Parameters:
  • cls

    The AiiDA class (or backend class) defining the appended vertex. Also supports a tuple/list of classes. This results in all instances of these classes being accepted in a query. However, the classes have to share the same ORM base class for the joining to work, i.e. both have to be subclasses of Node. Valid is:

    cls=(StructureData, Dict)
    

    This is invalid:

    cls=(Group, Node)

  • entity_type – The node type of the class, if cls is not given. Also here, a tuple or list is accepted.

  • tag – A unique tag. If none is given, I will create a unique tag myself.

  • filters – Filters to apply for this vertex. See add_filter(), the method invoked in the background, or usage examples for details.

  • project – Projections to apply. See usage examples for details. More information also in add_projection().

  • subclassing – Whether to include subclasses of the given class (default True). E.g. Specifying a ProcessNode as cls will include CalcJobNode, WorkChainNode, CalcFunctionNode, etc..

  • edge_tag – The tag that the edge will get. If nothing is specified (and there is a meaningful edge) the default is tag1--tag2, with tag1 being the entity joining from and tag2 being the entity joining to (this entity).

  • edge_filters – The filters to apply on the edge. Also here, details in add_filter().

  • edge_project – The project from the edges. API-details in add_projection().

  • outerjoin – If True, (default is False), will do a left outerjoin instead of an inner join

Joining can be specified in two ways:

  • Specifying the ‘joining_keyword’ and ‘joining_value’ arguments

  • Specify a single keyword argument

The joining keyword will be with_* or direction, depending on the joining entity type. The joining value is the tag name or class of the entity to join to.

A small usage example how this can be invoked:

qb = QueryBuilder()             # Instantiating empty querybuilder instance
qb.append(cls=StructureData)    # First item is StructureData node
# The next node in the path is a PwCalculation,
# with the structure joined as an input
qb.append(
    cls=PwCalculation,
    with_incoming=StructureData
)

Returns:

self

as_dict(copy: bool = True) QueryDictType[source]#

Convert to a JSON serialisable dictionary representation of the query.

as_sql(inline: bool = False) str[source]#

Convert the query to an SQL string representation.

Warning

This method should be used for debugging purposes only, since normally sqlalchemy will handle this process internally.

Params inline:

Inline bound parameters (this is normally handled by the Python DB-API).

property backend: StorageBackend#

Return the backend used by the QueryBuilder.

children(**kwargs: Any) QueryBuilder[source]#

Join to children/descendants of the previous vertex in the path.

Returns:

self

count() int[source]#

Counts the number of rows returned by the backend.

Returns:

the number of rows as an integer

debug(msg: str, *objects: Any) None[source]#

Log debug message.

objects will be passed to the format string, e.g. msg % objects

dict(batch_size: int | None = None) List[Dict[str, Dict[str, Any]]][source]#

Executes the full query with the order of the rows as returned by the backend. The order inside each row is given by the order of the vertices in the path and the order of the projections for each vertex in the path.

Parameters:

batch_size – The size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter. Leave the default (None) if speed is not critical or if you don’t know what you’re doing!

Returns:

A list of dictionaries of all projected entities: tag -> field -> value

Usage:

qb = QueryBuilder()
qb.append(
    StructureData,
    tag='structure',
    filters={'uuid':{'==':myuuid}},
)
qb.append(
    Node,
    with_ancestors='structure',
    project=['entity_type', 'id'],  # returns entity_type (string) and id (string)
    tag='descendant'
)

# Return the dictionaries:
print "qb.iterdict()"
for d in qb.iterdict():
    print '>>>', d

results in the following output:

qb.iterdict()
>>> {'descendant': {
        'entity_type': 'calculation.job.quantumespresso.pw.PwCalculation.',
        'id': 7716}
    }
>>> {'descendant': {
        'entity_type': 'data.remote.RemoteData.',
        'id': 8510}
    }
distinct(value: bool = True) QueryBuilder[source]#

Asks for distinct rows, which is the same as asking the backend to remove duplicates. Does not execute the query!

If you want a distinct query:

qb = QueryBuilder()
# append stuff!
qb.append(...)
qb.append(...)
...
qb.distinct().all() #or
qb.distinct().dict()
Returns:

self

first(flat: Literal[False] = False) list[Any] | None[source]#
first(flat: Literal[True]) Any | None

Return the first result of the query.

Calling first results in an execution of the underlying query.

Note, this may change if several rows are valid for the query, as persistent ordering is not guaranteed unless explicitly specified.

Parameters:

flat – if True, return just the projected quantity if there is just a single projection.

Returns:

One row of results as a list, or None if no result returned.

classmethod from_dict(dct: Dict[str, Any]) QueryBuilder[source]#

Create an instance from a dictionary representation of the query.
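
A hedged round-trip sketch using as_dict() and from_dict(); since as_dict() is documented as JSON serialisable, passing it through json should work, but that is an assumption here:

import json

from aiida.orm import Node, QueryBuilder

qb = QueryBuilder()
qb.append(Node, project=['id'], filters={'id': {'>': 0}})

serialised = json.dumps(qb.as_dict())
restored = QueryBuilder.from_dict(json.loads(serialised))
assert restored.count() == qb.count()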

get_used_tags(vertices: bool = True, edges: bool = True) List[str][source]#

Return a list of the tags of all vertices (and optionally edges) that are being used.

Parameters:
  • vertices – If True, adds the tags of vertices to the returned list

  • edges – If True, adds the tags of edges to the returned list.

Returns:

A list of tags

inputs(**kwargs: Any) QueryBuilder[source]#

Join to inputs of the previous vertex in the path.

Returns:

self

iterall(batch_size: int | None = 100) Iterable[List[Any]][source]#

Same as all(), but returns a generator. Be aware that this is only safe if no commit will take place during this transaction. You might also want to read the SQLAlchemy documentation on https://docs.sqlalchemy.org/en/14/orm/query.html#sqlalchemy.orm.Query.yield_per

Parameters:

batch_size – The size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter.

Returns:

a generator of lists
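
A short streaming sketch; as noted above, no commit should take place while iterating (the projections are placeholders):

from aiida.orm import Node, QueryBuilder

qb = QueryBuilder()
qb.append(Node, project=['id', 'ctime'])

for pk, ctime in qb.iterall(batch_size=500):
    print(pk, ctime)  # each row is a list with one entry per projection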

iterdict(batch_size: int | None = 100) Iterable[Dict[str, Dict[str, Any]]][source]#

Same as dict(), but returns a generator. Be aware that this is only safe if no commit will take place during this transaction. You might also want to read the SQLAlchemy documentation on https://docs.sqlalchemy.org/en/14/orm/query.html#sqlalchemy.orm.Query.yield_per

Parameters:

batch_size – The size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter.

Returns:

a generator of dictionaries

limit(limit: int | None) QueryBuilder[source]#

Set the limit (the number of rows to return)

Parameters:

limit – the number of rows to return

offset(offset: int | None) QueryBuilder[source]#

Set the offset. If offset is set, that many rows are skipped before returning. Setting offset = 0 is the same as not setting an offset. If both offset and limit are given, then offset rows are skipped before starting to count the limit rows that are returned.

Parameters:

offset – the number of rows to skip
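
A simple pagination sketch combining order_by, limit and offset; an explicit ordering is used because row order is otherwise not guaranteed, and page size and page index are placeholders:

from aiida.orm import Node, QueryBuilder

page_size = 20
page = 2  # zero-based page index

qb = QueryBuilder()
qb.append(Node, tag='node', project=['id'])
qb.order_by({'node': ['id']})
qb.limit(page_size).offset(page * page_size)
page_rows = qb.all(flat=True)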

one() List[Any][source]#

Executes the query, asking for exactly one result.

Will raise an exception if this is not the case:

Raises:

MultipleObjectsError if more than one row can be returned

Raises:

NotExistent if no result was found
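
A defensive usage sketch; the exception classes are assumed to live in aiida.common.exceptions, as referenced for UserCollection.get_or_create below, and the uuid filter value is a placeholder:

from aiida.common.exceptions import MultipleObjectsError, NotExistent
from aiida.orm import Node, QueryBuilder

qb = QueryBuilder()
qb.append(Node, filters={'uuid': 'replace-with-a-real-uuid'}, project=['*'])

try:
    node, = qb.one()  # exactly one row with a single projected entity
except NotExistent:
    node = None
except MultipleObjectsError:
    raise RuntimeError('the filters matched more than one node')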

order_by(order_by: dict | List[dict] | Tuple[dict, ...]) QueryBuilder[source]#

Set the entity to order by

Parameters:

order_by – This is a list of items, where each item is a dictionary that specifies what to sort for an entity

In each dictionary in that list, keys represent valid tags of entities (tables), and values are lists of columns.

Usage:

#Sorting by id (ascending):
qb = QueryBuilder()
qb.append(Node, tag='node')
qb.order_by({'node':['id']})

# or
#Sorting by id (ascending):
qb = QueryBuilder()
qb.append(Node, tag='node')
qb.order_by({'node':[{'id':{'order':'asc'}}]})

# for descending order:
qb = QueryBuilder()
qb.append(Node, tag='node')
qb.order_by({'node':[{'id':{'order':'desc'}}]})

# or (shorter)
qb = QueryBuilder()
qb.append(Node, tag='node')
qb.order_by({'node':[{'id':'desc'}]})
outputs(**kwargs: Any) QueryBuilder[source]#

Join to outputs of the previous vertex in the path.

Returns:

self

parents(**kwargs: Any) QueryBuilder[source]#

Join to parents/ancestors of the previous vertex in the path.

Returns:

self

property queryhelp: QueryDictType#

Legacy name for the as_dict method.

set_debug(debug: bool) QueryBuilder[source]#

Run in debug mode. This does not affect functionality, but prints intermediate stages when creating a query on screen.

Parameters:

debug – Turn debug on or off

class aiida.orm.querybuilder._QueryTagMap[source]#

Bases: object

Cache of tag mappings for a query.

__contains__(tag: str) bool[source]#
__dict__ = mappingproxy({'__module__': 'aiida.orm.querybuilder', '__doc__': 'Cache of tag mappings for a query.', '__init__': <function _QueryTagMap.__init__>, '__repr__': <function _QueryTagMap.__repr__>, '__contains__': <function _QueryTagMap.__contains__>, '__iter__': <function _QueryTagMap.__iter__>, 'add': <function _QueryTagMap.add>, 'remove': <function _QueryTagMap.remove>, 'get': <function _QueryTagMap.get>, '__dict__': <attribute '__dict__' of '_QueryTagMap' objects>, '__weakref__': <attribute '__weakref__' of '_QueryTagMap' objects>, '__annotations__': {'_tag_to_type': 'Dict[str, Union[None, EntityTypes]]', '_cls_to_tag_map': 'Dict[Any, Set[str]]'}})#
__init__()[source]#

Construct a new instance.

__iter__()[source]#
__module__ = 'aiida.orm.querybuilder'#
__repr__() str[source]#

Return repr(self).

__weakref__#

list of weak references to the object

add(tag: str, etype: None | EntityTypes = None, klasses: None | EntityClsType | Sequence[EntityClsType] = None) None[source]#

Add a tag.

get(tag_or_cls: str | EntityClsType) str[source]#

Return the tag or, given a class(es), map to a tag.

Raises:

ValueError – if the tag is not found, or the class(es) does not map to a single tag

remove(tag: str) None[source]#

Remove a tag.

aiida.orm.querybuilder._get_group_type_filter(classifiers: Classifier, subclassing: bool) dict[source]#

Return filter dictionaries for Group.type_string given a set of classifiers.

Parameters:
  • classifiers – a dictionary with classifiers (note: does not support lists)

  • subclassing – if True, allow for subclasses of the ormclass

Returns:

dictionary in QueryBuilder filter language to pass into {'type_string': … }

aiida.orm.querybuilder._get_node_type_filter(classifiers: Classifier, subclassing: bool) dict[source]#

Return filter dictionaries given a set of classifiers.

Parameters:
  • classifiers – a dictionary with classifiers (note: does not support lists)

  • subclassing – if True, allow for subclasses of the ormclass

Returns:

dictionary in QueryBuilder filter language to pass into {"type": … }

aiida.orm.querybuilder._get_ormclass(cls: None | EntityClsType | Sequence[EntityClsType], entity_type: None | str | Sequence[str]) Tuple[EntityTypes, List[Classifier]][source]#

Get ORM classifiers from either class(es) or ormclass_type_string(s).

Parameters:
  • cls – a class or tuple/set/list of classes that are either AiiDA ORM classes or backend ORM classes.

  • ormclass_type_string – type string for ORM class

Returns:

the ORM class as well as a dictionary with additional classifier strings

Handles the case of lists as well.

aiida.orm.querybuilder._get_ormclass_from_cls(cls: EntityClsType) Tuple[EntityTypes, Classifier][source]#

Return the correct classifiers for the QueryBuilder from an ORM class.

Parameters:
  • cls – an AiiDA ORM class or backend ORM class.

  • query – an instance of the appropriate QueryBuilder backend.

Returns:

the ORM class as well as a dictionary with additional classifier strings

Note: the ormclass_type_string is currently hardcoded for group, computer etc. One could instead use something like

aiida.orm.utils.node.get_type_string_from_class(cls.__module__, cls.__name__)

aiida.orm.querybuilder._get_ormclass_from_str(type_string: str) Tuple[EntityTypes, Classifier][source]#

Return the correct classifiers for the QueryBuilder from an ORM type string.

Parameters:
  • type_string – type string for ORM class

  • query – an instance of the appropriate QueryBuilder backend.

Returns:

the ORM class as well as a dictionary with additional classifier strings

aiida.orm.querybuilder._get_process_type_filter(classifiers: Classifier, subclassing: bool) dict[source]#

Return filter dictionaries given a set of classifiers.

Parameters:
  • classifiers – a dictionary with classifiers (note: does not support lists)

  • subclassing – if True, allow for subclasses of the process type. This is only activated if an entry point can be found for the process type (as well as for a selection of built-in process types).

Returns:

dictionary in QueryBuilder filter language to pass into {"process_type": … }

Module for the ORM user class.

class aiida.orm.users.User(email: str, first_name: str = '', last_name: str = '', institution: str = '', backend: StorageBackend | None = None)[source]#

Bases: Entity[BackendUser, UserCollection]

AiiDA User

_CLS_COLLECTION#

alias of UserCollection

__abstractmethods__ = frozenset({})#
__init__(email: str, first_name: str = '', last_name: str = '', institution: str = '', backend: StorageBackend | None = None)[source]#

Create a new User.

__module__ = 'aiida.orm.users'#
__orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendUser'), aiida.orm.users.UserCollection],)#
__parameters__ = ()#
__qb_fields__: Sequence[QbField] = [QbStrField('email', dtype=str, is_attribute=False), QbStrField('first_name', dtype=str, is_attribute=False), QbStrField('last_name', dtype=str, is_attribute=False), QbStrField('institution', dtype=str, is_attribute=False)]#
__str__() str[source]#

Return str(self).

_abc_impl = <_abc._abc_data object>#
property email: str#
fields: QbFields = {'email': 'QbStrField(email) -> str',  'first_name': 'QbStrField(first_name) -> str',  'institution': 'QbStrField(institution) -> str',  'last_name': 'QbStrField(last_name) -> str',  'pk': 'QbNumericField(pk) -> int'}#
property first_name: str#
get_full_name() str[source]#

Return the user full name

Returns:

the user full name

get_short_name() str[source]#

Return the user short name (typically, this returns the email)

Returns:

The short name

property institution: str#
property is_default: bool#

Return whether the user is the default user.

Returns:

Boolean, True if the user is the default, False otherwise.

property last_name: str#
static normalize_email(email: str) str[source]#

Normalize the address by lowercasing the domain part of the email address (taken from Django).

property uuid: None#

For now, users do not have UUIDs, so this always returns None.

class aiida.orm.users.UserCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#

Bases: Collection[User]

The collection of users stored in a backend.

__abstractmethods__ = frozenset({})#
__module__ = 'aiida.orm.users'#
__orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('User')],)#
__parameters__ = ()#
_abc_impl = <_abc._abc_data object>#
static _entity_base_cls() Type[User][source]#

The allowed entity class or subclasses thereof.

get_default() User | None[source]#

Get the current default user

get_or_create(email: str, **kwargs) Tuple[bool, User][source]#

Get the existing user with a given email address or create an unstored one

Parameters:

kwargs – The properties of the user to get or create

Returns:

The corresponding user object

Raises:

aiida.common.exceptions.MultipleObjectsError, aiida.common.exceptions.NotExistent
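
A short usage sketch, assuming User.collection resolves to the UserCollection of the loaded profile; note that, per the signature above, the tuple is returned as (created, user), and the email and names are placeholders:

from aiida.orm import User

created, user = User.collection.get_or_create(
    email='jane.doe@example.com',
    first_name='Jane',
    last_name='Doe',
)
if created:
    user.store()  # a newly created user is returned unstored, as noted above
print(user.get_full_name(), user.is_default)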