aiida.orm package#
Main module to expose all orm classes and methods
Subpackages#
- aiida.orm.implementation package
- Submodules
BackendAuthInfo
BackendAuthInfo.METADATA_WORKDIR
BackendAuthInfo.__abstractmethods__
BackendAuthInfo.__module__
BackendAuthInfo._abc_impl
BackendAuthInfo.computer
BackendAuthInfo.enabled
BackendAuthInfo.get_auth_params()
BackendAuthInfo.get_metadata()
BackendAuthInfo.set_auth_params()
BackendAuthInfo.set_metadata()
BackendAuthInfo.user
BackendAuthInfoCollection
BackendComment
BackendCommentCollection
BackendCommentCollection.ENTITY_CLASS
BackendCommentCollection.__annotations__
BackendCommentCollection.__module__
BackendCommentCollection.__orig_bases__
BackendCommentCollection.__parameters__
BackendCommentCollection.create()
BackendCommentCollection.delete()
BackendCommentCollection.delete_all()
BackendCommentCollection.delete_many()
BackendComputer
BackendComputer.__abstractmethods__
BackendComputer.__module__
BackendComputer._abc_impl
BackendComputer._logger
BackendComputer.copy()
BackendComputer.description
BackendComputer.get_metadata()
BackendComputer.get_scheduler_type()
BackendComputer.get_transport_type()
BackendComputer.hostname
BackendComputer.label
BackendComputer.set_description()
BackendComputer.set_hostname()
BackendComputer.set_label()
BackendComputer.set_metadata()
BackendComputer.set_scheduler_type()
BackendComputer.set_transport_type()
BackendComputer.uuid
BackendComputerCollection
BackendCollection
BackendEntity
BackendEntityExtrasMixin
BackendEntityExtrasMixin.__abstractmethods__
BackendEntityExtrasMixin.__dict__
BackendEntityExtrasMixin.__module__
BackendEntityExtrasMixin.__weakref__
BackendEntityExtrasMixin._abc_impl
BackendEntityExtrasMixin.clear_extras()
BackendEntityExtrasMixin.delete_extra()
BackendEntityExtrasMixin.delete_extra_many()
BackendEntityExtrasMixin.extras
BackendEntityExtrasMixin.extras_items()
BackendEntityExtrasMixin.extras_keys()
BackendEntityExtrasMixin.get_extra()
BackendEntityExtrasMixin.get_extra_many()
BackendEntityExtrasMixin.reset_extras()
BackendEntityExtrasMixin.set_extra()
BackendEntityExtrasMixin.set_extra_many()
BackendGroup
BackendGroup.__abstractmethods__
BackendGroup.__module__
BackendGroup.__repr__()
BackendGroup.__str__()
BackendGroup._abc_impl
BackendGroup.add_nodes()
BackendGroup.clear()
BackendGroup.count()
BackendGroup.description
BackendGroup.label
BackendGroup.nodes
BackendGroup.remove_nodes()
BackendGroup.type_string
BackendGroup.user
BackendGroup.uuid
BackendGroupCollection
NodeIterator
NodeIterator.__abstractmethods__
NodeIterator.__dict__
NodeIterator.__getitem__()
NodeIterator.__init__()
NodeIterator.__iter__()
NodeIterator.__len__()
NodeIterator.__module__
NodeIterator.__next__()
NodeIterator.__parameters__
NodeIterator.__subclasshook__()
NodeIterator.__weakref__
NodeIterator._abc_impl
NodeIterator._is_protocol
BackendLog
BackendLogCollection
BackendNode
BackendNode.__abstractmethods__
BackendNode.__module__
BackendNode._abc_impl
BackendNode.add_incoming()
BackendNode.attributes
BackendNode.attributes_items()
BackendNode.attributes_keys()
BackendNode.clean_values()
BackendNode.clear_attributes()
BackendNode.clone()
BackendNode.computer
BackendNode.ctime
BackendNode.delete_attribute()
BackendNode.delete_attribute_many()
BackendNode.description
BackendNode.get_attribute()
BackendNode.get_attribute_many()
BackendNode.label
BackendNode.mtime
BackendNode.node_type
BackendNode.process_type
BackendNode.repository_metadata
BackendNode.reset_attributes()
BackendNode.set_attribute()
BackendNode.set_attribute_many()
BackendNode.store()
BackendNode.user
BackendNode.uuid
BackendNodeCollection
BackendQueryBuilder
BackendQueryBuilder.__abstractmethods__
BackendQueryBuilder.__dict__
BackendQueryBuilder.__init__()
BackendQueryBuilder.__module__
BackendQueryBuilder.__weakref__
BackendQueryBuilder._abc_impl
BackendQueryBuilder.analyze_query()
BackendQueryBuilder.as_sql()
BackendQueryBuilder.count()
BackendQueryBuilder.first()
BackendQueryBuilder.get_creation_statistics()
BackendQueryBuilder.iterall()
BackendQueryBuilder.iterdict()
PathItemType
PathItemType.__annotations__
PathItemType.__dict__
PathItemType.__module__
PathItemType.__optional_keys__
PathItemType.__orig_bases__
PathItemType.__required_keys__
PathItemType.__total__
PathItemType.__weakref__
PathItemType.edge_tag
PathItemType.entity_type
PathItemType.joining_keyword
PathItemType.joining_value
PathItemType.orm_base
PathItemType.outerjoin
PathItemType.tag
QueryDictType
QueryDictType.__annotations__
QueryDictType.__dict__
QueryDictType.__module__
QueryDictType.__optional_keys__
QueryDictType.__orig_bases__
QueryDictType.__required_keys__
QueryDictType.__total__
QueryDictType.__weakref__
QueryDictType.distinct
QueryDictType.filters
QueryDictType.limit
QueryDictType.offset
QueryDictType.order_by
QueryDictType.path
QueryDictType.project
QueryDictType.project_map
StorageBackend
StorageBackend.__abstractmethods__
StorageBackend.__dict__
StorageBackend.__init__()
StorageBackend.__module__
StorageBackend.__str__()
StorageBackend.__weakref__
StorageBackend._abc_impl
StorageBackend._backup()
StorageBackend._clear()
StorageBackend._validate_or_init_backup_folder()
StorageBackend._write_backup_config()
StorageBackend.authinfos
StorageBackend.autogroup
StorageBackend.backup()
StorageBackend.bulk_insert()
StorageBackend.bulk_update()
StorageBackend.close()
StorageBackend.comments
StorageBackend.computers
StorageBackend.default_user
StorageBackend.delete()
StorageBackend.delete_nodes_and_connections()
StorageBackend.get_global_variable()
StorageBackend.get_info()
StorageBackend.get_orm_entities()
StorageBackend.get_repository()
StorageBackend.groups
StorageBackend.in_transaction
StorageBackend.initialise()
StorageBackend.is_closed
StorageBackend.logs
StorageBackend.maintain()
StorageBackend.migrate()
StorageBackend.nodes
StorageBackend.profile
StorageBackend.query()
StorageBackend.read_only
StorageBackend.reset_default_user()
StorageBackend.set_global_variable()
StorageBackend.transaction()
StorageBackend.users
StorageBackend.version()
StorageBackend.version_head()
StorageBackend.version_profile()
BackendUser
BackendUserCollection
clean_value()
validate_attribute_extra_key()
- Submodules
- aiida.orm.nodes package
- Subpackages
- Submodules
NodeAttributes
NodeAttributes.__contains__()
NodeAttributes.__dict__
NodeAttributes.__init__()
NodeAttributes.__module__
NodeAttributes.__weakref__
NodeAttributes.all
NodeAttributes.clear()
NodeAttributes.delete()
NodeAttributes.delete_many()
NodeAttributes.get()
NodeAttributes.get_many()
NodeAttributes.items()
NodeAttributes.keys()
NodeAttributes.reset()
NodeAttributes.set()
NodeAttributes.set_many()
NodeCaching
NodeCaching.CACHED_FROM_KEY
NodeCaching._HASH_EXTRA_KEY
NodeCaching._VALID_CACHE_KEY
NodeCaching.__annotations__
NodeCaching.__dict__
NodeCaching.__init__()
NodeCaching.__module__
NodeCaching.__weakref__
NodeCaching._compute_hash()
NodeCaching._get_objects_to_hash()
NodeCaching._get_same_node()
NodeCaching._iter_all_same_nodes()
NodeCaching.clear_hash()
NodeCaching.compute_hash()
NodeCaching.get_all_same_nodes()
NodeCaching.get_cache_source()
NodeCaching.get_hash()
NodeCaching.get_objects_to_hash()
NodeCaching.is_created_from_cache
NodeCaching.is_valid_cache
NodeCaching.rehash()
NodeCaching.should_use_cache()
NodeComments
NodeLinks
Node
Node.Collection
Node._CLS_COLLECTION
Node._CLS_NODE_CACHING
Node._CLS_NODE_LINKS
Node.__abstractmethods__
Node.__annotations__
Node.__copy__()
Node.__deepcopy__()
Node.__eq__()
Node.__getattr__()
Node.__hash__()
Node.__init__()
Node.__module__
Node.__orig_bases__
Node.__parameters__
Node.__plugin_type_string
Node.__qb_fields__
Node.__query_type_string
Node.__repr__()
Node.__str__()
Node._abc_impl
Node._add_outputs_from_cache()
Node._cachable
Node._check_mutability_attributes()
Node._deprecated_attr_methods
Node._deprecated_caching_methods
Node._deprecated_comment_methods
Node._deprecated_extra_methods
Node._deprecated_links_methods
Node._deprecated_repo_methods
Node._hash_ignored_attributes
Node._logger
Node._plugin_type_string
Node._query_type_string
Node._storable
Node._store()
Node._store_from_cache()
Node._unstorable_message
Node._updatable_attributes
Node._validate()
Node._validate_storability()
Node._verify_are_parents_stored()
Node.base
Node.class_node_type
Node.computer
Node.ctime
Node.description
Node.entry_point
Node.fields
Node.get_description()
Node.is_valid_cache
Node.label
Node.logger
Node.mtime
Node.node_type
Node.process_type
Node.store()
Node.store_all()
Node.user
Node.uuid
NodeBase
NodeCollection
NodeRepository
NodeRepository.__dict__
NodeRepository.__init__()
NodeRepository.__module__
NodeRepository.__weakref__
NodeRepository._check_mutability()
NodeRepository._clone()
NodeRepository._copy()
NodeRepository._repository
NodeRepository._store()
NodeRepository._update_repository_metadata()
NodeRepository.as_path()
NodeRepository.copy_tree()
NodeRepository.delete_object()
NodeRepository.erase()
NodeRepository.get_object()
NodeRepository.get_object_content()
NodeRepository.glob()
NodeRepository.hash()
NodeRepository.list_object_names()
NodeRepository.list_objects()
NodeRepository.metadata
NodeRepository.open()
NodeRepository.put_object_from_bytes()
NodeRepository.put_object_from_file()
NodeRepository.put_object_from_filelike()
NodeRepository.put_object_from_tree()
NodeRepository.serialize()
NodeRepository.walk()
- aiida.orm.utils package
- Subpackages
- Submodules
CalcJobResultManager
CalcJobResultManager.__dict__
CalcJobResultManager.__dir__()
CalcJobResultManager.__getattr__()
CalcJobResultManager.__getitem__()
CalcJobResultManager.__init__()
CalcJobResultManager.__iter__()
CalcJobResultManager.__module__
CalcJobResultManager.__weakref__
CalcJobResultManager._load_results()
CalcJobResultManager.get_results()
CalcJobResultManager.node
LinkManager
LinkManager.__bool__()
LinkManager.__dict__
LinkManager.__init__()
LinkManager.__iter__()
LinkManager.__module__
LinkManager.__next__()
LinkManager.__weakref__
LinkManager.all()
LinkManager.all_link_labels()
LinkManager.all_link_pairs()
LinkManager.all_nodes()
LinkManager.first()
LinkManager.get_node_by_label()
LinkManager.nested()
LinkManager.next()
LinkManager.one()
LinkPair
LinkPair.__annotations__
LinkPair.__getnewargs__()
LinkPair.__match_args__
LinkPair.__module__
LinkPair.__new__()
LinkPair.__orig_bases__
LinkPair.__repr__()
LinkPair.__slots__
LinkPair._asdict()
LinkPair._field_defaults
LinkPair._fields
LinkPair._make()
LinkPair._replace()
LinkPair.link_label
LinkPair.link_type
LinkQuadruple
LinkQuadruple.__annotations__
LinkQuadruple.__getnewargs__()
LinkQuadruple.__match_args__
LinkQuadruple.__module__
LinkQuadruple.__new__()
LinkQuadruple.__orig_bases__
LinkQuadruple.__repr__()
LinkQuadruple.__slots__
LinkQuadruple._asdict()
LinkQuadruple._field_defaults
LinkQuadruple._fields
LinkQuadruple._make()
LinkQuadruple._replace()
LinkQuadruple.link_label
LinkQuadruple.link_type
LinkQuadruple.source_id
LinkQuadruple.target_id
LinkTriple
LinkTriple.__annotations__
LinkTriple.__getnewargs__()
LinkTriple.__match_args__
LinkTriple.__module__
LinkTriple.__new__()
LinkTriple.__orig_bases__
LinkTriple.__repr__()
LinkTriple.__slots__
LinkTriple._asdict()
LinkTriple._field_defaults
LinkTriple._fields
LinkTriple._make()
LinkTriple._replace()
LinkTriple.link_label
LinkTriple.link_type
LinkTriple.node
link_triple_exists()
validate_link()
CalculationEntityLoader
CodeEntityLoader
ComputerEntityLoader
DataEntityLoader
GroupEntityLoader
IdentifierType
NodeEntityLoader
OrmEntityLoader
OrmEntityLoader.__annotations__
OrmEntityLoader.__dict__
OrmEntityLoader.__module__
OrmEntityLoader.__weakref__
OrmEntityLoader._get_query_builder_id_identifier()
OrmEntityLoader._get_query_builder_label_identifier()
OrmEntityLoader._get_query_builder_uuid_identifier()
OrmEntityLoader.get_options()
OrmEntityLoader.get_query_builder()
OrmEntityLoader.get_query_classes()
OrmEntityLoader.infer_identifier_type()
OrmEntityLoader.label_ambiguity_breaker
OrmEntityLoader.load_entity()
OrmEntityLoader.orm_base_class
ProcessEntityLoader
WorkflowEntityLoader
get_loader()
load_code()
load_computer()
load_entity()
load_group()
load_node()
DBLogHandler
create_logger_adapter()
get_dblogger_extra()
AttributeManager
NodeLinksManager
NodeLinksManager.__contains__()
NodeLinksManager.__dict__
NodeLinksManager.__dir__()
NodeLinksManager.__getattr__()
NodeLinksManager.__getitem__()
NodeLinksManager.__init__()
NodeLinksManager.__iter__()
NodeLinksManager.__module__
NodeLinksManager.__repr__()
NodeLinksManager.__str__()
NodeLinksManager.__weakref__
NodeLinksManager._construct_attribute_dict()
NodeLinksManager._get_keys()
NodeLinksManager._get_node_by_link_label()
NodeLinksManager._namespace_separator
FunctionCalculationMixin
FunctionCalculationMixin.FUNCTION_NAMESPACE_KEY
FunctionCalculationMixin.FUNCTION_NAME_KEY
FunctionCalculationMixin.FUNCTION_NUMBER_OF_LINES_KEY
FunctionCalculationMixin.FUNCTION_SOURCE_FILE_PATH
FunctionCalculationMixin.FUNCTION_STARTING_LINE_KEY
FunctionCalculationMixin.__annotations__
FunctionCalculationMixin.__dict__
FunctionCalculationMixin.__module__
FunctionCalculationMixin.__weakref__
FunctionCalculationMixin._set_function_name()
FunctionCalculationMixin._set_function_namespace()
FunctionCalculationMixin._set_function_number_of_lines()
FunctionCalculationMixin._set_function_starting_line_number()
FunctionCalculationMixin.function_name
FunctionCalculationMixin.function_namespace
FunctionCalculationMixin.function_number_of_lines
FunctionCalculationMixin.function_starting_line_number
FunctionCalculationMixin.get_function_source_code()
FunctionCalculationMixin.get_source_code_file()
FunctionCalculationMixin.get_source_code_function()
FunctionCalculationMixin.store_source_info()
Sealable
AbstractNodeMeta
get_query_type_from_type_string()
get_type_string_from_class()
is_valid_node_type_string()
load_node_class()
clean_remote()
get_calcjob_remote_paths()
AiiDADumper
AiiDALoader
_MappingType
bundle_constructor()
computer_constructor()
dataclass_constructor()
deserialize_unsafe()
enum_constructor()
group_constructor()
mapping_constructor()
node_constructor()
node_links_manager_constructor()
represent_bundle()
represent_computer()
represent_dataclass()
represent_enum()
represent_group()
represent_mapping()
represent_node()
represent_node_links_manager()
serialize()
Submodules#
Module for the AuthInfo ORM class.
- class aiida.orm.authinfos.AuthInfo(computer: Computer, user: User, backend: StorageBackend | None = None)[source]#
Bases:
Entity[BackendAuthInfo, AuthInfoCollection]
ORM class that models the authorization information that allows a User to connect to a Computer.
- PROPERTY_WORKDIR = 'workdir'#
- _CLS_COLLECTION#
alias of
AuthInfoCollection
- __abstractmethods__ = frozenset({})#
- __init__(computer: Computer, user: User, backend: StorageBackend | None = None) None [source]#
Create an AuthInfo instance for the given computer and user.
- Parameters:
computer – a Computer instance
user – a User instance
backend – the backend to use for the instance, or use the default backend if None
- __module__ = 'aiida.orm.authinfos'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendAuthInfo'), aiida.orm.authinfos.AuthInfoCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbField('enabled', dtype=bool, is_attribute=False), QbDictField('auth_params', dtype=Dict[str, Any], is_attribute=False), QbDictField('metadata', dtype=Dict[str, Any], is_attribute=False), QbNumericField('computer_pk', dtype=int, is_attribute=False), QbNumericField('user_pk', dtype=int, is_attribute=False)]#
- _abc_impl = <_abc._abc_data object>#
- property enabled: bool#
Return whether this instance is enabled.
- Returns:
True if enabled, False otherwise
- fields: QbFields = {'auth_params': 'QbDictField(auth_params) -> Dict[str, Any]', 'computer_pk': 'QbNumericField(computer_pk) -> int', 'enabled': 'QbField(enabled) -> bool', 'metadata': 'QbDictField(metadata) -> Dict[str, Any]', 'pk': 'QbNumericField(pk) -> int', 'user_pk': 'QbNumericField(user_pk) -> int'}#
- get_auth_params() Dict[str, Any] [source]#
Return the dictionary of authentication parameters
- Returns:
a dictionary with authentication parameters
- get_metadata() Dict[str, Any] [source]#
Return the dictionary of metadata
- Returns:
a dictionary with metadata
- get_transport() Transport [source]#
Return a fully configured transport that can be used to connect to the computer set for this instance.
- get_workdir() str [source]#
Return the working directory.
If no explicit work directory is set for this instance, the working directory of the computer will be returned.
- Returns:
the working directory
- set_auth_params(auth_params: Dict[str, Any]) None [source]#
Set the dictionary of authentication parameters
- Parameters:
auth_params – a dictionary with authentication parameters
- class aiida.orm.authinfos.AuthInfoCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection[AuthInfo]
The collection of AuthInfo entries.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.authinfos'#
- __orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('AuthInfo')],)#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
Module to manage the autogrouping functionality by verdi run
.
- class aiida.orm.autogroup.AutogroupManager(backend)[source]#
Bases:
object
Class to automatically add all newly stored ``Node``s to an ``AutoGroup`` (whilst enabled).
This class should not be instantiated directly, but rather accessed through the backend storage instance.
The auto-grouping is checked by the ``Node.store()`` method which, if ``is_to_be_grouped`` is true, will store the node in the associated ``AutoGroup``.
The exclude/include lists are lists of strings like: ``aiida.data:core.int``, ``aiida.calculation:quantumespresso.pw``, ``aiida.data:core.array.%``, … i.e.: a string identifying the base class, followed by a colon and the path to the class as accepted by CalculationFactory/DataFactory. Each string can contain one or more wildcard characters ``%``; in this case this is used in a ``like`` comparison with the QueryBuilder. Note that in this case you have to remember that ``_`` means “any character” in the QueryBuilder, and you need to escape it if you mean a literal underscore.
Only one of the two (between exclude and include) can be set. If none of the two is set, everything is included.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.autogroup', '__doc__': 'Class to automatically add all newly stored ``Node``s to an ``AutoGroup`` (whilst enabled).\n\n This class should not be instantiated directly, but rather accessed through the backend storage instance.\n\n The auto-grouping is checked by the ``Node.store()`` method which, if ``is_to_be_grouped`` is true,\n will store the node in the associated ``AutoGroup``.\n\n The exclude/include lists are lists of strings like:\n ``aiida.data:core.int``, ``aiida.calculation:quantumespresso.pw``,\n ``aiida.data:core.array.%``, ...\n i.e.: a string identifying the base class, followed by a colon and the path to the class\n as accepted by CalculationFactory/DataFactory.\n Each string can contain one or more wildcard characters ``%``;\n in this case this is used in a ``like`` comparison with the QueryBuilder.\n Note that in this case you have to remember that ``_`` means "any character"\n in the QueryBuilder, and you need to escape it if you mean a literal underscore.\n\n Only one of the two (between exclude and include) can be set.\n If none of the two is set, everything is included.\n ', '__init__': <function AutogroupManager.__init__>, 'is_enabled': <property object>, 'enable': <function AutogroupManager.enable>, 'disable': <function AutogroupManager.disable>, 'get_exclude': <function AutogroupManager.get_exclude>, 'get_include': <function AutogroupManager.get_include>, 'get_group_label_prefix': <function AutogroupManager.get_group_label_prefix>, 'validate': <staticmethod(<function AutogroupManager.validate>)>, 'set_exclude': <function AutogroupManager.set_exclude>, 'set_include': <function AutogroupManager.set_include>, 'set_group_label_prefix': <function AutogroupManager.set_group_label_prefix>, '_matches': <staticmethod(<function AutogroupManager._matches>)>, 'is_to_be_grouped': <function AutogroupManager.is_to_be_grouped>, 'get_or_create_group': <function AutogroupManager.get_or_create_group>, 
'__dict__': <attribute '__dict__' of 'AutogroupManager' objects>, '__weakref__': <attribute '__weakref__' of 'AutogroupManager' objects>, '__annotations__': {'_exclude': 'list[str] | None', '_include': 'list[str] | None'}})#
- __module__ = 'aiida.orm.autogroup'#
- __weakref__#
list of weak references to the object
- static _matches(string, filter_string)[source]#
Check if ‘string’ matches the ‘filter_string’ (used for include and exclude filters).
If ‘filter_string’ does not contain any % sign, perform an exact match. Otherwise, match with a SQL-like query, where % means any character sequence, and _ means a single character (these characters can be escaped with a backslash).
- Parameters:
string – the string to match.
filter_string – the filter string.
- get_exclude() list[str] | None [source]#
Return the list of classes to exclude from autogrouping.
Returns
None
if no exclusion list has been set.
- get_group_label_prefix() str [source]#
Get the prefix of the label of the group. If no group label prefix was set, it will set a default one by itself.
- get_include() list[str] | None [source]#
Return the list of classes to include in the autogrouping.
Returns
None
if no inclusion list has been set.
- get_or_create_group() AutoGroup [source]#
Return the current AutoGroup, or create one if None has been set yet.
This function implements a somewhat complex logic that is however needed to make sure that, even if verdi run is called at the same time multiple times, e.g. in a for loop in bash, there is never the risk that two ``verdi run`` Unix processes try to create the same group, with the same label, ending up in a crash of the code (see PR #3650).
Here, instead, we make sure that if this concurrency issue happens, one of the two will get a IntegrityError from the DB, and then recover trying to create a group with a different label (with a numeric suffix appended), until it manages to create it.
- is_to_be_grouped(node) bool [source]#
Return whether the given node is to be auto-grouped according to enable state and include/exclude lists.
- set_exclude(exclude: list[str] | str | None) None [source]#
Set the list of classes to exclude in the autogrouping.
- Parameters:
exclude – a list of valid entry point strings (might contain ‘%’ to be used as string to be matched using SQL’s ``LIKE`` pattern-matching logic), or ``None`` to specify no exclude list.
- set_group_label_prefix(label_prefix: str | None) None [source]#
Set the label of the group to be created (or use a default).
- set_include(include: list[str] | str | None) None [source]#
Set the list of classes to include in the autogrouping.
- Parameters:
include – a list of valid entry point strings (might contain ‘%’ to be used as string to be matched using SQL’s ``LIKE`` pattern-matching logic), or ``None`` to specify no include list.
Comment objects and functions
- class aiida.orm.comments.Comment(node: Node, user: User, content: str | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Entity[BackendComment, CommentCollection]
Base class to map a DbComment that represents a comment attached to a certain Node.
- _CLS_COLLECTION#
alias of
CommentCollection
- __abstractmethods__ = frozenset({})#
- __init__(node: Node, user: User, content: str | None = None, backend: StorageBackend | None = None)[source]#
Create a Comment for a given node and user
- Parameters:
node – a Node instance
user – a User instance
content – the comment content
backend – the backend to use for the instance, or use the default backend if None
- Returns:
a Comment object associated to the given node and user
- __module__ = 'aiida.orm.comments'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendComment'), aiida.orm.comments.CommentCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbNumericField('ctime', dtype=datetime, is_attribute=False), QbNumericField('mtime', dtype=datetime, is_attribute=False), QbStrField('content', dtype=str, is_attribute=False), QbNumericField('user_pk', dtype=int, is_attribute=False), QbNumericField('node_pk', dtype=int, is_attribute=False)]#
- _abc_impl = <_abc._abc_data object>#
- fields: QbFields = {'content': 'QbStrField(content) -> str', 'ctime': 'QbNumericField(ctime) -> datetime', 'mtime': 'QbNumericField(mtime) -> datetime', 'node_pk': 'QbNumericField(node_pk) -> int', 'pk': 'QbNumericField(pk) -> int', 'user_pk': 'QbNumericField(user_pk) -> int', 'uuid': 'QbStrField(uuid) -> str'}#
- class aiida.orm.comments.CommentCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection[Comment]
The collection of Comment entries.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.comments'#
- __orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Comment')],)#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- delete(pk: int) None [source]#
Remove a Comment from the collection with the given id
- Parameters:
pk – the id of the comment to delete
- Raises:
TypeError – if ``pk`` is not an int
NotExistent – if Comment with ID ``pk`` is not found
- delete_all() None [source]#
Delete all Comments from the Collection
- Raises:
IntegrityError – if all Comments could not be deleted
Module for Computer entities
- class aiida.orm.computers.Computer(label: str | None = None, hostname: str = '', description: str = '', transport_type: str = '', scheduler_type: str = '', workdir: str | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Entity[BackendComputer, ComputerCollection]
Computer entity.
- PROPERTY_MINIMUM_SCHEDULER_POLL_INTERVAL = 'minimum_scheduler_poll_interval'#
- PROPERTY_MINIMUM_SCHEDULER_POLL_INTERVAL__DEFAULT = 10.0#
- PROPERTY_SHEBANG = 'shebang'#
- PROPERTY_WORKDIR = 'workdir'#
- _CLS_COLLECTION#
alias of
ComputerCollection
- __abstractmethods__ = frozenset({})#
- __init__(label: str | None = None, hostname: str = '', description: str = '', transport_type: str = '', scheduler_type: str = '', workdir: str | None = None, backend: StorageBackend | None = None) None [source]#
Construct a new computer.
- __module__ = 'aiida.orm.computers'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendComputer'), aiida.orm.computers.ComputerCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('label', dtype=str, is_attribute=False), QbStrField('description', dtype=str, is_attribute=False), QbStrField('hostname', dtype=str, is_attribute=False), QbStrField('transport_type', dtype=str, is_attribute=False), QbStrField('scheduler_type', dtype=str, is_attribute=False), QbDictField('metadata', dtype=Dict[str, Any], is_attribute=False)]#
- _abc_impl = <_abc._abc_data object>#
- classmethod _append_text_validator(append_text: str) None [source]#
Validates the append text string.
- classmethod _default_mpiprocs_per_machine_validator(def_cpus_per_machine: int | None) None [source]#
Validates the default number of CPUs per machine (node)
- _logger = <Logger aiida.orm.computers (WARNING)>#
- _mpirun_command_validator(mpirun_cmd: List[str] | Tuple[str, ...]) None [source]#
Validates the mpirun_command variable. MUST be called after properly checking for a valid scheduler.
- classmethod _prepend_text_validator(prepend_text: str) None [source]#
Validates the prepend text string.
- classmethod _scheduler_type_validator(scheduler_type: str) None [source]#
Validates the scheduler type string.
- classmethod _transport_type_validator(transport_type: str) None [source]#
Validates the transport string.
- configure(user: User | None = None, **kwargs: Any) AuthInfo [source]#
Configure a computer for a user with valid auth params passed via kwargs
- Parameters:
user – the user to configure the computer for
- Kwargs:
the configuration keywords with corresponding values
- Returns:
the authinfo object for the configured user
- classmethod default_memory_per_machine_validator(def_memory_per_machine: int | None) None [source]#
Validates the default amount of memory (kB) per machine (node)
- delete_property(name: str, raise_exception: bool = True) None [source]#
Delete a property from this computer
- Parameters:
name – the name of the property
raise_exception – if True raise if the property does not exist, otherwise return None
- fields: QbFields = {'description': 'QbStrField(description) -> str', 'hostname': 'QbStrField(hostname) -> str', 'label': 'QbStrField(label) -> str', 'metadata': 'QbDictField(metadata) -> Dict[str, Any]', 'pk': 'QbNumericField(pk) -> int', 'scheduler_type': 'QbStrField(scheduler_type) -> str', 'transport_type': 'QbStrField(transport_type) -> str', 'uuid': 'QbStrField(uuid) -> str'}#
- get_authinfo(user: User) AuthInfo [source]#
Return the aiida.orm.authinfo.AuthInfo instance for the given user on this computer, if the computer is configured for the given user.
- Parameters:
user – a User instance.
- Returns:
a AuthInfo instance
- Raises:
aiida.common.NotExistent – if the computer is not configured for the given user.
- get_configuration(user: User | None = None) Dict[str, Any] [source]#
Get the configuration of computer for the given user as a dictionary
- Parameters:
user – the user to get the configuration for, otherwise the default user
- get_default_memory_per_machine() int | None [source]#
Return the default amount of memory (kB) per machine (node) for this computer, or None if it was not set.
- get_default_mpiprocs_per_machine() int | None [source]#
Return the default number of CPUs per machine (node) for this computer, or None if it was not set.
- get_minimum_job_poll_interval() float [source]#
Get the minimum interval between subsequent requests to poll the scheduler for job status.
Note
If no value was ever set for this computer it will fall back on the default provided by the associated transport class in the
DEFAULT_MINIMUM_JOB_POLL_INTERVAL
attribute. If the computer doesn’t have a transport class, or it cannot be loaded, or it doesn’t provide a job poll interval default, then this will fall back on thePROPERTY_MINIMUM_SCHEDULER_POLL_INTERVAL__DEFAULT
attribute of this class.- Returns:
The minimum interval (in seconds).
- get_mpirun_command() List[str] [source]#
Return the mpirun command. Must be a list of strings, that will be then joined with spaces when submitting.
I also provide a sensible default that may be ok in many cases.
- get_property(name: str, *args: Any) Any [source]#
Get a property of this computer
- Parameters:
name – the property name
args – additional arguments
- Returns:
the property value
- get_transport(user: User | None = None) Transport [source]#
Return a Transport class, configured with all correct parameters. The Transport is closed (meaning that if you want to run any operation with it, you have to open it first; i.e., e.g. for a SSH transport, you have to open a connection). To do this you can call ``transport.open()``, or simply run within a ``with`` statement:
transport = Computer.get_transport()
with transport:
    print(transport.whoami())
- Parameters:
user – if None, try to obtain a transport for the default user. Otherwise, pass a valid User.
- Returns:
a (closed) Transport, already configured with the connection parameters to the supercomputer, as configured with
verdi computer configure
for the user specified as a parameteruser
.
- get_transport_class() Type[Transport] [source]#
Get the transport class for this computer. Can be used to instantiate a transport instance.
- get_use_double_quotes() bool [source]#
Return whether the command line parameters of this computer should be escaped with double quotes.
- Returns:
True if to escape with double quotes, False otherwise which is also the default.
- get_workdir() str [source]#
Get the working directory for this computer.
- Returns:
The currently configured working directory
- property is_configured: bool#
Return whether the computer is configured for the current default user.
- Returns:
Boolean,
True
if the computer is configured for the current default user, False
otherwise.
- is_user_configured(user: User) bool [source]#
Is the user configured on this computer?
- Parameters:
user – the user to check
- Returns:
True if configured, False otherwise
- is_user_enabled(user: User) bool [source]#
Is the given user enabled to run on this computer?
- Parameters:
user – the user to check
- Returns:
True if enabled, False otherwise
- set_default_memory_per_machine(def_memory_per_machine: int | None) None [source]#
Set the default amount of memory (kB) per machine (node) for this computer. Accepts None if you do not want to set this value.
- set_default_mpiprocs_per_machine(def_cpus_per_machine: int | None) None [source]#
Set the default number of CPUs per machine (node) for this computer. Accepts None if you do not want to set this value.
- set_minimum_job_poll_interval(interval: float) None [source]#
Set the minimum interval between subsequent requests to update the list of jobs currently running on this computer.
- Parameters:
interval – The minimum interval in seconds
- set_mpirun_command(val: List[str] | Tuple[str, ...]) None [source]#
Set the mpirun command. It must be a list of strings (you can use string.split() if you have a single, space-separated string).
- set_property(name: str, value: Any) None [source]#
Set a property on this computer
- Parameters:
name – the property name
value – the new value
- set_use_double_quotes(val: bool) None [source]#
Set whether the command line parameters of this computer should be escaped with double quotes.
- Parameters:
val – True to escape with double quotes, False otherwise.
- store() Computer [source]#
Store the computer in the DB.
Differently from Nodes, a computer can be re-stored if its properties are to be changed (e.g. a new mpirun command, etc.)
- property uuid: str#
Return the UUID for this computer.
This identifier is unique across all entities types and backend instances.
- Returns:
the entity uuid
- validate() None [source]#
Check if the attributes and files retrieved from the DB are valid. Raise a ValidationError if something is wrong.
Must be able to work even before storing: therefore, use the get_attr and similar methods that automatically read either from the DB or from the internal attribute cache.
For the base class, this is always valid. Subclasses will reimplement this. In the subclass, always call the super().validate() method first!
- class aiida.orm.computers.ComputerCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection
[Computer
]The collection of Computer entries.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.computers'#
- __orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Computer')],)#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- get_or_create(label: str | None = None, **kwargs) Tuple[bool, Computer] [source]#
Try to retrieve a Computer from the DB with the given arguments; create (and store) a new Computer if such a Computer was not present yet.
- Parameters:
label – computer label
- Returns:
(computer, created) where computer is the computer (new or existing, in any case already stored) and created is a boolean saying whether the computer was created, because it was not present in the database.
Module for converting backend entities into frontend, ORM, entities
- class aiida.orm.convert.ConvertIterator(backend_iterator)[source]#
-
Iterator that converts backend entities into frontend ORM entities as needed
See
aiida.orm.Group.nodes()
for an example.
- __abstractmethods__ = frozenset({})#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.convert', '__doc__': 'Iterator that converts backend entities into frontend ORM entities as needed\n\n See :func:`aiida.orm.Group.nodes` for an example.\n ', '__init__': <function ConvertIterator.__init__>, '_genfunction': <function ConvertIterator._genfunction>, '__iter__': <function ConvertIterator.__iter__>, '__len__': <function ConvertIterator.__len__>, '__getitem__': <function ConvertIterator.__getitem__>, '__next__': <function ConvertIterator.__next__>, '__dict__': <attribute '__dict__' of 'ConvertIterator' objects>, '__weakref__': <attribute '__weakref__' of 'ConvertIterator' objects>, '__abstractmethods__': frozenset(), '_abc_impl': <_abc._abc_data object>, '__annotations__': {}})#
- __module__ = 'aiida.orm.convert'#
- __weakref__#
list of weak references to the object
- _abc_impl = <_abc._abc_data object>#
- aiida.orm.convert.get_orm_entity(backend_entity)[source]#
- aiida.orm.convert.get_orm_entity(backend_entity: Mapping)
- aiida.orm.convert.get_orm_entity(backend_entity: tuple)
- aiida.orm.convert.get_orm_entity(backend_entity: list)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendGroup)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendComputer)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendUser)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendAuthInfo)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendLog)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendComment)
- aiida.orm.convert.get_orm_entity(backend_entity: BackendNode)
Module for all common top level AiiDA entity classes and methods
- class aiida.orm.entities.Collection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
ABC
,Generic
[EntityType
]Container class that represents the collection of objects of a particular entity type.
- __abstractmethods__ = frozenset({'_entity_base_cls'})#
- __call__(backend: StorageBackend) CollectionType [source]#
Get or create a cached collection using a new backend.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.entities', '__doc__': 'Container class that represents the collection of objects of a particular entity type.', '_entity_base_cls': <staticmethod(<function Collection._entity_base_cls>)>, 'get_cached': <classmethod(<functools._lru_cache_wrapper object>)>, '__init__': <function Collection.__init__>, '__call__': <function Collection.__call__>, 'entity_type': <property object>, 'backend': <property object>, 'query': <function Collection.query>, 'get': <function Collection.get>, 'find': <function Collection.find>, 'all': <function Collection.all>, 'count': <function Collection.count>, '__orig_bases__': (<class 'abc.ABC'>, typing.Generic[~EntityType]), '__dict__': <attribute '__dict__' of 'Collection' objects>, '__weakref__': <attribute '__weakref__' of 'Collection' objects>, '__parameters__': (~EntityType,), '__abstractmethods__': frozenset({'_entity_base_cls'}), '_abc_impl': <_abc._abc_data object>, '__annotations__': {}})#
- __init__(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None) None [source]#
Construct a new entity collection.
- Parameters:
entity_class – the entity type e.g. User, Computer, etc
backend – the backend instance to get the collection for, or use the default
- __module__ = 'aiida.orm.entities'#
- __orig_bases__ = (<class 'abc.ABC'>, typing.Generic[~EntityType])#
- __parameters__ = (~EntityType,)#
- __weakref__#
list of weak references to the object
- _abc_impl = <_abc._abc_data object>#
- abstract static _entity_base_cls() Type[EntityType] [source]#
The allowed entity class or subclasses thereof.
- all() List[EntityType] [source]#
Get all entities in this collection.
- Returns:
A list of all entities
- property backend: StorageBackend#
Return the backend.
- count(filters: 'FilterType' | None = None) int [source]#
Count entities in this collection according to criteria.
- Parameters:
filters – the keyword value pair filters to match
- Returns:
The number of entities found using the supplied criteria
- find(filters: 'FilterType' | None = None, order_by: 'OrderByType' | None = None, limit: int | None = None) List[EntityType] [source]#
Find collection entries matching the filter criteria.
- Parameters:
filters – the keyword value pair filters to match
order_by – a list of (key, direction) pairs specifying the sort order
limit – the maximum number of results to return
- Returns:
a list of resulting matches
- get(**filters: Any) EntityType [source]#
Get a single collection entry that matches the filter criteria.
- Parameters:
filters – the filters identifying the object to get
- Returns:
the entry
- classmethod get_cached(entity_class: Type[EntityType], backend: StorageBackend)[source]#
Get the cached collection instance for the given entity class and backend.
- Parameters:
backend – the backend instance to get the collection for
- query(filters: 'FilterType' | None = None, order_by: 'OrderByType' | None = None, project: list[str] | str | None = None, limit: int | None = None, offset: int | None = None, subclassing: bool = True) QueryBuilder [source]#
Get a query builder for the objects of this collection.
- Parameters:
filters – the keyword value pair filters to match
order_by – a list of (key, direction) pairs specifying the sort order
project – Optional projections.
limit – the maximum number of results to return
offset – number of initial results to be skipped
subclassing – whether to match subclasses of the type as well.
- class aiida.orm.entities.Entity(backend_entity: BackendEntityType)[source]#
Bases:
ABC
,Generic
[BackendEntityType
,CollectionType
]An AiiDA entity
- _CLS_COLLECTION#
alias of
Collection
- __abstractmethods__ = frozenset({})#
- __annotations__ = {'_CLS_COLLECTION': 'Type[CollectionType]', '__qb_fields__': 'Sequence[QbField]', 'fields': 'QbFields'}#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.entities', '__annotations__': {'_CLS_COLLECTION': 'Type[CollectionType]', 'fields': 'QbFields', '__qb_fields__': 'Sequence[QbField]'}, '__doc__': 'An AiiDA entity', '_CLS_COLLECTION': <class 'aiida.orm.entities.Collection'>, 'fields': {'pk': 'QbNumericField(pk) -> int'}, '__qb_fields__': [QbNumericField('pk', dtype=int, is_attribute=False)], 'objects': <aiida.common.lang.classproperty object>, 'collection': <aiida.common.lang.classproperty object>, 'get_collection': <classmethod(<function Entity.get_collection>)>, 'get': <classmethod(<function Entity.get>)>, '__init__': <function Entity.__init__>, '__getstate__': <function Entity.__getstate__>, 'initialize': <function super_check.<locals>.wrapper>, 'id': <property object>, 'pk': <property object>, 'store': <function Entity.store>, 'is_stored': <property object>, 'backend': <property object>, 'backend_entity': <property object>, '__orig_bases__': (<class 'abc.ABC'>, typing.Generic[~BackendEntityType, ~CollectionType]), '__dict__': <attribute '__dict__' of 'Entity' objects>, '__weakref__': <attribute '__weakref__' of 'Entity' objects>, '__parameters__': (~BackendEntityType, ~CollectionType), '__abstractmethods__': frozenset(), '_abc_impl': <_abc._abc_data object>})#
- __init__(backend_entity: BackendEntityType) None [source]#
- Parameters:
backend_entity – the backend model supporting this entity
- __module__ = 'aiida.orm.entities'#
- __orig_bases__ = (<class 'abc.ABC'>, typing.Generic[~BackendEntityType, ~CollectionType])#
- __parameters__ = (~BackendEntityType, ~CollectionType)#
- __weakref__#
list of weak references to the object
- _abc_impl = <_abc._abc_data object>#
- property backend: StorageBackend#
Get the backend for this entity
- property backend_entity: BackendEntityType#
Get the implementing class for this object
- collection#
A class that, when used as a decorator, works as if the two decorators @property and @classmethod were applied together (i.e., the object works as a property, both for the Class and for any of its instances; and is called with the class cls rather than with the instance as its first argument).
- classmethod get_collection(backend: StorageBackend)[source]#
Get a collection for objects of this type for a given backend.
Note
Use the
collection
class property instead if the currently loaded backend or the backend of the default profile should be used.
- Parameters:
backend – The backend of the collection to use.
- Returns:
A collection object that can be used to access entities of this type.
- property id: int | None#
Return the id for this entity.
This identifier is guaranteed to be unique amongst entities of the same type for a single backend instance.
- Returns:
the entity’s id
- objects#
A class that, when used as a decorator, works as if the two decorators @property and @classmethod were applied together (i.e., the object works as a property, both for the Class and for any of its instances; and is called with the class cls rather than with the instance as its first argument).
- class aiida.orm.entities.EntityTypes(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)[source]#
Bases:
Enum
Enum for referring to ORM entities in a backend-agnostic manner.
- AUTHINFO = 'authinfo'#
- COMMENT = 'comment'#
- COMPUTER = 'computer'#
- GROUP = 'group'#
- GROUP_NODE = 'group_node'#
- LINK = 'link'#
- LOG = 'log'#
- NODE = 'node'#
- USER = 'user'#
- __module__ = 'aiida.orm.entities'#
- aiida.orm.entities.from_backend_entity(cls: Type[EntityType], backend_entity: BackendEntityType) EntityType [source]#
Construct an entity from a backend entity instance
- Parameters:
backend_entity – the backend entity
- Returns:
an AiiDA entity instance
Interface to the extras of a node instance.
- class aiida.orm.extras.EntityExtras(entity: Node | Group)[source]#
Bases:
object
Interface to the extras of a node or group instance.
Extras are a JSONable dictionary, stored on each entity, allowing for arbitrary data to be stored by users.
Extras are mutable, even after storing the entity, and as such are not deemed a core part of the provenance graph.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.extras', '__doc__': 'Interface to the extras of a node or group instance.\n\n Extras are a JSONable dictionary, stored on each entity,\n allowing for arbitrary data to be stored by users.\n\n Extras are mutable, even after storing the entity,\n and as such are not deemed a core part of the provenance graph.\n ', '__init__': <function EntityExtras.__init__>, '__contains__': <function EntityExtras.__contains__>, 'all': <property object>, 'get': <function EntityExtras.get>, 'get_many': <function EntityExtras.get_many>, 'set': <function EntityExtras.set>, 'set_many': <function EntityExtras.set_many>, 'reset': <function EntityExtras.reset>, 'delete': <function EntityExtras.delete>, 'delete_many': <function EntityExtras.delete_many>, 'clear': <function EntityExtras.clear>, 'items': <function EntityExtras.items>, 'keys': <function EntityExtras.keys>, '__dict__': <attribute '__dict__' of 'EntityExtras' objects>, '__weakref__': <attribute '__weakref__' of 'EntityExtras' objects>, '__annotations__': {}})#
- __module__ = 'aiida.orm.extras'#
- __weakref__#
list of weak references to the object
- property all: Dict[str, Any]#
Return the complete extras dictionary.
Warning
While the entity is unstored, this will return references of the extras on the database model, meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned extras will be a deep copy and mutations of the database extras will have to go through the appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys or some values, use the iterators extras_keys and extras_items, or the getters get_extra and get_extra_many instead.
- Returns:
the extras as a dictionary
- delete(key: str) None [source]#
Delete an extra.
- Parameters:
key – name of the extra
- Raises:
AttributeError – if the extra does not exist
- delete_many(keys: List[str]) None [source]#
Delete multiple extras.
- Parameters:
keys – names of the extras to delete
- Raises:
AttributeError – if at least one of the extra does not exist
- get(key: str, default: Any = ()) Any [source]#
Return the value of an extra.
Warning
While the entity is unstored, this will return a reference of the extra on the database model, meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned extra will be a deep copy and mutations of the database extras will have to go through the appropriate set methods.
- Parameters:
key – name of the extra
default – return this value instead of raising if the attribute does not exist
- Returns:
the value of the extra
- Raises:
AttributeError – if the extra does not exist and no default is specified
- get_many(keys: List[str]) List[Any] [source]#
Return the values of multiple extras.
Warning
While the entity is unstored, this will return references of the extras on the database model, meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned extras will be a deep copy and mutations of the database extras will have to go through the appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys or some values, use the iterators extras_keys and extras_items, or the getters get_extra and get_extra_many instead.
- Parameters:
keys – a list of extra names
- Returns:
a list of extra values
- Raises:
AttributeError – if at least one extra does not exist
- items() Iterable[Tuple[str, Any]] [source]#
Return an iterator over the extras.
- Returns:
an iterator with extra key value pairs
- keys() Iterable[str] [source]#
Return an iterator over the extra keys.
- Returns:
an iterator with extra keys
- reset(extras: Dict[str, Any]) None [source]#
Reset the extras.
Note
This will completely clear any existing extras and replace them with the new dictionary.
- Parameters:
extras – a dictionary with the extras to set
- Raises:
aiida.common.ValidationError – if any of the keys are invalid, i.e. contain periods
- set(key: str, value: Any) None [source]#
Set an extra to the given value.
- Parameters:
key – name of the extra
value – value of the extra
- Raises:
aiida.common.ValidationError – if the key is invalid, i.e. contains periods
- set_many(extras: Dict[str, Any]) None [source]#
Set multiple extras.
Note
This will override any existing extras that are present in the new dictionary.
- Parameters:
extras – a dictionary with the extras to set
- Raises:
aiida.common.ValidationError – if any of the keys are invalid, i.e. contain periods
Module which provides decorators for AiiDA ORM entity -> DB field mappings.
- class aiida.orm.fields.EntityFieldMeta(name, bases, namespace, /, **kwargs)[source]#
Bases:
ABCMeta
A metaclass for entity fields, which adds a fields class attribute.
- __annotations__ = {}#
- __module__ = 'aiida.orm.fields'#
- class aiida.orm.fields.QbArrayField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#
Bases:
QbField
An array (list) flavor of QbField.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'An array (`list`) flavor of `QbField`.', 'contains': <function QbArrayField.contains>, 'of_length': <function QbArrayField.of_length>, 'longer': <function QbArrayField.longer>, 'shorter': <function QbArrayField.shorter>, '__dict__': <attribute '__dict__' of 'QbArrayField' objects>, '__weakref__': <attribute '__weakref__' of 'QbArrayField' objects>, '__annotations__': {}})#
- __module__ = 'aiida.orm.fields'#
- __weakref__#
list of weak references to the object
- _backend_key#
- _doc#
- _dtype#
- _is_attribute#
- _is_subscriptable#
- _key#
- class aiida.orm.fields.QbAttrField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#
Bases:
QbNumericField
,QbArrayField
,QbStrField
,QbDictField
A generic flavor of QbField covering all operations.
- __annotations__ = {}#
- __module__ = 'aiida.orm.fields'#
- _backend_key#
- _doc#
- _dtype#
- _is_attribute#
- _is_subscriptable#
- _key#
- class aiida.orm.fields.QbDictField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#
Bases:
QbField
A dictionary (dict) flavor of QbField.
- __annotations__ = {}#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A dictionary (`dict`) flavor of `QbField`.', 'has_key': <function QbDictField.has_key>, '__getitem__': <function QbDictField.__getitem__>, '__dict__': <attribute '__dict__' of 'QbDictField' objects>, '__weakref__': <attribute '__weakref__' of 'QbDictField' objects>, '__annotations__': {}})#
- __getitem__(key: str) QbAttrField [source]#
Return a new QbField with a nested key.
- __module__ = 'aiida.orm.fields'#
- __weakref__#
list of weak references to the object
- _backend_key#
- _doc#
- _dtype#
- _is_attribute#
- _is_subscriptable#
- _key#
- class aiida.orm.fields.QbField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#
Bases:
object
A field of an ORM entity, accessible via the
QueryBuilder
- __annotations__ = {}#
- __init__(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False) None [source]#
Initialise a ORM entity field, accessible via the
QueryBuilder
- Parameters:
key – The key of the field on the ORM entity
alias – The alias in the storage backend for the key, if not equal to
key
dtype – The data type of the field. If None, the field is of variable type.
doc – A docstring for the field
is_attribute – If True, the
backend_key
property will prepend “attributes.” to the field name
is_subscriptable – If True, a new field can be created by
field["subkey"]
- __module__ = 'aiida.orm.fields'#
- __slots__ = ('_key', '_backend_key', '_doc', '_dtype', '_is_attribute', '_is_subscriptable')#
- _backend_key#
- _doc#
- _dtype#
- _get_dtype_as_str() str [source]#
Return field type as processed string.
>>> None -> ? >>> str -> str >>> typing.Optional[str] -> Optional[str] >>> typing.Dict[typing.List[str]] -> Dict[List[str]]
- _is_attribute#
- _is_subscriptable#
- _key#
- class aiida.orm.fields.QbFieldArguments[source]#
Bases:
TypedDict
- __annotations__ = {'alias': typing.Optional[str], 'doc': <class 'str'>, 'dtype': typing.Optional[typing.Any], 'is_attribute': <class 'bool'>, 'is_subscriptable': <class 'bool'>, 'key': <class 'str'>}#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__annotations__': {'key': <class 'str'>, 'alias': typing.Optional[str], 'dtype': typing.Optional[typing.Any], 'doc': <class 'str'>, 'is_attribute': <class 'bool'>, 'is_subscriptable': <class 'bool'>}, '__orig_bases__': (<function TypedDict>,), '__dict__': <attribute '__dict__' of 'QbFieldArguments' objects>, '__weakref__': <attribute '__weakref__' of 'QbFieldArguments' objects>, '__doc__': None, '__required_keys__': frozenset({'is_subscriptable', 'dtype', 'doc', 'key', 'alias', 'is_attribute'}), '__optional_keys__': frozenset(), '__total__': True})#
- __module__ = 'aiida.orm.fields'#
- __optional_keys__ = frozenset({})#
- __orig_bases__ = (<function TypedDict>,)#
- __required_keys__ = frozenset({'alias', 'doc', 'dtype', 'is_attribute', 'is_subscriptable', 'key'})#
- __total__ = True#
- __weakref__#
list of weak references to the object
- class aiida.orm.fields.QbFieldFilters(filters: Sequence[Tuple[QbField, str, Any]] | dict)[source]#
Bases:
object
A representation of a list of fields and their comparators.
- __and__(other: QbFieldFilters) QbFieldFilters [source]#
a & b
-> {‘and’: [a.filters, b.filters]}.
- __hash__ = None#
- __invert__() QbFieldFilters [source]#
~(a > b) -> a !> b; ~(a !> b) -> a > b
- __module__ = 'aiida.orm.fields'#
- __or__(other: QbFieldFilters) QbFieldFilters [source]#
a | b
-> {‘or’: [a.filters, b.filters]}.
- __slots__ = ('filters',)#
- _resolve_redundancy(other: QbFieldFilters, logical: str) QbFieldFilters | None [source]#
Resolve redundant filters and nested logical operators.
- class aiida.orm.fields.QbFields(fields: Dict[str, QbField] | None = None)[source]#
Bases:
object
A readonly class for mapping attributes to database fields of an AiiDA entity.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A readonly class for mapping attributes to database fields of an AiiDA entity.', '__isabstractmethod__': False, '__init__': <function QbFields.__init__>, '__repr__': <function QbFields.__repr__>, '__str__': <function QbFields.__str__>, '__getitem__': <function QbFields.__getitem__>, '__getattr__': <function QbFields.__getattr__>, '__contains__': <function QbFields.__contains__>, '__len__': <function QbFields.__len__>, '__iter__': <function QbFields.__iter__>, '__dir__': <function QbFields.__dir__>, '_dict': <property object>, '__dict__': <attribute '__dict__' of 'QbFields' objects>, '__weakref__': <attribute '__weakref__' of 'QbFields' objects>, '__annotations__': {}})#
- __isabstractmethod__ = False#
- __module__ = 'aiida.orm.fields'#
- __weakref__#
list of weak references to the object
- property _dict#
Return a copy of the internal mapping
- class aiida.orm.fields.QbNumericField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#
Bases:
QbField
A numeric (int, float, datetime) flavor of QbField.
- __annotations__ = {}#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A numeric (`int`, `float`, `datetime`) flavor of `QbField`.', '__lt__': <function QbNumericField.__lt__>, '__le__': <function QbNumericField.__le__>, '__gt__': <function QbNumericField.__gt__>, '__ge__': <function QbNumericField.__ge__>, '__dict__': <attribute '__dict__' of 'QbNumericField' objects>, '__weakref__': <attribute '__weakref__' of 'QbNumericField' objects>, '__annotations__': {}})#
- __module__ = 'aiida.orm.fields'#
- __weakref__#
list of weak references to the object
- _backend_key#
- _doc#
- _dtype#
- _is_attribute#
- _is_subscriptable#
- _key#
- class aiida.orm.fields.QbStrField(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False)[source]#
Bases:
QbField
A string (str) flavor of QbField.
- __annotations__ = {}#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.fields', '__doc__': 'A string (`str`) flavor of `QbField`.', 'like': <function QbStrField.like>, 'ilike': <function QbStrField.ilike>, '__dict__': <attribute '__dict__' of 'QbStrField' objects>, '__weakref__': <attribute '__weakref__' of 'QbStrField' objects>, '__annotations__': {}})#
- __module__ = 'aiida.orm.fields'#
- __weakref__#
list of weak references to the object
- _backend_key#
- _doc#
- _dtype#
- _is_attribute#
- _is_subscriptable#
- _key#
- aiida.orm.fields.add_field(key: str, alias: str | None = None, *, dtype: Any | None = None, doc: str = '', is_attribute: bool = True, is_subscriptable: bool = False) QbField [source]#
Add a dtype-dependent QbField representation of a field.
- Parameters:
key – The key of the field on the ORM entity
alias – The alias in the storage backend for the key, if not equal to
key
dtype – The data type of the field. If None, the field is of variable type.
doc – A docstring for the field
is_attribute – If True, the
backend_key
property will prepend “attributes.” to the field name
is_subscriptable – If True, a new field can be created by
field["subkey"]
- aiida.orm.fields.extract_root_type(dtype: Any) Any [source]#
Recursively search for the primitive root type.
>>> extract_root_type(List[str]) -> list >>> extract_root_type(Optional[List[str]]) -> list
AiiDA Group entities
- class aiida.orm.groups.AutoGroup(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Group
Group to be used to contain selected nodes generated, whilst autogrouping is enabled.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.groups'#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- fields: QbFields = {'description': 'QbStrField(description) -> str', 'extras': 'QbDictField(extras.*) -> Dict[str, Any]', 'label': 'QbStrField(label) -> str', 'pk': 'QbNumericField(pk) -> int', 'time': 'QbStrField(time) -> str', 'type_string': 'QbStrField(type_string) -> str', 'user_pk': 'QbNumericField(user_pk) -> int', 'uuid': 'QbStrField(uuid) -> str'}#
- class aiida.orm.groups.Group(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Entity
[BackendGroup
,GroupCollection
]An AiiDA ORM implementation of group of nodes.
- _CLS_COLLECTION#
alias of
GroupCollection
- __abstractmethods__ = frozenset({})#
- __annotations__ = {'_CLS_COLLECTION': 'Type[CollectionType]', '_Group__type_string': typing.ClassVar[typing.Optional[str]], '__qb_fields__': 'Sequence[QbField]', '__type_string': 'ClassVar[Optional[str]]', 'fields': 'QbFields'}#
- __getattr__(name: str) Any [source]#
This method is called when an extras is not found in the instance.
It allows for the handling of deprecated mixin methods.
- __init__(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#
Create a new group. Either pass a dbgroup parameter, to reload a group from the DB (and then, no further parameters are allowed), or pass the parameters for the Group creation.
- Parameters:
label – The group label, required on creation
description – The group description (by default, an empty string)
user – The owner of the group (by default, the automatic user)
type_string – a string identifying the type of group (by default, an empty string, indicating a user-defined group).
- __module__ = 'aiida.orm.groups'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendGroup'), aiida.orm.groups.GroupCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('type_string', dtype=str, is_attribute=False), QbStrField('label', dtype=str, is_attribute=False), QbStrField('description', dtype=str, is_attribute=False), QbStrField('time', dtype=str, is_attribute=False), QbDictField('extras', dtype=Dict[str, Any], is_attribute=False, is_subscriptable=True), QbNumericField('user_pk', dtype=int, is_attribute=False)]#
- _abc_impl = <_abc._abc_data object>#
- _deprecated_extra_methods = {'clear_extras': 'clear', 'delete_extra': 'delete', 'delete_extra_many': 'delete_many', 'extras': 'all', 'extras_items': 'items', 'extras_keys': 'keys', 'get_extra': 'get', 'get_extra_many': 'get_many', 'reset_extras': 'reset', 'set_extra': 'set', 'set_extra_many': 'set_many'}#
- _type_string = 'core'#
- add_nodes(nodes: Node | Sequence[Node]) None [source]#
Add a node or a set of nodes to the group.
- Note:
all the nodes and the group itself have to be stored.
- Parameters:
nodes – a single Node or a list of Nodes
- count() int [source]#
Return the number of entities in this group.
- Returns:
integer number of entities contained within the group
- entry_point = EntryPoint(name='core', value='aiida.orm.groups:Group', group='aiida.groups')#
- fields: QbFields = {'description': 'QbStrField(description) -> str', 'extras': 'QbDictField(extras.*) -> Dict[str, Any]', 'label': 'QbStrField(label) -> str', 'pk': 'QbNumericField(pk) -> int', 'time': 'QbStrField(time) -> str', 'type_string': 'QbStrField(type_string) -> str', 'user_pk': 'QbNumericField(user_pk) -> int', 'uuid': 'QbStrField(uuid) -> str'}#
- property is_empty: bool#
Return whether the group is empty, i.e. it does not contain any nodes.
- Returns:
True if it contains no nodes, False otherwise
- property nodes: ConvertIterator#
Return a generator/iterator that iterates over all nodes and returns the respective AiiDA subclasses of Node, and also allows to ask for the number of nodes in the group using len().
- remove_nodes(nodes: Node | Sequence[Node]) None [source]#
Remove a node or a set of nodes from the group.
- Note:
all the nodes and the group itself have to be stored.
- Parameters:
nodes – a single Node or a list of Nodes
- class aiida.orm.groups.GroupBase(group: Group)[source]#
Bases:
object
A namespace for group related functionality, that is not directly related to its user-facing properties.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.groups', '__doc__': 'A namespace for group related functionality, that is not directly related to its user-facing properties.', '__init__': <function GroupBase.__init__>, 'extras': <functools.cached_property object>, '__dict__': <attribute '__dict__' of 'GroupBase' objects>, '__weakref__': <attribute '__weakref__' of 'GroupBase' objects>, '__annotations__': {'_group': "'Group'"}})#
- __module__ = 'aiida.orm.groups'#
- __weakref__#
list of weak references to the object
- property extras: EntityExtras#
Return the extras of this group.
- class aiida.orm.groups.GroupCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection
[Group
]Collection of Groups
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.groups'#
- __orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Group')],)#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- get_or_create(label: str | None = None, **kwargs) Tuple[Group, bool] [source]#
Try to retrieve a group from the DB with the given arguments; create (and store) a new group if such a group was not present yet.
- Parameters:
label – group label
- Returns:
(group, created) where group is the group (new or existing, in any case already stored) and created is a boolean saying
- class aiida.orm.groups.ImportGroup(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Group
Group to be used to contain all nodes from an export archive that has been imported.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.groups'#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- fields: QbFields = {'description': 'QbStrField(description) -> str', 'extras': 'QbDictField(extras.*) -> Dict[str, Any]', 'label': 'QbStrField(label) -> str', 'pk': 'QbNumericField(pk) -> int', 'time': 'QbStrField(time) -> str', 'type_string': 'QbStrField(type_string) -> str', 'user_pk': 'QbNumericField(user_pk) -> int', 'uuid': 'QbStrField(uuid) -> str'}#
- class aiida.orm.groups.UpfFamily(label: str | None = None, user: User | None = None, description: str = '', type_string: str | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Group
Group that represents a pseudo potential family containing UpfData nodes.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.groups'#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- fields: QbFields = {'description': 'QbStrField(description) -> str', 'extras': 'QbDictField(extras.*) -> Dict[str, Any]', 'label': 'QbStrField(label) -> str', 'pk': 'QbNumericField(pk) -> int', 'time': 'QbStrField(time) -> str', 'type_string': 'QbStrField(type_string) -> str', 'user_pk': 'QbNumericField(user_pk) -> int', 'uuid': 'QbStrField(uuid) -> str'}#
- aiida.orm.groups.load_group_class(type_string: str) Type[Group] [source]#
Load the sub class of Group that corresponds to the given type_string.
Note
will fall back on aiida.orm.groups.Group if type_string cannot be resolved to a loadable entry point.
- Parameters:
type_string – the entry point name of the Group sub class
- Returns:
sub class of Group registered through an entry point
Module for orm logging abstract classes
- class aiida.orm.logs.Log(time: datetime, loggername: str, levelname: str, dbnode_id: int, message: str = '', metadata: Dict[str, Any] | None = None, backend: StorageBackend | None = None)[source]#
Bases:
Entity
[BackendLog
,LogCollection
]An AiiDA Log entity. Corresponds to a logged message against a particular AiiDA node.
- _CLS_COLLECTION#
alias of
LogCollection
- __abstractmethods__ = frozenset({})#
- __init__(time: datetime, loggername: str, levelname: str, dbnode_id: int, message: str = '', metadata: Dict[str, Any] | None = None, backend: StorageBackend | None = None)[source]#
Construct a new log
- Parameters:
time – time
loggername – name of logger
levelname – name of log level
dbnode_id – id of database node
message – log message
metadata – metadata
backend – database backend
- __module__ = 'aiida.orm.logs'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendLog'), aiida.orm.logs.LogCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('loggername', dtype=str, is_attribute=False), QbStrField('levelname', dtype=str, is_attribute=False), QbStrField('message', dtype=str, is_attribute=False), QbNumericField('time', dtype=datetime, is_attribute=False), QbDictField('metadata', dtype=Dict[str, Any], is_attribute=False), QbNumericField('node_pk', dtype=int, is_attribute=False)]#
- _abc_impl = <_abc._abc_data object>#
- property dbnode_id: int#
Get the id of the object that created the log entry
- Returns:
The id of the object that created the log entry
- fields: QbFields = {'levelname': 'QbStrField(levelname) -> str', 'loggername': 'QbStrField(loggername) -> str', 'message': 'QbStrField(message) -> str', 'metadata': 'QbDictField(metadata) -> Dict[str, Any]', 'node_pk': 'QbNumericField(node_pk) -> int', 'pk': 'QbNumericField(pk) -> int', 'time': 'QbNumericField(time) -> datetime', 'uuid': 'QbStrField(uuid) -> str'}#
- property loggername: str#
The name of the logger that created this entry
- Returns:
The entry loggername
- class aiida.orm.logs.LogCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection
[Log
]This class represents the collection of logs and can be used to create and retrieve logs.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.logs'#
- __orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('Log')],)#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- create_entry_from_record(record: LogRecord) Log | None [source]#
Helper function to create a log entry from a record created as by the python logging library
- Parameters:
record – The record created by the logging module
- Returns:
A stored log instance
- delete(pk: int) None [source]#
Remove a Log entry from the collection with the given id
- Parameters:
pk – id of the Log to delete
- Raises:
NotExistent – if Log with ID
pk
is not found
- delete_all() None [source]#
Delete all Logs in the collection
- Raises:
IntegrityError – if all Logs could not be deleted
The QueryBuilder: A class that allows you to query the AiiDA database, independent from backend.
Note that the backend implementation is enforced and handled with a composition model!
QueryBuilder()
is the frontend class that the user can use. It inherits from object and contains
backend-specific functionality. Backend specific functionality is provided by the implementation classes.
These inherit from aiida.orm.implementation.querybuilder.BackendQueryBuilder()
,
an interface classes which enforces the implementation of its defined methods.
An instance of one of the implementation classes becomes a member of the QueryBuilder()
instance
when instantiated by the user.
- class aiida.orm.querybuilder.Classifier(ormclass_type_string: str, process_type_string: str | None = None)[source]#
Bases:
NamedTuple
A classifier for an entity.
- __annotations__ = {'ormclass_type_string': ForwardRef('str'), 'process_type_string': ForwardRef('Optional[str]')}#
- __getnewargs__()#
Return self as a plain tuple. Used by copy and pickle.
- __match_args__ = ('ormclass_type_string', 'process_type_string')#
- __module__ = 'aiida.orm.querybuilder'#
- static __new__(_cls, ormclass_type_string: str, process_type_string: str | None = None)#
Create new instance of Classifier(ormclass_type_string, process_type_string)
- __orig_bases__ = (<function NamedTuple>,)#
- __repr__()#
Return a nicely formatted representation string
- __slots__ = ()#
- _asdict()#
Return a new dict which maps field names to their values.
- _field_defaults = {'process_type_string': None}#
- _fields = ('ormclass_type_string', 'process_type_string')#
- classmethod _make(iterable)#
Make a new Classifier object from a sequence or iterable
- _replace(**kwds)#
Return a new Classifier object replacing specified fields with new values
- class aiida.orm.querybuilder.QueryBuilder(backend: 'StorageBackend' | None = None, *, debug: bool | None = None, path: Sequence[str | Dict[str, Any] | EntityClsType] | None = (), filters: Dict[str, FilterType] | None = None, project: Dict[str, ProjectType] | None = None, limit: int | None = None, offset: int | None = None, order_by: OrderByType | None = None, distinct: bool = False, project_map: Dict[str, Dict[str, str]] | None = None)[source]#
Bases:
object
The class to query the AiiDA database.
Usage:
from aiida.orm.querybuilder import QueryBuilder qb = QueryBuilder() # Querying nodes: qb.append(Node) # retrieving the results: results = qb.all()
- _EDGE_TAG_DELIM = '--'#
- _VALID_PROJECTION_KEYS = ('func', 'cast')#
- __deepcopy__(memo) QueryBuilder [source]#
Create deep copy of the instance.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.querybuilder', '__doc__': 'The class to query the AiiDA database.\n\n Usage::\n\n from aiida.orm.querybuilder import QueryBuilder\n qb = QueryBuilder()\n # Querying nodes:\n qb.append(Node)\n # retrieving the results:\n results = qb.all()\n\n ', '_EDGE_TAG_DELIM': '--', '_VALID_PROJECTION_KEYS': ('func', 'cast'), '__init__': <function QueryBuilder.__init__>, 'backend': <property object>, 'as_dict': <function QueryBuilder.as_dict>, 'queryhelp': <property object>, 'from_dict': <classmethod(<function QueryBuilder.from_dict>)>, '__repr__': <function QueryBuilder.__repr__>, '__str__': <function QueryBuilder.__str__>, '__deepcopy__': <function QueryBuilder.__deepcopy__>, 'get_used_tags': <function QueryBuilder.get_used_tags>, '_get_unique_tag': <function QueryBuilder._get_unique_tag>, 'append': <function QueryBuilder.append>, '_init_project_map': <function QueryBuilder._init_project_map>, 'order_by': <function QueryBuilder.order_by>, 'add_filter': <function QueryBuilder.add_filter>, '_process_filters': <staticmethod(<function QueryBuilder._process_filters>)>, '_add_node_type_filter': <function QueryBuilder._add_node_type_filter>, '_add_process_type_filter': <function QueryBuilder._add_process_type_filter>, '_add_group_type_filter': <function QueryBuilder._add_group_type_filter>, 'add_projection': <function QueryBuilder.add_projection>, 'set_debug': <function QueryBuilder.set_debug>, 'debug': <function QueryBuilder.debug>, 'limit': <function QueryBuilder.limit>, 'offset': <function QueryBuilder.offset>, 'distinct': <function QueryBuilder.distinct>, 'inputs': <function QueryBuilder.inputs>, 'outputs': <function QueryBuilder.outputs>, 'children': <function QueryBuilder.children>, 'parents': <function QueryBuilder.parents>, 'as_sql': <function QueryBuilder.as_sql>, 'analyze_query': <function QueryBuilder.analyze_query>, '_get_aiida_entity_res': <staticmethod(<function QueryBuilder._get_aiida_entity_res>)>, 'first': 
<function QueryBuilder.first>, 'count': <function QueryBuilder.count>, 'iterall': <function QueryBuilder.iterall>, 'iterdict': <function QueryBuilder.iterdict>, 'all': <function QueryBuilder.all>, 'one': <function QueryBuilder.one>, 'dict': <function QueryBuilder.dict>, '__dict__': <attribute '__dict__' of 'QueryBuilder' objects>, '__weakref__': <attribute '__weakref__' of 'QueryBuilder' objects>, '__annotations__': {'_impl': 'BackendQueryBuilder', '_path': 'List[PathItemType]', '_filters': 'Dict[str, Dict[str, Any]]', '_projections': 'Dict[str, List[Dict[str, Dict[str, Any]]]]', '_project_map': 'Dict[str, Dict[str, str]]', '_order_by': 'List[Dict[str, List[Dict[str, Dict[str, str]]]]]', '_limit': 'Optional[int]', '_offset': 'Optional[int]', '_distinct': 'bool'}})#
- __init__(backend: 'StorageBackend' | None = None, *, debug: bool | None = None, path: Sequence[str | Dict[str, Any] | EntityClsType] | None = (), filters: Dict[str, FilterType] | None = None, project: Dict[str, ProjectType] | None = None, limit: int | None = None, offset: int | None = None, order_by: OrderByType | None = None, distinct: bool = False, project_map: Dict[str, Dict[str, str]] | None = None) None [source]#
Instantiates a QueryBuilder instance.
Which backend is used is decided here based on backend settings (taken from the user profile). This cannot be overridden so far by the user.
- Parameters:
debug – Turn on debug mode. This feature prints information on the screen about the stages of the QueryBuilder. Does not affect results.
path – A list of the vertices to traverse. Leave empty if you plan on using the method
QueryBuilder.append()
.filters – The filters to apply. You can specify the filters here, when appending to the query using
QueryBuilder.append()
or even later usingQueryBuilder.add_filter()
. Check latter gives API-details.project – The projections to apply. You can specify the projections here, when appending to the query using
QueryBuilder.append()
or even later usingQueryBuilder.add_projection()
. Latter gives you API-details.limit – Limit the number of rows to this number. Check
QueryBuilder.limit()
for more information.offset – Set an offset for the results returned. Details in
QueryBuilder.offset()
.order_by – How to order the results. As the 2 above, can be set also at later stage, check
QueryBuilder.order_by()
for more information.distinct – Whether to return de-duplicated rows
project_map – A mapping of the projection input-keys to the output-keys of dict/iterdict
- __module__ = 'aiida.orm.querybuilder'#
- __weakref__#
list of weak references to the object
- _add_group_type_filter(tagspec: str, classifiers: List[Classifier], subclassing: bool) None [source]#
Add a filter based on group type.
- Parameters:
tagspec – The tag, which has to exist already as a key in self._filters
classifiers – a dictionary with classifiers
subclassing – if True, allow for subclasses of the ormclass
- _add_node_type_filter(tagspec: str, classifiers: List[Classifier], subclassing: bool)[source]#
Add a filter based on node type.
- Parameters:
tagspec – The tag, which has to exist already as a key in self._filters
classifiers – a dictionary with classifiers
subclassing – if True, allow for subclasses of the ormclass
- _add_process_type_filter(tagspec: str, classifiers: List[Classifier], subclassing: bool) None [source]#
Add a filter based on process type.
- Parameters:
tagspec – The tag, which has to exist already as a key in self._filters
classifiers – a dictionary with classifiers
subclassing – if True, allow for subclasses of the process type
Note: This function handles the case when process_type_string is None.
- static _get_aiida_entity_res(value) Any [source]#
Convert a projected query result to front end class if it is an instance of a BackendEntity.
Values that are not a BackendEntity instance will be returned unaltered
- Parameters:
value – a projected query result to convert
- Returns:
the converted value
- _get_unique_tag(classifiers: List[Classifier]) str [source]#
Using the function get_tag_from_type, I get a tag. I increment an index that is appended to that tag until I have an unused tag. This function is called in
QueryBuilder.append()
when no tag is given.- Parameters:
classifiers (dict) –
Classifiers, containing the string that defines the type of the AiiDA ORM class. For subclasses of Node, this is the Node._plugin_type_string, for other they are as defined as returned by
QueryBuilder._get_ormclass()
.Can also be a list of dictionaries, when multiple classes are passed to QueryBuilder.append
- Returns:
A tag as a string (it is a single string also when passing multiple classes).
- _init_project_map(project_map: Dict[str, Dict[str, str]]) None [source]#
Set the project map.
Note, this is a private method, since the user should not override what is set by projected QbFields.
- Parameters:
project_map (dict) – The project map.
- static _process_filters(filters: Dict[str, Any] | QbFieldFilters) Dict[str, Any] [source]#
Process filters.
- add_filter(tagspec: str | EntityClsType, filter_spec: FilterType) QueryBuilder [source]#
Adding a filter to my filters.
- Parameters:
tagspec – A tag string or an ORM class which maps to an existing tag
filter_spec – The specifications for the filter, has to be a dictionary
Usage:
qb = QueryBuilder() # Instantiating the QueryBuilder instance qb.append(Node, tag='node') # Appending a Node #let's put some filters: qb.add_filter('node',{'id':{'>':12}}) # 2 filters together: qb.add_filter('node',{'label':'foo', 'uuid':{'like':'ab%'}}) # Now I am overriding the first filter I set: qb.add_filter('node',{'id':13})
- add_projection(tag_spec: str | EntityClsType, projection_spec: ProjectType) None [source]#
Adds a projection
- Parameters:
tag_spec – A tag string or an ORM class which maps to an existing tag
projection_spec – The specification for the projection. A projection is a list of dictionaries, with each dictionary containing key-value pairs where the key is database entity (e.g. a column / an attribute) and the value is (optional) additional information on how to process this database entity.
If the given projection_spec is not a list, it will be expanded to a list. If the list items are not dictionaries, but strings (no additional processing of the projected results desired), they will be expanded to dictionaries.
Usage:
qb = QueryBuilder() qb.append(StructureData, tag='struc') # Will project the uuid and the kinds qb.add_projection('struc', ['uuid', 'attributes.kinds'])
The above example will project the uuid and the kinds-attribute of all matching structures. There are 2 (so far) special keys.
The single star * will project the ORM-instance:
qb = QueryBuilder() qb.append(StructureData, tag='struc') # Will project the ORM instance qb.add_projection('struc', '*') print type(qb.first()[0]) # >>> aiida.orm.nodes.data.structure.StructureData
The double star
**
projects all possible projections of this entity:QueryBuilder().append(StructureData,tag=’s’, project=’**’).limit(1).dict()[0][‘s’].keys()
# >>> ‘user_id, description, ctime, label, extras, mtime, id, attributes, dbcomputer_id, type, uuid’
Be aware that the result of
**
depends on the backend implementation.
- all(batch_size: int | None = None, flat: bool = False) List[List[Any]] | List[Any] [source]#
Executes the full query with the order of the rows as returned by the backend.
The order inside each row is given by the order of the vertices in the path and the order of the projections for each vertex in the path.
- Parameters:
batch_size – the size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter. Leave the default None if speed is not critical or if you don’t know what you’re doing.
flat – return the result as a flat list of projected entities without sub lists.
- Returns:
a list of lists of all projected entities.
- analyze_query(execute: bool = True, verbose: bool = False) str [source]#
Return the query plan, i.e. a list of SQL statements that will be executed.
See: https://www.postgresql.org/docs/11/sql-explain.html
- Params execute:
Carry out the command and show actual run times and other statistics.
- Params verbose:
Display additional information regarding the plan.
- append(cls: EntityClsType | Sequence[EntityClsType] | None = None, entity_type: str | Sequence[str] | None = None, tag: str | None = None, filters: FilterType | None = None, project: ProjectType | None = None, subclassing: bool = True, edge_tag: str | None = None, edge_filters: FilterType | None = None, edge_project: ProjectType | None = None, outerjoin: bool = False, joining_keyword: str | None = None, joining_value: Any | None = None, orm_base: str | None = None, **kwargs: Any) QueryBuilder [source]#
Any iterative procedure to build the path for a graph query needs to invoke this method to append to the path.
- Parameters:
cls –
The AiiDA class (or backend class) defining the appended vertex. Also supports a tuple/list of classes. This results in all instances of these classes being accepted in a query. However, the classes have to have the same orm-class for the joining to work. I.e. both have to be subclasses of Node. Valid is:
cls=(StructureData, Dict)
This is invalid:
cls=(Group, Node)
entity_type – The node type of the class, if cls is not given. Also here, a tuple or list is accepted.
tag – A unique tag. If none is given, I will create a unique tag myself.
filters – Filters to apply for this vertex. See
add_filter()
, the method invoked in the background, or usage examples for details.project – Projections to apply. See usage examples for details. More information also in
add_projection()
.subclassing – Whether to include subclasses of the given class (default True). E.g. Specifying a ProcessNode as cls will include CalcJobNode, WorkChainNode, CalcFunctionNode, etc..
edge_tag – The tag that the edge will get. If nothing is specified (and there is a meaningful edge) the default is tag1–tag2 with tag1 being the entity joining from and tag2 being the entity joining to (this entity).
edge_filters – The filters to apply on the edge. Also here, details in
add_filter()
.edge_project – The project from the edges. API-details in
add_projection()
.outerjoin – If True, (default is False), will do a left outerjoin instead of an inner join
Joining can be specified in two ways:
Specifying the ‘joining_keyword’ and ‘joining_value’ arguments
Specify a single keyword argument
The joining keyword will be
with_*
ordirection
, depending on the joining entity type. The joining value is the tag name or class of the entity to join to.A small usage example how this can be invoked:
qb = QueryBuilder() # Instantiating empty querybuilder instance qb.append(cls=StructureData) # First item is StructureData node # The # next node in the path is a PwCalculation, with # the structure joined as an input qb.append( cls=PwCalculation, with_incoming=StructureData )
- Returns:
self
- as_dict(copy: bool = True) QueryDictType [source]#
Convert to a JSON serialisable dictionary representation of the query.
- as_sql(inline: bool = False) str [source]#
Convert the query to an SQL string representation.
Warning
This method should be used for debugging purposes only, since normally sqlalchemy will handle this process internally.
- Params inline:
Inline bound parameters (this is normally handled by the Python DB-API).
- property backend: StorageBackend#
Return the backend used by the QueryBuilder.
- children(**kwargs: Any) QueryBuilder [source]#
Join to children/descendants of the previous vertex in the path.
- Returns:
self
- count() int [source]#
Counts the number of rows returned by the backend.
- Returns:
the number of rows as an integer
- debug(msg: str, *objects: Any) None [source]#
Log debug message.
objects will passed to the format string, e.g.
msg % objects
- dict(batch_size: int | None = None) List[Dict[str, Dict[str, Any]]] [source]#
Executes the full query with the order of the rows as returned by the backend. The order inside each row is given by the order of the vertices in the path and the order of the projections for each vertex in the path.
- Parameters:
batch_size – The size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter. Leave the default (None) if speed is not critical or if you don’t know what you’re doing!
- Returns:
A list of dictionaries of all projected entities: tag -> field -> value
Usage:
qb = QueryBuilder() qb.append( StructureData, tag='structure', filters={'uuid':{'==':myuuid}}, ) qb.append( Node, with_ancestors='structure', project=['entity_type', 'id'], # returns entity_type (string) and id (string) tag='descendant' ) # Return the dictionaries: print "qb.iterdict()" for d in qb.iterdict(): print '>>>', d
results in the following output:
qb.iterdict() >>> {'descendant': { 'entity_type': 'calculation.job.quantumespresso.pw.PwCalculation.', 'id': 7716} } >>> {'descendant': { 'entity_type': 'data.remote.RemoteData.', 'id': 8510} }
- distinct(value: bool = True) QueryBuilder [source]#
Asks for distinct rows, which is the same as asking the backend to remove duplicates. Does not execute the query!
If you want a distinct query:
qb = QueryBuilder() # append stuff! qb.append(...) qb.append(...) ... qb.distinct().all() #or qb.distinct().dict()
- Returns:
self
- first(flat: Literal[False] = False) list[Any] | None [source]#
- first(flat: Literal[True]) Any | None
Return the first result of the query.
Calling
first
results in an execution of the underlying query.Note, this may change if several rows are valid for the query, as persistent ordering is not guaranteed unless explicitly specified.
- Parameters:
flat – if True, return just the projected quantity if there is just a single projection.
- Returns:
One row of results as a list, or None if no result returned.
- classmethod from_dict(dct: Dict[str, Any]) QueryBuilder [source]#
Create an instance from a dictionary representation of the query.
- get_used_tags(vertices: bool = True, edges: bool = True) List[str] [source]#
Returns a list of all the vertices that are being used.
- Parameters:
vertices – If True, adds the tags of vertices to the returned list
edges – If True, adds the tags of edges to the returned list.
- Returns:
A list of tags
- inputs(**kwargs: Any) QueryBuilder [source]#
Join to inputs of the previous vertex in the path.
- Returns:
self
- iterall(batch_size: int | None = 100) Iterable[List[Any]] [source]#
Same as
all()
, but returns a generator. Be aware that this is only safe if no commit will take place during this transaction. You might also want to read the SQLAlchemy documentation on https://docs.sqlalchemy.org/en/14/orm/query.html#sqlalchemy.orm.Query.yield_per- Parameters:
batch_size – The size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter.
- Returns:
a generator of lists
- iterdict(batch_size: int | None = 100) Iterable[Dict[str, Dict[str, Any]]] [source]#
Same as
dict()
, but returns a generator. Be aware that this is only safe if no commit will take place during this transaction. You might also want to read the SQLAlchemy documentation on https://docs.sqlalchemy.org/en/14/orm/query.html#sqlalchemy.orm.Query.yield_per- Parameters:
batch_size – The size of the batches to ask the backend to batch results in subcollections. You can optimize the speed of the query by tuning this parameter.
- Returns:
a generator of dictionaries
- limit(limit: int | None) QueryBuilder [source]#
Set the limit (nr of rows to return)
- Parameters:
limit – the integer number of rows to return
- offset(offset: int | None) QueryBuilder [source]#
Set the offset. If offset is set, that many rows are skipped before returning. offset = 0 is the same as omitting setting the offset. If both offset and limit appear, then offset rows are skipped before starting to count the limit rows that are returned.
- Parameters:
offset – the integer number of rows to skip
- one() List[Any] [source]#
Executes the query asking for exactly one result.
Will raise an exception if this is not the case:
- Raises:
MultipleObjectsError if more than one row can be returned
- Raises:
NotExistent if no result was found
- order_by(order_by: dict | List[dict] | Tuple[dict, ...]) QueryBuilder [source]#
Set the entity to order by
- Parameters:
order_by – This is a list of items, where each item is a dictionary specifies what to sort for an entity
In each dictionary in that list, keys represent valid tags of entities (tables), and values are list of columns.
Usage:
#Sorting by id (ascending): qb = QueryBuilder() qb.append(Node, tag='node') qb.order_by({'node':['id']}) # or #Sorting by id (ascending): qb = QueryBuilder() qb.append(Node, tag='node') qb.order_by({'node':[{'id':{'order':'asc'}}]}) # for descending order: qb = QueryBuilder() qb.append(Node, tag='node') qb.order_by({'node':[{'id':{'order':'desc'}}]}) # or (shorter) qb = QueryBuilder() qb.append(Node, tag='node') qb.order_by({'node':[{'id':'desc'}]})
- outputs(**kwargs: Any) QueryBuilder [source]#
Join to outputs of previous vertex in path.
- Returns:
self
- parents(**kwargs: Any) QueryBuilder [source]#
Join to parents/ancestors of previous vertex in path.
- Returns:
self
- property queryhelp: QueryDictType#
Legacy name for
as_dict
method.
- set_debug(debug: bool) QueryBuilder [source]#
Run in debug mode. This does not affect functionality, but prints intermediate stages when creating a query on screen.
- Parameters:
debug – Turn debug on or off
- class aiida.orm.querybuilder._QueryTagMap[source]#
Bases:
object
Cache of tag mappings for a query.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.querybuilder', '__doc__': 'Cache of tag mappings for a query.', '__init__': <function _QueryTagMap.__init__>, '__repr__': <function _QueryTagMap.__repr__>, '__contains__': <function _QueryTagMap.__contains__>, '__iter__': <function _QueryTagMap.__iter__>, 'add': <function _QueryTagMap.add>, 'remove': <function _QueryTagMap.remove>, 'get': <function _QueryTagMap.get>, '__dict__': <attribute '__dict__' of '_QueryTagMap' objects>, '__weakref__': <attribute '__weakref__' of '_QueryTagMap' objects>, '__annotations__': {'_tag_to_type': 'Dict[str, Union[None, EntityTypes]]', '_cls_to_tag_map': 'Dict[Any, Set[str]]'}})#
- __module__ = 'aiida.orm.querybuilder'#
- __weakref__#
list of weak references to the object
- add(tag: str, etype: None | EntityTypes = None, klasses: None | EntityClsType | Sequence[EntityClsType] = None) None [source]#
Add a tag.
- get(tag_or_cls: str | EntityClsType) str [source]#
Return the tag or, given a class(es), map to a tag.
- Raises:
ValueError – if the tag is not found, or the class(es) does not map to a single tag
- aiida.orm.querybuilder._get_group_type_filter(classifiers: Classifier, subclassing: bool) dict [source]#
Return filter dictionaries for Group.type_string given a set of classifiers.
- Parameters:
classifiers – a dictionary with classifiers (note: does not support lists)
subclassing – if True, allow for subclasses of the ormclass
- Returns:
dictionary in QueryBuilder filter language to pass into {‘type_string’: … }
- aiida.orm.querybuilder._get_node_type_filter(classifiers: Classifier, subclassing: bool) dict [source]#
Return filter dictionaries given a set of classifiers.
- Parameters:
classifiers – a dictionary with classifiers (note: does not support lists)
subclassing – if True, allow for subclasses of the ormclass
- Returns:
dictionary in QueryBuilder filter language to pass into {“type”: … }
- aiida.orm.querybuilder._get_ormclass(cls: None | EntityClsType | Sequence[EntityClsType], entity_type: None | str | Sequence[str]) Tuple[EntityTypes, List[Classifier]] [source]#
Get ORM classifiers from either class(es) or ormclass_type_string(s).
- Parameters:
cls – a class or tuple/set/list of classes that are either AiiDA ORM classes or backend ORM classes.
ormclass_type_string – type string for ORM class
- Returns:
the ORM class as well as a dictionary with additional classifier strings
Handles the case of lists as well.
- aiida.orm.querybuilder._get_ormclass_from_cls(cls: EntityClsType) Tuple[EntityTypes, Classifier] [source]#
Return the correct classifiers for the QueryBuilder from an ORM class.
- Parameters:
cls – an AiiDA ORM class or backend ORM class.
query – an instance of the appropriate QueryBuilder backend.
- Returns:
the ORM class as well as a dictionary with additional classifier strings
- Note: the ormclass_type_string is currently hardcoded for group, computer etc. One could instead use something like
aiida.orm.utils.node.get_type_string_from_class(cls.__module__, cls.__name__)
- aiida.orm.querybuilder._get_ormclass_from_str(type_string: str) Tuple[EntityTypes, Classifier] [source]#
Return the correct classifiers for the QueryBuilder from an ORM type string.
- Parameters:
type_string – type string for ORM class
query – an instance of the appropriate QueryBuilder backend.
- Returns:
the ORM class as well as a dictionary with additional classifier strings
- aiida.orm.querybuilder._get_process_type_filter(classifiers: Classifier, subclassing: bool) dict [source]#
Return filter dictionaries given a set of classifiers.
- Parameters:
classifiers – a dictionary with classifiers (note: does not support lists)
subclassing – if True, allow for subclasses of the process type This is activated only, if an entry point can be found for the process type (as well as for a selection of built-in process types)
- Returns:
dictionary in QueryBuilder filter language to pass into {“process_type”: … }
Module for the ORM user class.
- class aiida.orm.users.User(email: str, first_name: str = '', last_name: str = '', institution: str = '', backend: StorageBackend | None = None)[source]#
Bases:
Entity
[BackendUser
,UserCollection
]AiiDA User
- _CLS_COLLECTION#
alias of
UserCollection
- __abstractmethods__ = frozenset({})#
- __init__(email: str, first_name: str = '', last_name: str = '', institution: str = '', backend: StorageBackend | None = None)[source]#
Create a new User.
- __module__ = 'aiida.orm.users'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendUser'), aiida.orm.users.UserCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbStrField('email', dtype=str, is_attribute=False), QbStrField('first_name', dtype=str, is_attribute=False), QbStrField('last_name', dtype=str, is_attribute=False), QbStrField('institution', dtype=str, is_attribute=False)]#
- _abc_impl = <_abc._abc_data object>#
- fields: QbFields = {'email': 'QbStrField(email) -> str', 'first_name': 'QbStrField(first_name) -> str', 'institution': 'QbStrField(institution) -> str', 'last_name': 'QbStrField(last_name) -> str', 'pk': 'QbNumericField(pk) -> int'}#
- get_short_name() str [source]#
Return the user short name (typically, this returns the email)
- Returns:
The short name
- property is_default: bool#
Return whether the user is the default user.
- Returns:
Boolean,
True
if the user is the default,False
otherwise.
- class aiida.orm.users.UserCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection
[User
]The collection of users stored in a backend.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.users'#
- __orig_bases__ = (aiida.orm.entities.Collection[ForwardRef('User')],)#
- __parameters__ = ()#
- _abc_impl = <_abc._abc_data object>#
- get_or_create(email: str, **kwargs) Tuple[bool, User] [source]#
Get the existing user with a given email address or create an unstored one
- Parameters:
kwargs – The properties of the user to get or create
- Returns:
The corresponding user object
- Raises:
aiida.common.exceptions.MultipleObjectsError
,aiida.common.exceptions.NotExistent