aiida.orm.nodes package#
Module with Node sub classes for data and processes.
Subpackages#
- aiida.orm.nodes.data package
- Subpackages
- Submodules
BaseType
to_aiida_type()
Bool
_()
get_false_node()
get_true_node()
CifData
CifData._PARSE_POLICIES
CifData._PARSE_POLICY_DEFAULT
CifData._SCAN_TYPES
CifData._SCAN_TYPE_DEFAULT
CifData._SET_INCOMPATIBILITIES
CifData.__abstractmethods__
CifData.__init__()
CifData.__module__
CifData.__parameters__
CifData.__qb_fields__
CifData._abc_impl
CifData._ase
CifData._get_object_ase()
CifData._get_object_pycifrw()
CifData._logger
CifData._prepare_cif()
CifData._validate()
CifData._values
CifData.ase
CifData.fields
CifData.from_md5()
CifData.generate_md5()
CifData.get_ase()
CifData.get_formulae()
CifData.get_or_create()
CifData.get_spacegroup_numbers()
CifData.get_structure()
CifData.has_atomic_sites
CifData.has_attached_hydrogens
CifData.has_partial_occupancies
CifData.has_undefined_atomic_sites
CifData.has_unknown_species
CifData.parse()
CifData.read_cif()
CifData.set_ase()
CifData.set_file()
CifData.set_parse_policy()
CifData.set_scan_type()
CifData.set_values()
CifData.store()
CifData.values
cif_from_ase()
has_pycifrw()
parse_formula()
pycifrw_from_cif()
Data
Data.__abstractmethods__
Data.__annotations__
Data.__copy__()
Data.__deepcopy__()
Data.__init__()
Data.__module__
Data.__parameters__
Data.__qb_fields__
Data._abc_impl
Data._export_format_replacements
Data._exportcontent()
Data._get_converters()
Data._get_exporters()
Data._get_importers()
Data._logger
Data._source_attributes
Data._storable
Data._unstorable_message
Data.clone()
Data.convert()
Data.creator
Data.export()
Data.fields
Data.get_export_formats()
Data.importfile()
Data.importstring()
Data.set_source()
Data.source
Dict
Dict.__abstractmethods__
Dict.__contains__()
Dict.__eq__()
Dict.__getitem__()
Dict.__hash__
Dict.__init__()
Dict.__module__
Dict.__parameters__
Dict.__plugin_type_string
Dict.__qb_fields__
Dict.__query_type_string
Dict.__setitem__()
Dict._abc_impl
Dict._logger
Dict.dict
Dict.fields
Dict.get()
Dict.get_dict()
Dict.items()
Dict.keys()
Dict.set_dict()
Dict.update_dict()
_()
EnumData
EnumData.KEY_IDENTIFIER
EnumData.KEY_NAME
EnumData.KEY_VALUE
EnumData.__abstractmethods__
EnumData.__eq__()
EnumData.__hash__
EnumData.__init__()
EnumData.__module__
EnumData.__parameters__
EnumData.__plugin_type_string
EnumData.__query_type_string
EnumData._abc_impl
EnumData._logger
EnumData.fields
EnumData.get_enum()
EnumData.get_member()
EnumData.name
EnumData.value
_()
Float
_()
FolderData
FolderData.__abstractmethods__
FolderData.__init__()
FolderData.__module__
FolderData.__parameters__
FolderData.__plugin_type_string
FolderData.__query_type_string
FolderData._abc_impl
FolderData._logger
FolderData.as_path()
FolderData.copy_tree()
FolderData.delete_object()
FolderData.erase()
FolderData.fields
FolderData.get_object()
FolderData.get_object_content()
FolderData.glob()
FolderData.list_object_names()
FolderData.list_objects()
FolderData.open()
FolderData.put_object_from_bytes()
FolderData.put_object_from_file()
FolderData.put_object_from_filelike()
FolderData.put_object_from_tree()
FolderData.walk()
Int
_()
JsonSerializableProtocol
JsonSerializableProtocol.__abstractmethods__
JsonSerializableProtocol.__dict__
JsonSerializableProtocol.__init__()
JsonSerializableProtocol.__module__
JsonSerializableProtocol.__parameters__
JsonSerializableProtocol.__subclasshook__()
JsonSerializableProtocol.__weakref__
JsonSerializableProtocol._abc_impl
JsonSerializableProtocol._is_protocol
JsonSerializableProtocol.as_dict()
JsonableData
JsonableData.__abstractmethods__
JsonableData.__init__()
JsonableData.__module__
JsonableData.__parameters__
JsonableData.__plugin_type_string
JsonableData.__query_type_string
JsonableData._abc_impl
JsonableData._deserialize_float_constants()
JsonableData._get_object()
JsonableData._logger
JsonableData.fields
JsonableData.obj
List
List._LIST_KEY
List.__abstractmethods__
List.__delitem__()
List.__eq__()
List.__getitem__()
List.__hash__
List.__init__()
List.__len__()
List.__module__
List.__parameters__
List.__plugin_type_string
List.__query_type_string
List.__setitem__()
List.__str__()
List._abc_impl
List._logger
List._using_list_reference()
List.append()
List.count()
List.extend()
List.fields
List.get_list()
List.index()
List.insert()
List.pop()
List.remove()
List.reverse()
List.set_list()
List.sort()
_()
NumericType
NumericType.__abs__()
NumericType.__abstractmethods__
NumericType.__add__()
NumericType.__div__()
NumericType.__float__()
NumericType.__floordiv__()
NumericType.__ge__()
NumericType.__gt__()
NumericType.__int__()
NumericType.__le__()
NumericType.__lt__()
NumericType.__mod__()
NumericType.__module__
NumericType.__mul__()
NumericType.__neg__()
NumericType.__parameters__
NumericType.__plugin_type_string
NumericType.__pos__()
NumericType.__pow__()
NumericType.__query_type_string
NumericType.__radd__()
NumericType.__rdiv__()
NumericType.__rfloordiv__()
NumericType.__rmod__()
NumericType.__rmul__()
NumericType.__rsub__()
NumericType.__rtruediv__()
NumericType.__sub__()
NumericType.__truediv__()
NumericType._abc_impl
NumericType._logger
NumericType.fields
_left_operator()
_right_operator()
OrbitalData
SinglefileData
SinglefileData.DEFAULT_FILENAME
SinglefileData.__abstractmethods__
SinglefileData.__init__()
SinglefileData.__module__
SinglefileData.__parameters__
SinglefileData.__plugin_type_string
SinglefileData.__query_type_string
SinglefileData._abc_impl
SinglefileData._logger
SinglefileData._validate()
SinglefileData.as_path()
SinglefileData.fields
SinglefileData.filename
SinglefileData.from_string()
SinglefileData.get_content()
SinglefileData.open()
SinglefileData.set_file()
Str
_()
Kind
Kind.__dict__
Kind.__init__()
Kind.__module__
Kind.__repr__()
Kind.__str__()
Kind.__weakref__
Kind.compare_with()
Kind.get_raw()
Kind.get_symbols_string()
Kind.has_vacancies
Kind.is_alloy
Kind.mass
Kind.name
Kind.reset_mass()
Kind.set_automatic_kind_name()
Kind.set_symbols_and_weights()
Kind.symbol
Kind.symbols
Kind.weights
Site
StructureData
StructureData.__abstractmethods__
StructureData.__init__()
StructureData.__module__
StructureData.__parameters__
StructureData.__plugin_type_string
StructureData.__qb_fields__
StructureData.__query_type_string
StructureData._abc_impl
StructureData._adjust_default_cell()
StructureData._dimensionality_label
StructureData._get_object_ase()
StructureData._get_object_phonopyatoms()
StructureData._get_object_pymatgen()
StructureData._get_object_pymatgen_molecule()
StructureData._get_object_pymatgen_structure()
StructureData._internal_kind_tags
StructureData._logger
StructureData._parse_xyz()
StructureData._prepare_chemdoodle()
StructureData._prepare_cif()
StructureData._prepare_xsf()
StructureData._prepare_xyz()
StructureData._set_incompatibilities
StructureData._validate()
StructureData.append_atom()
StructureData.append_kind()
StructureData.append_site()
StructureData.cell
StructureData.cell_angles
StructureData.cell_lengths
StructureData.clear_kinds()
StructureData.clear_sites()
StructureData.fields
StructureData.get_ase()
StructureData.get_cell_volume()
StructureData.get_cif()
StructureData.get_composition()
StructureData.get_description()
StructureData.get_dimensionality()
StructureData.get_formula()
StructureData.get_kind()
StructureData.get_kind_names()
StructureData.get_pymatgen()
StructureData.get_pymatgen_molecule()
StructureData.get_pymatgen_structure()
StructureData.get_site_kindnames()
StructureData.get_symbols_set()
StructureData.has_vacancies
StructureData.is_alloy
StructureData.kinds
StructureData.pbc
StructureData.reset_cell()
StructureData.reset_sites_positions()
StructureData.set_ase()
StructureData.set_cell()
StructureData.set_cell_angles()
StructureData.set_cell_lengths()
StructureData.set_pbc()
StructureData.set_pymatgen()
StructureData.set_pymatgen_molecule()
StructureData.set_pymatgen_structure()
StructureData.sites
_create_symbols_tuple()
_create_weights_tuple()
_get_dimensionality()
_get_valid_cell()
_validate_dimensionality()
ase_refine_cell()
atom_kinds_to_html()
calc_cell_volume()
create_automatic_kind_name()
get_formula()
get_formula_from_symbol_list()
get_formula_group()
get_pymatgen_version()
get_symbols_string()
get_valid_pbc()
group_symbols()
has_ase()
has_pymatgen()
has_spglib()
has_vacancies()
is_ase_atoms()
is_valid_symbol()
symop_fract_from_ortho()
symop_ortho_from_fract()
validate_symbols_tuple()
validate_weights_tuple()
UpfData
UpfData.__abstractmethods__
UpfData.__init__()
UpfData.__module__
UpfData.__parameters__
UpfData.__plugin_type_string
UpfData.__query_type_string
UpfData._abc_impl
UpfData._logger
UpfData._prepare_json()
UpfData._prepare_upf()
UpfData._validate()
UpfData.element
UpfData.fields
UpfData.from_md5()
UpfData.get_or_create()
UpfData.get_upf_family_names()
UpfData.get_upf_group()
UpfData.get_upf_groups()
UpfData.md5sum
UpfData.set_file()
UpfData.store()
emit_deprecation()
get_pseudos_from_structure()
parse_upf()
upload_upf_family()
- aiida.orm.nodes.process package
- Subpackages
- Submodules
ProcessNode
ProcessNode.CHECKPOINT_KEY
ProcessNode.EXCEPTION_KEY
ProcessNode.EXIT_MESSAGE_KEY
ProcessNode.EXIT_STATUS_KEY
ProcessNode.METADATA_INPUTS_KEY
ProcessNode.PROCESS_LABEL_KEY
ProcessNode.PROCESS_PAUSED_KEY
ProcessNode.PROCESS_STATE_KEY
ProcessNode.PROCESS_STATUS_KEY
ProcessNode._CLS_NODE_CACHING
ProcessNode._CLS_NODE_LINKS
ProcessNode.__abstractmethods__
ProcessNode.__annotations__
ProcessNode.__module__
ProcessNode.__parameters__
ProcessNode.__qb_fields__
ProcessNode.__str__()
ProcessNode._abc_impl
ProcessNode._hash_ignored_attributes
ProcessNode._logger
ProcessNode._unstorable_message
ProcessNode._updatable_attributes
ProcessNode.called
ProcessNode.called_descendants
ProcessNode.caller
ProcessNode.checkpoint
ProcessNode.delete_checkpoint()
ProcessNode.exception
ProcessNode.exit_code
ProcessNode.exit_message
ProcessNode.exit_status
ProcessNode.fields
ProcessNode.get_builder_restart()
ProcessNode.get_metadata_inputs()
ProcessNode.is_excepted
ProcessNode.is_failed
ProcessNode.is_finished
ProcessNode.is_finished_ok
ProcessNode.is_killed
ProcessNode.is_terminated
ProcessNode.logger
ProcessNode.pause()
ProcessNode.paused
ProcessNode.process_class
ProcessNode.process_label
ProcessNode.process_state
ProcessNode.process_status
ProcessNode.recursive_merge()
ProcessNode.set_checkpoint()
ProcessNode.set_exception()
ProcessNode.set_exit_message()
ProcessNode.set_exit_status()
ProcessNode.set_metadata_inputs()
ProcessNode.set_process_label()
ProcessNode.set_process_state()
ProcessNode.set_process_status()
ProcessNode.set_process_type()
ProcessNode.unpause()
ProcessNodeCaching
ProcessNodeLinks
Submodules#
Interface to the attributes of a node instance.
- class aiida.orm.nodes.attributes.NodeAttributes(node: Node)[source]#
Bases:
object
Interface to the attributes of a node instance.
Attributes are a JSONable dictionary, stored on each node, allowing for arbitrary data to be stored by node subclasses (and thus data plugins).
Once the node is stored, the attributes are generally deemed immutable (except for some updatable keys on process nodes, which can be mutated whilst the node is not “sealed”).
- __dict__ = mappingproxy({'__module__': 'aiida.orm.nodes.attributes', '__doc__': 'Interface to the attributes of a node instance.\n\n Attributes are a JSONable dictionary, stored on each node,\n allowing for arbitrary data to be stored by node subclasses (and thus data plugins).\n\n Once the node is stored, the attributes are generally deemed immutable\n (except for some updatable keys on process nodes, which can be mutated whilst the node is not "sealed").\n ', '__init__': <function NodeAttributes.__init__>, '__contains__': <function NodeAttributes.__contains__>, 'all': <property object>, 'get': <function NodeAttributes.get>, 'get_many': <function NodeAttributes.get_many>, 'set': <function NodeAttributes.set>, 'set_many': <function NodeAttributes.set_many>, 'reset': <function NodeAttributes.reset>, 'delete': <function NodeAttributes.delete>, 'delete_many': <function NodeAttributes.delete_many>, 'clear': <function NodeAttributes.clear>, 'items': <function NodeAttributes.items>, 'keys': <function NodeAttributes.keys>, '__dict__': <attribute '__dict__' of 'NodeAttributes' objects>, '__weakref__': <attribute '__weakref__' of 'NodeAttributes' objects>, '__annotations__': {}})#
- __module__ = 'aiida.orm.nodes.attributes'#
- __weakref__#
list of weak references to the object
- property all: Dict[str, Any]#
Return the complete attributes dictionary.
Warning
While the entity is unstored, this will return references of the attributes on the database model, meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned attributes will be a deep copy and mutations of the database attributes will have to go through the appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys or some values, use the iterators keys and items, or the getters get and get_many instead.
- Returns:
the attributes as a dictionary
- delete(key: str) None [source]#
Delete an attribute.
- Parameters:
key – name of the attribute
- Raises:
AttributeError – if the attribute does not exist
aiida.common.ModificationNotAllowed – if the entity is stored
- delete_many(keys: List[str]) None [source]#
Delete multiple attributes.
- Parameters:
keys – names of the attributes to delete
- Raises:
AttributeError – if at least one of the attribute does not exist
aiida.common.ModificationNotAllowed – if the entity is stored
- get(key: str, default=()) Any [source]#
Return the value of an attribute.
Warning
While the entity is unstored, this will return a reference of the attribute on the database model, meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned attribute will be a deep copy and mutations of the database attributes will have to go through the appropriate set methods.
- Parameters:
key – name of the attribute
default – return this value instead of raising if the attribute does not exist
- Returns:
the value of the attribute
- Raises:
AttributeError – if the attribute does not exist and no default is specified
- get_many(keys: List[str]) List[Any] [source]#
Return the values of multiple attributes.
Warning
While the entity is unstored, this will return references of the attributes on the database model, meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will automatically be reflected on the database model as well. As soon as the entity is stored, the returned attributes will be a deep copy and mutations of the database attributes will have to go through the appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys or some values, use the iterators keys and items, or the getters get and get_many instead.
- Parameters:
keys – a list of attribute names
- Returns:
a list of attribute values
- Raises:
AttributeError – if at least one attribute does not exist
- items() Iterable[Tuple[str, Any]] [source]#
Return an iterator over the attributes.
- Returns:
an iterator with attribute key value pairs
- keys() Iterable[str] [source]#
Return an iterator over the attribute keys.
- Returns:
an iterator with attribute keys
- reset(attributes: Dict[str, Any]) None [source]#
Reset the attributes.
Note
This will completely clear any existing attributes and replace them with the new dictionary.
- Parameters:
attributes – a dictionary with the attributes to set
- Raises:
aiida.common.ValidationError – if any of the keys are invalid, i.e. contain periods
aiida.common.ModificationNotAllowed – if the entity is stored
- set(key: str, value: Any) None [source]#
Set an attribute to the given value.
- Parameters:
key – name of the attribute
value – value of the attribute
- Raises:
aiida.common.ValidationError – if the key is invalid, i.e. contains periods
aiida.common.ModificationNotAllowed – if the entity is stored
- set_many(attributes: Dict[str, Any]) None [source]#
Set multiple attributes.
Note
This will override any existing attributes that are present in the new dictionary.
- Parameters:
attributes – a dictionary with the attributes to set
- Raises:
aiida.common.ValidationError – if any of the keys are invalid, i.e. contain periods
aiida.common.ModificationNotAllowed – if the entity is stored
Interface to control caching of a node instance.
- class aiida.orm.nodes.caching.NodeCaching(node: Node)[source]#
Bases:
object
Interface to control caching of a node instance.
- __annotations__ = {'CACHED_FROM_KEY': 'str', '_HASH_EXTRA_KEY': 'str', '_VALID_CACHE_KEY': 'str'}#
- __dict__ = mappingproxy({'__module__': 'aiida.orm.nodes.caching', '__annotations__': {'_HASH_EXTRA_KEY': 'str', '_VALID_CACHE_KEY': 'str', 'CACHED_FROM_KEY': 'str'}, '__doc__': 'Interface to control caching of a node instance.', '_HASH_EXTRA_KEY': '_aiida_hash', '_VALID_CACHE_KEY': '_aiida_valid_cache', 'CACHED_FROM_KEY': '_aiida_cached_from', '__init__': <function NodeCaching.__init__>, 'compute_hash': <function NodeCaching.compute_hash>, '_compute_hash': <function NodeCaching._compute_hash>, '_get_objects_to_hash': <function NodeCaching._get_objects_to_hash>, 'get_objects_to_hash': <function NodeCaching.get_objects_to_hash>, 'get_hash': <function NodeCaching.get_hash>, 'rehash': <function NodeCaching.rehash>, 'clear_hash': <function NodeCaching.clear_hash>, 'get_cache_source': <function NodeCaching.get_cache_source>, 'is_created_from_cache': <property object>, 'should_use_cache': <function NodeCaching.should_use_cache>, '_get_same_node': <function NodeCaching._get_same_node>, 'get_all_same_nodes': <function NodeCaching.get_all_same_nodes>, '_iter_all_same_nodes': <function NodeCaching._iter_all_same_nodes>, 'is_valid_cache': <property object>, '__dict__': <attribute '__dict__' of 'NodeCaching' objects>, '__weakref__': <attribute '__weakref__' of 'NodeCaching' objects>})#
- __module__ = 'aiida.orm.nodes.caching'#
- __weakref__#
list of weak references to the object
- _compute_hash(ignore_errors: bool = True, **kwargs: Any) str | None [source]#
Return the hash for this node based on its attributes.
This will always work, even before storing.
- Parameters:
ignore_errors – return None on aiida.common.exceptions.HashingError (logging the exception)
- _get_same_node() 'Node' | None [source]#
Returns a stored node from which the current Node can be cached or None if it does not exist
If a node is returned it is a valid cache, meaning its _aiida_hash extra matches self.compute_hash(). If there are multiple valid matches, the first one is returned. If no matches are found, None is returned.
- Returns:
a stored Node instance with the same hash as this node, or None
Note: this should be only called on stored nodes, or internally from .store() since it first calls clean_value() on the attributes to normalise them.
- _iter_all_same_nodes(allow_before_store=False) t.Iterator['Node'] [source]#
Returns an iterator of all same nodes.
Note: this should be only called on stored nodes, or internally from .store() since it first calls clean_value() on the attributes to normalise them.
- compute_hash(ignore_errors: bool = True, **kwargs: Any) str | None [source]#
Return the computed hash for this node based on its attributes.
- Parameters:
ignore_errors – return None on aiida.common.exceptions.HashingError (logging the exception)
- get_all_same_nodes() list['Node'] [source]#
Return a list of stored nodes which match the type and hash of the current node.
All returned nodes are valid caches, meaning their _aiida_hash extra matches self.compute_hash().
Note: this can be called only after storing a Node (since at store time attributes will be cleaned with clean_value and the hash should become idempotent to the action of serialization/deserialization)
- get_cache_source() str | None [source]#
Return the UUID of the node that was used in creating this node from the cache, or None if it was not cached.
- Returns:
source node UUID or None
- get_hash() str | None [source]#
Return the hash that was computed and stored for this node, or None.
This does not recompute the hash but simply returns the hash that was computed when the node was stored. If the hash was reset, for example using aiida.orm.nodes.caching.NodeCaching.clear_hash(), it will return None.
- get_objects_to_hash() list[Any] [source]#
Return a list of objects which should be included in the hash.
- property is_created_from_cache: bool#
Return whether this node was created from a cached node.
- Returns:
boolean, True if the node was created by cloning a cached node, False otherwise
- property is_valid_cache: bool#
Hook to exclude certain Node classes from being considered a valid cache.
The base class assumes that all node instances are valid to cache from, unless the _VALID_CACHE_KEY extra has been set to False explicitly. Subclasses can override this property with more specific logic, but should probably also consider the value returned by this base class.
Interface for comments of a node instance.
- class aiida.orm.nodes.comments.NodeComments(node: Node)[source]#
Bases:
object
Interface for comments of a node instance.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.nodes.comments', '__doc__': 'Interface for comments of a node instance.', '__init__': <function NodeComments.__init__>, 'add': <function NodeComments.add>, 'get': <function NodeComments.get>, 'all': <function NodeComments.all>, 'update': <function NodeComments.update>, 'remove': <function NodeComments.remove>, '__dict__': <attribute '__dict__' of 'NodeComments' objects>, '__weakref__': <attribute '__weakref__' of 'NodeComments' objects>, '__annotations__': {}})#
- __module__ = 'aiida.orm.nodes.comments'#
- __weakref__#
list of weak references to the object
- add(content: str, user: User | None = None) Comment [source]#
Add a new comment.
- Parameters:
content – string with comment
user – the user to associate with the comment, will use default if not supplied
- Returns:
the newly created comment
- all() list[Comment] [source]#
Return a sorted list of comments for this node.
- Returns:
the list of comments, sorted by pk
- get(identifier: int) Comment [source]#
Return a comment corresponding to the given identifier.
- Parameters:
identifier – the comment pk
- Raises:
aiida.common.NotExistent – if the comment with the given id does not exist
aiida.common.MultipleObjectsError – if the id cannot be uniquely resolved to a comment
- Returns:
the comment
- remove(identifier: int) None [source]#
Delete an existing comment.
- Parameters:
identifier – the comment pk
- update(identifier: int, content: str) None [source]#
Update the content of an existing comment.
- Parameters:
identifier – the comment pk
content – the new comment content
- Raises:
aiida.common.NotExistent – if the comment with the given id does not exist
aiida.common.MultipleObjectsError – if the id cannot be uniquely resolved to a comment
Interface for links of a node instance.
- class aiida.orm.nodes.links.NodeLinks(node: Node)[source]#
Bases:
object
Interface for links of a node instance.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.nodes.links', '__doc__': 'Interface for links of a node instance.', '__init__': <function NodeLinks.__init__>, '_add_incoming_cache': <function NodeLinks._add_incoming_cache>, 'add_incoming': <function NodeLinks.add_incoming>, 'validate_incoming': <function NodeLinks.validate_incoming>, 'validate_outgoing': <function NodeLinks.validate_outgoing>, 'get_stored_link_triples': <function NodeLinks.get_stored_link_triples>, 'get_incoming': <function NodeLinks.get_incoming>, 'get_outgoing': <function NodeLinks.get_outgoing>, '__dict__': <attribute '__dict__' of 'NodeLinks' objects>, '__weakref__': <attribute '__weakref__' of 'NodeLinks' objects>, '__annotations__': {'incoming_cache': 'list[LinkTriple]'}})#
- __module__ = 'aiida.orm.nodes.links'#
- __weakref__#
list of weak references to the object
- _add_incoming_cache(source: Node, link_type: LinkType, link_label: str) None [source]#
Add an incoming link to the cache.
- Parameters:
source – the node from which the link is coming
link_type – the link type
link_label – the link label
- Raises:
aiida.common.UniquenessError – if the given link triple already exists in the cache
- add_incoming(source: Node, link_type: LinkType, link_label: str) None [source]#
Add a link of the given type from a given node to ourself.
- Parameters:
source – the node from which the link is coming
link_type – the link type
link_label – the link label
- Raises:
TypeError – if source is not a Node instance or link_type is not a LinkType enum
ValueError – if the proposed link is invalid
- get_incoming(node_class: t.Type['Node'] | None = None, link_type: t.Union[LinkType, t.Sequence[LinkType]] = (), link_label_filter: t.Optional[str] = None, only_uuid: bool = False) LinkManager [source]#
Return a list of link triples that are (directly) incoming into this node.
- Parameters:
node_class – If specified, should be a class or tuple of classes, and it filters only elements of that specific type (or a subclass of ‘type’)
link_type – If specified should be a string or tuple to get the inputs of this link type, if None then returns all inputs of all link types.
link_label_filter – filters the incoming nodes by their link label. Here wildcards (% and _) can be passed in the link label filter, as we are using “like” in QB.
only_uuid – project only the node UUID instead of the instance onto the NodeTriple.node entries
- get_outgoing(node_class: t.Type['Node'] | None = None, link_type: t.Union[LinkType, t.Sequence[LinkType]] = (), link_label_filter: t.Optional[str] = None, only_uuid: bool = False) LinkManager [source]#
Return a list of link triples that are (directly) outgoing of this node.
- Parameters:
node_class – If specified, should be a class or tuple of classes, and it filters only elements of that specific type (or a subclass of ‘type’)
link_type – If specified should be a string or tuple to get the inputs of this link type, if None then returns all outputs of all link types.
link_label_filter – filters the outgoing nodes by their link label. Here wildcards (% and _) can be passed in the link label filter, as we are using “like” in QB.
only_uuid – project only the node UUID instead of the instance onto the NodeTriple.node entries
- get_stored_link_triples(node_class: t.Type['Node'] | None = None, link_type: t.Union[LinkType, t.Sequence[LinkType]] = (), link_label_filter: t.Optional[str] = None, link_direction: str = 'incoming', only_uuid: bool = False) list[LinkTriple] [source]#
Return the list of stored link triples directly incoming to or outgoing of this node.
Note this will only return link triples that are stored in the database. Anything in the cache is ignored.
- Parameters:
node_class – If specified, should be a class, and it filters only elements of that (subclass of) type
link_type – Only get inputs of this link type, if empty tuple then returns all inputs of all link types.
link_label_filter – filters the incoming nodes by its link label. This should be a regex statement as one would pass directly to a QueryBuilder filter statement with the ‘like’ operation.
link_direction – incoming or outgoing to get the incoming or outgoing links, respectively.
only_uuid – project only the node UUID instead of the instance onto the NodeTriple.node entries
- validate_incoming(source: Node, link_type: LinkType, link_label: str) None [source]#
Validate adding a link of the given type from a given node to ourself.
This function will first validate the types of the inputs, followed by the node and link types and validate whether in principle a link of that type between the nodes of these types is allowed.
Subsequently, the validity of the “degree” of the proposed link is validated, which means validating the number of links of the given type from the given node type is allowed.
- Parameters:
source – the node from which the link is coming
link_type – the link type
link_label – the link label
- Raises:
TypeError – if source is not a Node instance or link_type is not a LinkType enum
ValueError – if the proposed link is invalid
- validate_outgoing(target: Node, link_type: LinkType, link_label: str) None [source]#
Validate adding a link of the given type from ourself to a given node.
The validity of the triple (source, link, target) should be validated in the validate_incoming call. This method will be called afterwards and can be overridden by subclasses to add additional checks that are specific to that subclass.
- Parameters:
target – the node to which the link is going
link_type – the link type
link_label – the link label
- Raises:
TypeError – if target is not a Node instance or link_type is not a LinkType enum
ValueError – if the proposed link is invalid
Package for node ORM classes.
- class aiida.orm.nodes.node.Node(backend: 'StorageBackend' | None = None, user: User | None = None, computer: Computer | None = None, **kwargs: Any)[source]#
Bases:
Entity
[BackendNode
,NodeCollection
]Base class for all nodes in AiiDA.
Stores attributes starting with an underscore.
Caches files and attributes before the first save, and saves everything only on store(). After the call to store(), attributes cannot be changed.
Only after storing (or upon loading from uuid) extras can be modified and in this case they are directly set on the db.
In the plugin, also set the _plugin_type_string, to be set in the DB in the ‘type’ field.
- Collection[source]#
alias of
NodeCollection
- _CLS_COLLECTION#
alias of
NodeCollection
- _CLS_NODE_CACHING#
alias of
NodeCaching
- __abstractmethods__ = frozenset({})#
- __annotations__ = {'_CLS_COLLECTION': 'Type[CollectionType]', '_Node__plugin_type_string': 'ClassVar[str]', '_Node__query_type_string': 'ClassVar[str]', '__plugin_type_string': 'ClassVar[str]', '__qb_fields__': 'Sequence[QbField]', '__query_type_string': 'ClassVar[str]', '_hash_ignored_attributes': 'Tuple[str, ...]', '_logger': 'Optional[Logger]', '_updatable_attributes': 'Tuple[str, ...]', 'fields': 'QbFields'}#
- __deepcopy__(memo)[source]#
Deep copying a Node is not supported in general, but only for the Data sub class.
- __eq__(other: Any) bool [source]#
Fallback equality comparison by uuid (can be overwritten by specific types)
- __getattr__(name: str) Any [source]#
This method is called when an attribute is not found in the instance.
It allows for the handling of deprecated mixin methods.
- __init__(backend: 'StorageBackend' | None = None, user: User | None = None, computer: Computer | None = None, **kwargs: Any) None [source]#
- Parameters:
backend_entity – the backend model supporting this entity
- __module__ = 'aiida.orm.nodes.node'#
- __orig_bases__ = (aiida.orm.entities.Entity[ForwardRef('BackendNode'), aiida.orm.nodes.node.NodeCollection],)#
- __parameters__ = ()#
- __qb_fields__: Sequence[QbField] = [QbStrField('uuid', dtype=str, is_attribute=False), QbStrField('label', dtype=str, is_attribute=False), QbStrField('description', dtype=str, is_attribute=False), QbStrField('node_type', dtype=str, is_attribute=False), QbNumericField('ctime', dtype=datetime, is_attribute=False), QbNumericField('mtime', dtype=datetime, is_attribute=False), QbDictField('repository_metadata', dtype=Dict[str, Any], is_attribute=False), QbDictField('extras', dtype=Dict[str, Any], is_attribute=False, is_subscriptable=True), QbNumericField('user_pk', dtype=int, is_attribute=False), QbDictField('attributes', dtype=Dict[str, Any], is_attribute=False, is_subscriptable=True)]#
- _abc_impl = <_abc._abc_data object>#
- _add_outputs_from_cache(cache_node: Node) None [source]#
Replicate the output links and nodes from the cached node onto this node.
- _cachable = False#
- _check_mutability_attributes(keys: List[str] | None = None) None [source]#
Check if the entity is mutable and raise an exception if not.
This is called from NodeAttributes methods that modify the attributes.
- Parameters:
keys – the keys that will be mutated, or all if None
- _deprecated_attr_methods = {'attributes': 'all', 'attributes_items': 'items', 'attributes_keys': 'keys', 'clear_attributes': 'clear', 'delete_attribute': 'delete', 'delete_attribute_many': 'delete_many', 'get_attribute': 'get', 'get_attribute_many': 'get_many', 'reset_attributes': 'reset', 'set_attribute': 'set', 'set_attribute_many': 'set_many'}#
- _deprecated_caching_methods = {'_get_hash': '_get_hash', '_get_objects_to_hash': '_get_objects_to_hash', '_get_same_node': '_get_same_node', '_iter_all_same_nodes': '_iter_all_same_nodes', 'clear_hash': 'clear_hash', 'get_all_same_nodes': 'get_all_same_nodes', 'get_cache_source': 'get_cache_source', 'get_hash': 'get_hash', 'is_created_from_cache': 'is_created_from_cache', 'rehash': 'rehash'}#
- _deprecated_comment_methods = {'add_comment': 'add', 'get_comment': 'get', 'get_comments': 'all', 'remove_comment': 'remove', 'update_comment': 'update'}#
- _deprecated_extra_methods = {'clear_extras': 'clear', 'delete_extra': 'delete', 'delete_extra_many': 'delete_many', 'extras': 'all', 'extras_items': 'items', 'extras_keys': 'keys', 'get_extra': 'get', 'get_extra_many': 'get_many', 'reset_extras': 'reset', 'set_extra': 'set', 'set_extra_many': 'set_many'}#
- _deprecated_links_methods = {'add_incoming': 'add_incoming', 'get_incoming': 'get_incoming', 'get_outgoing': 'get_outgoing', 'get_stored_link_triples': 'get_stored_link_triples', 'validate_incoming': 'validate_incoming', 'validate_outgoing': 'validate_outgoing'}#
- _deprecated_repo_methods = {'copy_tree': 'copy_tree', 'delete_object': 'delete_object', 'get_object': 'get_object', 'get_object_content': 'get_object_content', 'glob': 'glob', 'list_object_names': 'list_object_names', 'list_objects': 'list_objects', 'open': 'open', 'put_object_from_file': 'put_object_from_file', 'put_object_from_filelike': 'put_object_from_filelike', 'put_object_from_tree': 'put_object_from_tree', 'repository_metadata': 'metadata', 'walk': 'walk'}#
- _plugin_type_string = ''#
- _query_type_string = ''#
- _storable = False#
- _store(clean: bool = True) Node [source]#
Store the node in the database while saving its attributes and repository directory.
- Parameters:
clean – boolean, if True, will clean the attributes and extras before attempting to store
- _store_from_cache(cache_node: Node) None [source]#
Store this node from an existing cache node.
Note
With the current implementation of the backend repository, which automatically deduplicates the content that it contains, we do not have to copy the contents of the source node. Since the content should be exactly equal, the repository will already contain it and there is nothing to copy. We simply replace the current
repository
instance with a clone of that of the source node, which does not actually copy any files.
- _unstorable_message = 'only Data, WorkflowNode, CalculationNode or their subclasses can be stored'#
- _validate() bool [source]#
Validate information stored in Node object.
For the
Node
base class, this check is always valid. Subclasses can override this method to perform additional checks and should usually call super()._validate()
first! This method is called automatically before storing the node in the DB. Therefore, use
get()
and similar methods that automatically read either from the DB or from the internal attribute cache.
- _validate_storability() None [source]#
Verify that the current node is allowed to be stored.
- Raises:
aiida.common.exceptions.StoringNotAllowed – if the node does not match all requirements for storing
- _verify_are_parents_stored() None [source]#
Verify that all parent nodes are already stored.
- Raises:
aiida.common.ModificationNotAllowed – if one of the source nodes of incoming links is not stored.
- class_node_type = ''#
- entry_point = None#
- fields: QbFields = {'attributes': 'QbDictField(attributes.*) -> Dict[str, Any]', 'ctime': 'QbNumericField(ctime) -> datetime', 'description': 'QbStrField(description) -> str', 'extras': 'QbDictField(extras.*) -> Dict[str, Any]', 'label': 'QbStrField(label) -> str', 'mtime': 'QbNumericField(mtime) -> datetime', 'node_type': 'QbStrField(node_type) -> str', 'pk': 'QbNumericField(pk) -> int', 'repository_metadata': 'QbDictField(repository_metadata) -> Dict[str, Any]', 'user_pk': 'QbNumericField(user_pk) -> int', 'uuid': 'QbStrField(uuid) -> str'}#
- get_description() str [source]#
Return a string with a description of the node.
- Returns:
a description string
- property is_valid_cache: bool#
Hook to exclude certain
Node
classes from being considered a valid cache. The base class assumes that all node instances are valid to cache from, unless the
_VALID_CACHE_KEY
extra has been set to False
explicitly. Subclasses can override this property with more specific logic, but should probably also consider the value returned by this base class.
- store() Node [source]#
Store the node in the database while saving its attributes and repository directory.
After being called, attributes cannot be changed anymore! Instead, extras can be changed only AFTER calling this store() function.
- Note:
After successful storage, those links that are in the cache, and for which also the parent node is already stored, will be automatically stored. The others will remain unstored.
- class aiida.orm.nodes.node.NodeBase(node: Node)[source]#
Bases:
object
A namespace for node related functionality, that is not directly related to its user-facing properties.
- __dict__ = mappingproxy({'__module__': 'aiida.orm.nodes.node', '__doc__': 'A namespace for node related functionality, that is not directly related to its user-facing properties.', '__init__': <function NodeBase.__init__>, 'repository': <functools.cached_property object>, 'caching': <functools.cached_property object>, 'comments': <functools.cached_property object>, 'attributes': <functools.cached_property object>, 'extras': <functools.cached_property object>, 'links': <functools.cached_property object>, '__dict__': <attribute '__dict__' of 'NodeBase' objects>, '__weakref__': <attribute '__weakref__' of 'NodeBase' objects>, '__annotations__': {'_node': "'Node'"}})#
- __module__ = 'aiida.orm.nodes.node'#
- __weakref__#
list of weak references to the object
- property attributes: NodeAttributes#
Return an interface to interact with the attributes of this node.
- property caching: NodeCaching#
Return an interface to interact with the caching of this node.
- property comments: NodeComments#
Return an interface to interact with the comments of this node.
- property extras: EntityExtras#
Return an interface to interact with the extras of this node.
- property repository: NodeRepository#
Return the repository for this node.
- class aiida.orm.nodes.node.NodeCollection(entity_class: Type[EntityType], backend: 'StorageBackend' | None = None)[source]#
Bases:
Collection
[NodeType
],Generic
[NodeType
]The collection of nodes.
- __abstractmethods__ = frozenset({})#
- __module__ = 'aiida.orm.nodes.node'#
- __orig_bases__ = (aiida.orm.entities.Collection[~NodeType], typing.Generic[~NodeType])#
- __parameters__ = (~NodeType,)#
- _abc_impl = <_abc._abc_data object>#
- delete(pk: int) None [source]#
Delete a Node from the collection with the given id
- Parameters:
pk – the node id
- iter_repo_keys(filters: dict | None = None, subclassing: bool = True, batch_size: int = 100) Iterator[str] [source]#
Iterate over all repository object keys for this
Node
class. Note
keys will not be deduplicated, wrap in a
set
to achieve this- Parameters:
filters – Filters for the node query
subclassing – Whether to include subclasses of the given class
batch_size – The number of nodes to fetch data for at once
Interface to the file repository of a node instance.
- class aiida.orm.nodes.repository.NodeRepository(node: Node)[source]#
Bases:
object
Interface to the file repository of a node instance.
This is the compatibility layer between the Node class and the Repository class. The repository in principle has no concept of immutability, so it is implemented here. Any mutating operations will raise a ModificationNotAllowed exception if the node is stored. Otherwise the operation is just forwarded to the repository instance.
The repository instance keeps an internal mapping of the file hierarchy that it maintains, starting from an empty hierarchy if the instance was constructed normally, or from a specific hierarchy if reconstructed through the
Repository.from_serialized
classmethod. This is only the case for stored nodes, because unstored nodes do not have any files yet when they are constructed. Once the node gets stored, the repository is asked to serialize its metadata contents which is then stored in the repository_metadata
field of the backend node. This layer explicitly does not update the metadata of the node on a mutation action. The reason is that for stored nodes these actions are anyway forbidden and for unstored nodes, the final metadata will be stored in one go, once the node is stored, so there is no need to keep updating the node metadata intermediately. Note that this does mean that repository_metadata
does not give accurate information, as long as the node is not yet stored.- __dict__ = mappingproxy({'__module__': 'aiida.orm.nodes.repository', '__doc__': "Interface to the file repository of a node instance.\n\n This is the compatibility layer between the `Node` class and the `Repository` class. The repository in principle has\n no concept of immutability, so it is implemented here. Any mutating operations will raise a `ModificationNotAllowed`\n exception if the node is stored. Otherwise the operation is just forwarded to the repository instance.\n\n The repository instance keeps an internal mapping of the file hierarchy that it maintains, starting from an empty\n hierarchy if the instance was constructed normally, or from a specific hierarchy if reconstructed through the\n ``Repository.from_serialized`` classmethod. This is only the case for stored nodes, because unstored nodes do not\n have any files yet when they are constructed. Once the node get's stored, the repository is asked to serialize its\n metadata contents which is then stored in the ``repository_metadata`` field of the backend node. This layer\n explicitly does not update the metadata of the node on a mutation action. The reason is that for stored nodes these\n actions are anyway forbidden and for unstored nodes, the final metadata will be stored in one go, once the node is\n stored, so there is no need to keep updating the node metadata intermediately. 
Note that this does mean that\n ``repository_metadata`` does not give accurate information, as long as the node is not yet stored.\n ", '__init__': <function NodeRepository.__init__>, 'metadata': <property object>, '_update_repository_metadata': <function NodeRepository._update_repository_metadata>, '_check_mutability': <function NodeRepository._check_mutability>, '_repository': <property object>, '_store': <function NodeRepository._store>, '_copy': <function NodeRepository._copy>, '_clone': <function NodeRepository._clone>, 'serialize': <function NodeRepository.serialize>, 'hash': <function NodeRepository.hash>, 'list_objects': <function NodeRepository.list_objects>, 'list_object_names': <function NodeRepository.list_object_names>, 'open': <function NodeRepository.open>, 'as_path': <function NodeRepository.as_path>, 'get_object': <function NodeRepository.get_object>, 'get_object_content': <function NodeRepository.get_object_content>, 'put_object_from_bytes': <function NodeRepository.put_object_from_bytes>, 'put_object_from_filelike': <function NodeRepository.put_object_from_filelike>, 'put_object_from_file': <function NodeRepository.put_object_from_file>, 'put_object_from_tree': <function NodeRepository.put_object_from_tree>, 'walk': <function NodeRepository.walk>, 'glob': <function NodeRepository.glob>, 'copy_tree': <function NodeRepository.copy_tree>, 'delete_object': <function NodeRepository.delete_object>, 'erase': <function NodeRepository.erase>, '__dict__': <attribute '__dict__' of 'NodeRepository' objects>, '__weakref__': <attribute '__weakref__' of 'NodeRepository' objects>, '__annotations__': {'_node': "'Node'", '_repository_instance': 'Repository | None'}})#
- __module__ = 'aiida.orm.nodes.repository'#
- __weakref__#
list of weak references to the object
- _check_mutability()[source]#
Check if the node is mutable.
- Raises:
ModificationNotAllowed – when the node is stored and therefore immutable.
- _clone(repo: NodeRepository) None [source]#
Clone the repository from another instance.
This is used when cloning a node.
- Parameters:
repo – the repository to clone.
- _copy(repo: NodeRepository) None [source]#
Copy a repository from another instance.
This is used when storing cached nodes.
- Parameters:
repo – the repository to clone.
- property _repository: Repository#
Return the repository instance, lazily constructing it if necessary.
Note
this property is protected because a node’s repository should not be accessed outside of its scope.
- Returns:
the file repository instance.
- as_path(path: str | PurePosixPath | None = None) Iterator[Path] [source]#
Make the contents of the repository available as a normal filepath on the local file system.
- Parameters:
path – optional relative path of the object within the repository.
- Returns:
the filepath of the content of the repository or object if
path
is specified.- Raises:
TypeError – if the path is not a string or
Path
, or is an absolute path. FileNotFoundError – if no object exists for the given path.
- copy_tree(target: str | Path, path: str | PurePosixPath | None = None) None [source]#
Copy the contents of the entire node repository to another location on the local file system.
- Parameters:
target – absolute path of the directory where to copy the contents to.
path – optional relative path whose contents to copy.
- delete_object(path: str)[source]#
Delete the object from the repository.
- Parameters:
path – fully qualified identifier for the object within the repository.
- Raises:
TypeError – if the path is not a string and relative path.
FileNotFoundError – if the file does not exist.
IsADirectoryError – if the object is a directory and not a file.
OSError – if the file could not be deleted.
ModificationNotAllowed – when the node is stored and therefore immutable.
- erase()[source]#
Delete all objects from the repository.
- Raises:
ModificationNotAllowed – when the node is stored and therefore immutable.
- get_object(path: FilePath | None = None) File [source]#
Return the object at the given path.
- Parameters:
path – the relative path of the object within the repository.
- Returns:
the File representing the object located at the given relative path.
- Raises:
TypeError – if the path is not a string or
Path
, or is an absolute path. FileNotFoundError – if no object exists for the given path.
- get_object_content(path: str, mode: Literal['r']) str [source]#
- get_object_content(path: str, mode: Literal['rb']) bytes
Return the content of an object identified by its path.
- Parameters:
path – the relative path of the object within the repository.
- Raises:
TypeError – if the path is not a string and relative path.
FileNotFoundError – if the file does not exist.
IsADirectoryError – if the object is a directory and not a file.
OSError – if the file could not be opened.
- glob() Iterable[PurePosixPath] [source]#
Yield a recursive list of all paths (files and directories).
- hash() str [source]#
Generate a hash of the repository’s contents.
- Returns:
the hash representing the contents of the repository.
- list_object_names(path: str | None = None) list[str] [source]#
Return a sorted list of the object names contained in this repository, optionally in the given sub directory.
- Parameters:
path – optional relative path inside the repository whose objects to list.
- Returns:
a list of File named tuples representing the objects present in directory with the given key.
- Raises:
TypeError – if the path is not a string and relative path.
FileNotFoundError – if no object exists for the given path.
NotADirectoryError – if the object at the given path is not a directory.
- list_objects(path: str | None = None) list[File] [source]#
Return a list of the objects contained in this repository sorted by name, optionally in given sub directory.
- Parameters:
path – optional relative path inside the repository whose objects to list.
- Returns:
a list of File named tuples representing the objects present in directory with the given key.
- Raises:
TypeError – if the path is not a string and relative path.
FileNotFoundError – if no object exists for the given path.
NotADirectoryError – if the object at the given path is not a directory.
- property metadata: dict[str, Any]#
Return the repository metadata, representing the virtual file hierarchy.
Note, this is only accurate if the node is stored.
- Returns:
the repository metadata
- open(path: FilePath, mode: t.Literal['r']) t.Iterator[t.TextIO] [source]#
- open(path: FilePath, mode: t.Literal['rb']) t.Iterator[t.BinaryIO]
Open a file handle to an object stored under the given key.
Note
this should only be used to open a handle to read an existing file. To write a new file use the method
put_object_from_filelike
instead.- Parameters:
path – the relative path of the object within the repository.
- Returns:
yield a byte stream object.
- Raises:
TypeError – if the path is not a string and relative path.
FileNotFoundError – if the file does not exist.
IsADirectoryError – if the object is a directory and not a file.
OSError – if the file could not be opened.
- put_object_from_bytes(content: bytes, path: str) None [source]#
Store the given content in the repository at the given path.
- Parameters:
path – the relative path where to store the object in the repository.
content – the content to store.
- Raises:
TypeError – if the path is not a string and relative path.
FileExistsError – if an object already exists at the given path.
- put_object_from_file(filepath: str, path: str)[source]#
Store a new object under path with contents of the file located at filepath on the local file system.
- Parameters:
filepath – absolute path of file whose contents to copy to the repository
path – the relative path where to store the object in the repository.
- Raises:
TypeError – if the path is not a string and relative path, or the handle is not a byte stream.
ModificationNotAllowed – when the node is stored and therefore immutable.
- put_object_from_filelike(handle: BufferedReader, path: str)[source]#
Store the byte contents of a file in the repository.
- Parameters:
handle – filelike object with the byte content to be stored.
path – the relative path where to store the object in the repository.
- Raises:
TypeError – if the path is not a string and relative path.
ModificationNotAllowed – when the node is stored and therefore immutable.
- put_object_from_tree(filepath: str, path: str | None = None)[source]#
Store the entire contents of filepath on the local file system in the repository with under given path.
- Parameters:
filepath – absolute path of the directory whose contents to copy to the repository.
path – the relative path where to store the objects in the repository.
- Raises:
TypeError – if the path is not a string and relative path.
ModificationNotAllowed – when the node is stored and therefore immutable.
- serialize() dict [source]#
Serialize the metadata of the repository content into a JSON-serializable format.
- Returns:
dictionary with the content metadata.
- walk(path: str | PurePosixPath | None = None) Iterable[tuple[PurePosixPath, list[str], list[str]]] [source]#
Walk over the directories and files contained within this repository.
Note
the order of the dirname and filename lists that are returned is not necessarily sorted. This is in line with the
os.walk
implementation where the order depends on the underlying file system used.- Parameters:
path – the relative path of the directory within the repository whose contents to walk.
- Returns:
tuples of root, dirnames and filenames just like
os.walk
, with the exception that the root path is always relative with respect to the repository root, instead of an absolute path and it is an instance ofpathlib.PurePosixPath
instead of a normal string.