from __future__ import annotations

from functools import reduce
from typing import (
    Any,
    Generic,
    Iterable,
    Iterator,
    Sequence,
    TypeVar,
)

import torch
import torch.nn
from ase.data import atomic_numbers, chemical_symbols, covalent_radii
from graph_pes.utils.misc import left_aligned_mul
from torch import Tensor

from .misc import (
    MAX_Z,
    pairs,
    to_significant_figures,
    uniform_repr,
)

V = TypeVar("V", bound=torch.nn.Module)


class UniformModuleDict(torch.nn.ModuleDict, Generic[V]):
    """
    A :class:`torch.nn.ModuleDict` sub-class for cases where
    the values are all of the same type.

    Examples
    --------
    >>> from graph_pes.utils.nn import UniformModuleDict
    >>> from torch.nn import Linear
    >>> linear_dict = UniformModuleDict(a=Linear(10, 5), b=Linear(5, 1))
    """

    def __init__(self, **modules: V):
        super().__init__(modules)

    def values(self) -> Iterable[V]:
        return super().values()  # type: ignore

    def items(self) -> Iterable[tuple[str, V]]:
        return super().items()  # type: ignore

    def __getitem__(self, key: str) -> V:
        return super().__getitem__(key)  # type: ignore

    def __setitem__(self, key: str, module: V) -> None:  # type: ignore
        super().__setitem__(key, module)

    def pop(self, key: str) -> V:
        return super().pop(key)  # type: ignore


class UniformModuleList(torch.nn.ModuleList, Sequence[V]):
    """
    A :class:`torch.nn.ModuleList` sub-class for cases where
    the values are all of the same type.

    Examples
    --------
    >>> from graph_pes.utils.nn import UniformModuleList
    >>> from torch.nn import Linear
    >>> linear_list = UniformModuleList([Linear(10, 5), Linear(5, 1)])
    """

    def __init__(self, modules: Iterable[V]):
        super().__init__(modules)

    def __getitem__(self, idx: int) -> V:  # type: ignore
        return super().__getitem__(idx)  # type: ignore

    def __setitem__(self, idx: int, value: V) -> None:  # type: ignore
        super().__setitem__(idx, value)

    def append(self, module: V) -> None:  # type: ignore
        super().append(module)

    def extend(self, modules: Iterable[V]) -> None:  # type: ignore
        super().extend(modules)

    def insert(self, idx: int, module: V) -> None:  # type: ignore
        super().insert(idx, module)

    def pop(self, idx: int) -> V:  # type: ignore
        return super().pop(idx)  # type: ignore

    def __iter__(self) -> Iterator[V]:
        return super().__iter__()  # type: ignore


class ShiftedSoftplus(torch.nn.Module):
    """Softplus shifted down by log(2), so that ``forward(0) == 0``."""

    def __init__(self):
        super().__init__()
        self.shift = torch.log(torch.tensor(2.0)).item()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.nn.functional.softplus(x) - self.shift

    def __repr__(self):
        return uniform_repr(self.__class__.__name__)


def parse_activation(act: str) -> torch.nn.Module:
    """
    Parse a string into a PyTorch activation function.

    Parameters
    ----------
    act
        The activation function to parse.

    Returns
    -------
    torch.nn.Module
        The parsed activation function.
    """
    activation = getattr(torch.nn, act, None)
    if activation is None:
        raise ValueError(f"Activation function {act} not found in `torch.nn`.")
    return activation()


def prod(iterable):
    return reduce(lambda x, y: x * y, iterable, 1)


class MLP(torch.nn.Module):
    """
    A multi-layer perceptron model, alternating linear layers and
    activations.

    Parameters
    ----------
    layers
        The number of nodes in each layer.
    activation
        The activation function to use: either a named activation function
        from `torch.nn`, or a `torch.nn.Module` instance.
    activate_last
        Whether to apply the activation function after the last linear layer.
    bias
        Whether to include bias terms in the linear layers.
    Examples
    --------
    >>> import torch
    >>> from graph_pes.utils.nn import MLP
    >>> model = MLP([10, 5, 1])
    >>> model
    MLP(10 → 5 → 1, activation=CELU())
    >>> MLP([10, 5, 1], activation=torch.nn.ReLU())
    MLP(10 → 5 → 1, activation=ReLU())
    >>> MLP([10, 5, 1], activation="Tanh")
    MLP(10 → 5 → 1, activation=Tanh())
    """

    def __init__(
        self,
        layers: list[int],
        activation: str | torch.nn.Module = "CELU",
        activate_last: bool = False,
        bias: bool = True,
    ):
        super().__init__()

        self.activation = (
            parse_activation(activation)
            if isinstance(activation, str)
            else activation
        )
        self.activate_last = activate_last

        self.linear_layers = torch.nn.ModuleList(
            [
                torch.nn.Linear(_in, _out, bias=bias)
                for _in, _out in pairs(layers)
            ]
        )
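    # NB: `pairs` (imported from .misc) appears to walk consecutive widths,
    # so layers=[10, 5, 1] builds Linear(10, 5) and Linear(5, 1), matching
    # the reprs in the class docstring above.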
    def forward(self, x: Tensor) -> Tensor:
        """
        Perform a forward pass through the network.

        Parameters
        ----------
        x
            The input to the network.
        """
        for i, linear in enumerate(self.linear_layers):
            x = linear(x)
            last_layer = i == len(self.linear_layers) - 1
            if not last_layer or self.activate_last:
                x = self.activation(x)
        return x
    @property
    def input_size(self):
        """The size of the input to the network."""
        return self.linear_layers[0].in_features

    @property
    def output_size(self):
        """The size of the output of the network."""
        return self.linear_layers[-1].out_features

    @property
    def layer_widths(self):
        """The widths of the layers in the network."""
        inputs = [layer.in_features for layer in self.linear_layers]
        return inputs + [self.output_size]

    def __repr__(self):
        layers = " → ".join(map(str, self.layer_widths))
        return uniform_repr(
            self.__class__.__name__,
            layers,
            activation=self.activation,
            stringify=False,
        )
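# A minimal usage sketch (illustrative, not part of the module): an
# MLP([10, 5, 1]) applies Linear(10, 5) → CELU → Linear(5, 1), with no
# activation after the final layer by default.
#
# >>> mlp = MLP([10, 5, 1])
# >>> x = torch.randn(8, 10)  # hypothetical batch of 8 inputs
# >>> mlp(x).shape
# torch.Size([8, 1])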
def _rebuild_per_element_parameter(data, requires_grad, state):
    psp = PerElementParameter(data, requires_grad)
    psp._accessed_Zs = state["_accessed_Zs"]
    psp._index_dims = state["_index_dims"]
    return psp


class PerElementParameter(torch.nn.Parameter):
    """
    A subclass of :class:`torch.nn.Parameter` that is indexed by atomic
    number/s. Crucially, this subclass overrides the :meth:`numel` method
    to accurately count the number of relevant and learnable parameters.

    Examples
    --------
    Imagine the case where you have a model parameter with a value for each
    element in the periodic table. If you only train the model on a dataset
    containing a few elements, you don't want to count the total number of
    parameters, as this will be unnecessarily large.

    >>> # don't do this!
    >>> per_element_parameter = torch.nn.Parameter(torch.randn(119))
    >>> per_element_parameter.numel()
    119
    >>> per_element_parameter
    Parameter containing:
    tensor([ 1.2838e-01, -1.4507e+00,  1.3465e-01, -9.5786e-01,  ...,
            -1.3329e+00, -1.5515e+00,  2.1106e+00, -9.7268e-01],
           requires_grad=True)

    >>> # do this instead
    >>> per_element_parameter = PerElementParameter.of_shape((1,))
    >>> per_element_parameter.register_elements([1, 6, 8])
    >>> per_element_parameter.numel()
    3
    >>> per_element_parameter
    PerElementParameter({'O': -0.278, 'H': 0.157, 'C': -0.0379}, trainable=True)

    ``graph-pes-train`` automatically registers all elements that a model
    encounters during training, so you rarely need to call
    :meth:`register_elements` yourself.
    """

    def __new__(
        cls, data: Tensor, requires_grad: bool = True
    ) -> PerElementParameter:
        pep = super().__new__(cls, data, requires_grad=requires_grad)
        pep._is_per_element_param = True  # type: ignore
        return pep

    def __init__(self, data: Tensor, requires_grad: bool = True):
        super().__init__()
        # set extra state
        self._accessed_Zs: set[int] = set()
        # set this to an arbitrary value: this gets updated post-init
        self._index_dims: int = 1
    def register_elements(self, Zs: Iterable[int]) -> None:
        """
        Register the elements that are relevant for the parameter.

        This is typically only used internally - you shouldn't call this
        yourself in any of your model definitions.
        """
        self._accessed_Zs.update(sorted(Zs))
    @classmethod
    def of_shape(
        cls,
        shape: tuple[int, ...] = (),
        index_dims: int = 1,
        default_value: float | None = None,
        requires_grad: bool = True,
    ) -> PerElementParameter:
        """
        Create a :class:`PerElementParameter` with a given shape for each
        element in the periodic table.

        Parameters
        ----------
        shape
            The shape of the parameter for each element.
        index_dims
            The number of dimensions to index by.
        default_value
            The value to initialise the parameter with. If ``None``,
            the parameter is initialised with random values.
        requires_grad
            Whether the parameter should be learnable.

        Returns
        -------
        PerElementParameter
            The created parameter.

        Examples
        --------
        Create a parameter intended to be indexed by a single atomic number,
        i.e. ``pep[Z]``:

        >>> PerElementParameter.of_shape((3,)).shape
        torch.Size([119, 3])
        >>> PerElementParameter.of_shape((3, 4)).shape
        torch.Size([119, 3, 4])

        Create a parameter intended to be indexed by two atomic numbers,
        i.e. ``pep[Z1, Z2]``:

        >>> PerElementParameter.of_shape((3,), index_dims=2).shape
        torch.Size([119, 119, 3])
        """
        actual_shape = tuple([MAX_Z + 1] * index_dims) + shape
        if default_value is not None:
            data = torch.full(actual_shape, float(default_value))
        else:
            data = torch.randn(actual_shape)
        psp = PerElementParameter(data, requires_grad=requires_grad)
        psp._index_dims = index_dims
        return psp
    @classmethod
    @torch.no_grad()
    def from_dict(
        cls,
        requires_grad: bool = True,
        default_value: float = 0.0,
        **values: float,
    ) -> PerElementParameter:
        """
        Create a :class:`PerElementParameter` containing a single value for
        each element in the periodic table from a dictionary of values.

        Parameters
        ----------
        requires_grad
            Whether the parameter should be learnable.
        default_value
            The value to assign to elements not present in ``values``.
        values
            A dictionary of values, indexed by element symbol.

        Returns
        -------
        PerElementParameter
            The created parameter.

        Examples
        --------
        >>> from graph_pes.utils.nn import PerElementParameter
        >>> pep = PerElementParameter.from_dict(H=1.0, O=2.0)
        >>> pep.register_elements([1, 6, 8])
        >>> pep
        PerElementParameter({'H': 1.0, 'C': 0.0, 'O': 2.0}, trainable=True)
        """
        pep = PerElementParameter.of_length(
            1, requires_grad=requires_grad, default_value=default_value
        )
        for element_symbol, value in values.items():
            if element_symbol not in chemical_symbols:
                raise ValueError(f"Unknown element: {element_symbol}")
            Z = chemical_symbols.index(element_symbol)
            pep[Z] = value

        pep.register_elements(atomic_numbers[v] for v in values)

        return pep
    @classmethod
    def of_length(
        cls,
        length: int,
        index_dims: int = 1,
        default_value: float | None = None,
        requires_grad: bool = True,
    ) -> PerElementParameter:
        """
        Alias for ``PerElementParameter.of_shape((length,), **kwargs)``.
        """
        return PerElementParameter.of_shape(
            (length,), index_dims, default_value, requires_grad
        )
    @classmethod
    @torch.no_grad()
    def covalent_radii(
        cls,
        scaling_factor: float = 1.0,
    ) -> PerElementParameter:
        """
        Create a :class:`PerElementParameter` containing the covalent radius
        of each element in the periodic table.
        """
        pep = PerElementParameter.of_length(1, default_value=1.0)
        for Z in range(1, MAX_Z + 1):
            # `covalent_radii` here resolves to the module-level import
            # from ase.data, not to this classmethod
            pep[Z] = torch.tensor(covalent_radii[Z]) * scaling_factor
        return pep
    def numel(self) -> int:
        n_elements = len(self._accessed_Zs)
        accessed_parameters = n_elements**self._index_dims
        per_element_size = prod(self.shape[self._index_dims :])
        return accessed_parameters * per_element_size
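    # e.g. with elements {H, C, O} registered, index_dims=1 and a
    # per-element shape of (8,), numel() reports 3**1 * 8 = 24 learnable
    # values, rather than the full (MAX_Z + 1) * 8 storage size.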
    # needed for de/serialization
    def __reduce_ex__(self, proto):
        return (
            _rebuild_per_element_parameter,
            (self.data, self.requires_grad, torch._utils._get_obj_state(self)),
        )

    def __instancecheck__(self, instance) -> bool:
        return super().__instancecheck__(instance) or (  # type: ignore[no-untyped-call]
            isinstance(instance, torch.Tensor)
            and getattr(instance, "_is_per_element_param", False)
        )

    @torch.no_grad()
    def _repr(
        self,
        alias: str | None = None,
        more_info: dict[str, Any] | None = None,
    ) -> str:
        alias = alias or self.__class__.__name__
        more_info = more_info or {}
        if "trainable" not in more_info:
            more_info["trainable"] = self.requires_grad

        if len(self._accessed_Zs) == 0:
            if self._index_dims == 1 and self.shape[1] == 1:
                return uniform_repr(alias, **more_info)
            return uniform_repr(
                alias,
                index_dims=self._index_dims,
                shape=tuple(self.shape[self._index_dims :]),
                **more_info,
            )

        if self._index_dims == 1:
            if self.shape[1] == 1:
                d = {
                    chemical_symbols[Z]: to_significant_figures(self[Z].item())
                    for Z in self._accessed_Zs
                }
                string = f"{alias}({str(d)}, "
                for k, v in more_info.items():
                    string += f"{k}={v}, "
                return string[:-2] + ")"

            elif len(self.shape) == 2:
                d = {
                    chemical_symbols[Z]: self[Z].tolist()
                    for Z in self._accessed_Zs
                }
                string = f"{alias}({str(d)}, "
                for k, v in more_info.items():
                    string += f"{k}={v}, "
                return string[:-2] + ")"

        if self._index_dims == 2 and self.shape[2] == 1:
            columns = []
            columns.append(
                ["Z"] + [chemical_symbols[Z] for Z in self._accessed_Zs]
            )
            for col_Z in self._accessed_Zs:
                row: list[str | float] = [chemical_symbols[col_Z]]
                for row_Z in self._accessed_Zs:
                    row.append(
                        to_significant_figures(self[col_Z, row_Z].item())
                    )
                columns.append(row)
            widths = [max(len(str(x)) for x in col) for col in zip(*columns)]
            lines = []
            for row in columns:
                line = ""
                for x, w in zip(row, widths):
                    # right align
                    line += f"{x:>{w}} "
                lines.append(line)
            table = "\n" + "\n".join(lines)
            return uniform_repr(
                alias,
                table,
                **more_info,
            )

        return uniform_repr(
            alias,
            index_dims=self._index_dims,
            accessed_Zs=sorted(self._accessed_Zs),
            shape=tuple(self.shape[self._index_dims :]),
            **more_info,
        )

    def __repr__(self, *, tensor_contents=None):
        return self._repr()
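# A minimal usage sketch (illustrative, not part of the module): only
# registered elements contribute to the reported parameter count.
#
# >>> pep = PerElementParameter.of_length(8)  # stored shape: (MAX_Z + 1, 8)
# >>> pep.register_elements([1, 6, 8])        # H, C, O
# >>> pep.numel()                             # 3**1 * 8
# 24
# >>> pep[6].shape                            # the row for carbon
# torch.Size([8])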
class PerElementEmbedding(torch.nn.Module):
    """
    A per-element equivalent of :class:`torch.nn.Embedding`.

    Parameters
    ----------
    dim
        The length of each embedding vector.

    Examples
    --------
    >>> embedding = PerElementEmbedding(10)
    >>> len(graph["atomic_numbers"])  # number of atoms in the graph
    24
    >>> embedding(graph["atomic_numbers"])
    <tensor of shape (24, 10)>
    """

    def __init__(self, dim: int):
        super().__init__()
        self._embeddings = PerElementParameter.of_length(dim)

    def forward(self, Z: Tensor) -> Tensor:
        return self._embeddings[Z]

    def dim(self) -> int:
        return self._embeddings.shape[1]

    def __repr__(self) -> str:
        Zs = sorted(self._embeddings._accessed_Zs)
        return uniform_repr(
            self.__class__.__name__,
            dim=self._embeddings.shape[1],
            elements=[chemical_symbols[Z] for Z in Zs],
        )

    def __call__(self, Z: Tensor) -> Tensor:
        return super().__call__(Z)
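# A minimal usage sketch (illustrative): embeddings are looked up directly
# by atomic number, one row per atom. `Z` here is a hypothetical tensor of
# atomic numbers for a 3-atom fragment.
#
# >>> embedding = PerElementEmbedding(10)
# >>> Z = torch.tensor([1, 1, 8])  # H, H, O
# >>> embedding(Z).shape
# torch.Size([3, 10])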
class HaddamardProduct(torch.nn.Module):
    def __init__(
        self, *components: torch.nn.Module, left_aligned: bool = False
    ):
        super().__init__()
        self.components: list[torch.nn.Module] = torch.nn.ModuleList(components)  # type: ignore
        self.left_aligned = left_aligned

    def forward(self, x):
        out = torch.scalar_tensor(1)
        for component in self.components:
            if self.left_aligned:
                out = left_aligned_mul(out, component(x))
            else:
                out = out * component(x)
        return out


def learnable_parameters(module: torch.nn.Module) -> int:
    """Count the number of **learnable** parameters a module has."""
    return sum(p.numel() for p in module.parameters() if p.requires_grad)


class AtomicOneHot(torch.nn.Module):
    """
    Takes a tensor of atomic numbers Z, and returns a one-hot encoding
    of the atomic numbers.

    Parameters
    ----------
    elements
        The symbols of the elements to expect, in encoding order.
    """

    def __init__(self, elements: list[str]):
        super().__init__()

        self.elements = elements
        self.n_elements = len(elements)

        self.Z_to_idx: Tensor
        self.register_buffer(
            "Z_to_idx",
            # deliberately crazy value to catch errors
            torch.full((MAX_Z + 1,), fill_value=1234),
        )
        for i, symbol in enumerate(elements):
            Z = atomic_numbers[symbol]
            self.Z_to_idx[Z] = i

    def forward(self, Z: Tensor) -> Tensor:
        internal_idx = self.Z_to_idx[Z]
        with torch.no_grad():
            if (internal_idx == 1234).any():
                unknown_Z = torch.unique(Z[internal_idx == 1234])
                raise ValueError(
                    f"Unknown elements: {unknown_Z}. "
                    f"Expected one of {self.elements}"
                )

        return torch.nn.functional.one_hot(
            internal_idx, self.n_elements
        ).float()

    def __repr__(self):
        return uniform_repr(
            self.__class__.__name__,
            elements=self.elements,
        )
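# A minimal usage sketch (illustrative): one-hot encoding over a fixed
# element vocabulary, with a loud error for anything outside it.
#
# >>> one_hot = AtomicOneHot(["H", "C", "O"])
# >>> Z = torch.tensor([1, 8, 6])  # H, O, C
# >>> one_hot(Z)
# tensor([[1., 0., 0.],
#         [0., 0., 1.],
#         [0., 1., 0.]])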