from typing import Any, Dict

from pydantic import BaseModel, ConfigDict


class ExponentialLRParams(BaseModel):
    """Configuration for `torch.optim.lr_scheduler.ExponentialLR`."""

    model_config = ConfigDict(frozen=True)

    gamma: float = 0.95  # Multiplicative factor of learning rate decay

    def asdict(self) -> Dict[str, Any]:
        """Returns a dictionary of valid parameters for `torch.optim.lr_scheduler.ExponentialLR`."""
        return self.model_dump()
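

# A minimal usage sketch (not part of the original file): it assumes PyTorch is
# installed, and the model/optimizer below are purely illustrative stand-ins.
# It shows how `asdict()` can feed the validated config into the scheduler.
if __name__ == "__main__":
    import torch
    from torch.optim.lr_scheduler import ExponentialLR

    model = torch.nn.Linear(10, 2)  # hypothetical model for illustration
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    params = ExponentialLRParams(gamma=0.9)
    scheduler = ExponentialLR(optimizer, **params.asdict())

    for _ in range(3):
        optimizer.step()
        scheduler.step()  # learning rate is multiplied by gamma each step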