From 786a69890ec14686c50e6846e74376693f421756 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Thu, 19 Dec 2024 15:58:17 -0500 Subject: [PATCH] Feat/timermechanism (#3149) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • recurrenttransfermechanism.py - _instantiate_attributes_after_function(): fix assignment of StabilityFunction, and force update of default_variable for output_port * • recurrenttransfermechanism.py _instantiate_attributes_after_function: revise call to _update_default_variable to use energy.variable instead of value * • recurrenttransfermechanism.py _instantiate_attributes_after_function: revise call to _update_default_variable to use energy.variable instead of value * • composition.py - reset(): add clear_results arg • timerfunctions.py - functions all written for python and pytorch... need llvm versions • timerfunctions.py - Desmos plots updated * Add Mechanism and Functions to support progression of integration along a trajectory • timermechanism.py: sublass of IntegratorMechanism • timerfunctions.py: subclass of TransferFunction - LinearTimer, AcceleratingTimer, DeceleratingTimer, AsymptoticTimer • test_timer_mechanism.py --------- Co-authored-by: jdcpni --- .../Coffee Shop World/DeclanParams.py | 1 + docs/source/Core.rst | 2 + docs/source/IntegratorMechanisms.rst | 1 + docs/source/NonStatefulFunctions.rst | 1 + docs/source/TimerFunctions.rst | 10 + docs/source/TimerMechanism.rst | 7 + docs/source/TransferFunctions.rst | 5 +- .../core/components/functions/__init__.py | 8 +- .../core/components/functions/function.py | 7 +- .../functions/nonstateful/timerfunctions.py | 1225 +++++++++++++++++ .../nonstateful/transferfunctions.py | 66 +- .../functions/stateful/integratorfunctions.py | 38 +- .../core/components/mechanisms/mechanism.py | 20 +- .../processing/integratormechanism.py | 3 +- .../processing/transfermechanism.py | 4 +- psyneulink/core/globals/keywords.py | 72 +- .../processing/integrator/__init__.py 
| 3 + .../integrator/collapsingboundmechanism.py | 300 ---- + .../processing/integrator/timermechanism.py | 412 ++++++ + .../transfer/recurrenttransfermechanism.py | 2 +- + tests/mechanisms/test_timer_mechanism.py | 73 + 21 files changed, 1867 insertions(+), 393 deletions(-) create mode 100644 docs/source/TimerFunctions.rst create mode 100644 docs/source/TimerMechanism.rst create mode 100644 psyneulink/core/components/functions/nonstateful/timerfunctions.py delete mode 100644 psyneulink/library/components/mechanisms/processing/integrator/collapsingboundmechanism.py create mode 100644 psyneulink/library/components/mechanisms/processing/integrator/timermechanism.py create mode 100644 tests/mechanisms/test_timer_mechanism.py diff --git a/Scripts/Models (Under Development)/EGO/Using EMComposition/Coffee Shop World/DeclanParams.py b/Scripts/Models (Under Development)/EGO/Using EMComposition/Coffee Shop World/DeclanParams.py index c2dbbbf8180..6b57d6011b1 100644 --- a/Scripts/Models (Under Development)/EGO/Using EMComposition/Coffee Shop World/DeclanParams.py +++ b/Scripts/Models (Under Development)/EGO/Using EMComposition/Coffee Shop World/DeclanParams.py @@ -72,6 +72,7 @@ def calc_prob(em_preds, test_ys): normalize_field_weights = True, # whether to normalize the field weights during memory retrieval normalize_memories = False, # whether to normalize the memory during memory retrieval # normalize_memories = True, # whether to normalize the memory during memory retrieval + # normalize_memories = False, # whether to normalize the memory vectors # softmax_temperature = None, # temperature of the softmax used during memory retrieval (smaller means more argmax-like softmax_temperature = .1, # temperature of the softmax used during memory retrieval (smaller means more argmax-like # softmax_temperature = ADAPTIVE, # temperature of the softmax used during memory retrieval (smaller means more argmax-like diff --git a/docs/source/Core.rst b/docs/source/Core.rst index
ea1f1b7105d..ddfd8d7f2b4 100644 --- a/docs/source/Core.rst +++ b/docs/source/Core.rst @@ -69,6 +69,8 @@ Core - `TransferFunctions` + - `TimerFunctions` + - `TransformFunctions` - `StatefulFunctions` diff --git a/docs/source/IntegratorMechanisms.rst b/docs/source/IntegratorMechanisms.rst index 3b1922ee60a..6a44460f7a2 100644 --- a/docs/source/IntegratorMechanisms.rst +++ b/docs/source/IntegratorMechanisms.rst @@ -12,3 +12,4 @@ IntegratorMechanisms DDM EpisodicMemoryMechanism + TimerMechanism diff --git a/docs/source/NonStatefulFunctions.rst b/docs/source/NonStatefulFunctions.rst index f69c780ad63..0eebaed04c5 100644 --- a/docs/source/NonStatefulFunctions.rst +++ b/docs/source/NonStatefulFunctions.rst @@ -15,4 +15,5 @@ Functions that do *not* depend on a previous value. OptimizationFunctions SelectionFunctions TransferFunctions + TimerFunctions TransformFunctions \ No newline at end of file diff --git a/docs/source/TimerFunctions.rst b/docs/source/TimerFunctions.rst new file mode 100644 index 00000000000..a778597b789 --- /dev/null +++ b/docs/source/TimerFunctions.rst @@ -0,0 +1,10 @@ +TimerFunctions +============== + +.. toctree:: + :maxdepth: 3 + +.. automodule:: psyneulink.core.components.functions.nonstateful.timerfunctions + :members: TimerFunction, LinearTimer, AcceleratingTimer, DeceleratingTimer, AsymptoticTimer + :private-members: + :exclude-members: Parameters diff --git a/docs/source/TimerMechanism.rst b/docs/source/TimerMechanism.rst new file mode 100644 index 00000000000..954911f3b32 --- /dev/null +++ b/docs/source/TimerMechanism.rst @@ -0,0 +1,7 @@ +TimerMechanism +============== + +.. 
automodule:: psyneulink.library.components.mechanisms.processing.integrator.timermechanism + :members: + :private-members: + :exclude-members: random, Parameters diff --git a/docs/source/TransferFunctions.rst b/docs/source/TransferFunctions.rst index 205bd6aa004..64ff3a8d18c 100644 --- a/docs/source/TransferFunctions.rst +++ b/docs/source/TransferFunctions.rst @@ -4,7 +4,8 @@ TransferFunctions .. toctree:: :maxdepth: 3 -.. automodule:: psyneulink.core.components.functions.transferfunctions - :members: TransferFunction, Identity, Linear, Exponential, Logistic, Tanh, ReLU, Angle, Gaussian, GaussianDistort, BinomialDistort, Dropout, SoftMax, LinearMatrix, TransferWithCosts, CostFunctions +.. automodule:: psyneulink.core.components.functions.nonstateful.transferfunctions + :members: TransferFunction, Identity, Linear, Exponential, Logistic, Tanh,ReLU, Angle, Gaussian, GaussianDistort, BinomialDistort, Dropout, SoftMax, LinearMatrix, TransferWithCosts, +CostFunctions :private-members: :exclude-members: Parameters diff --git a/psyneulink/core/components/functions/__init__.py b/psyneulink/core/components/functions/__init__.py index 3bc91c54816..a72e6a0767a 100644 --- a/psyneulink/core/components/functions/__init__.py +++ b/psyneulink/core/components/functions/__init__.py @@ -1,13 +1,14 @@ from . import function from .nonstateful import selectionfunctions, objectivefunctions, optimizationfunctions, transformfunctions, \ - learningfunctions, transferfunctions, distributionfunctions, fitfunctions + learningfunctions, timerfunctions, transferfunctions, distributionfunctions, fitfunctions from . import stateful from .stateful import integratorfunctions, memoryfunctions from . 
import userdefinedfunction from .function import * -from psyneulink.core.components.functions.nonstateful.transformfunctions import * from psyneulink.core.components.functions.nonstateful.transferfunctions import * +from psyneulink.core.components.functions.nonstateful.timerfunctions import * +from psyneulink.core.components.functions.nonstateful.transformfunctions import * from psyneulink.core.components.functions.nonstateful.selectionfunctions import * from psyneulink.core.components.functions.nonstateful.distributionfunctions import * from psyneulink.core.components.functions.nonstateful.objectivefunctions import * @@ -21,8 +22,9 @@ __all__ = list(function.__all__) __all__.extend(userdefinedfunction.__all__) -__all__.extend(transformfunctions.__all__) __all__.extend(transferfunctions.__all__) +__all__.extend(timerfunctions.__all__) +__all__.extend(transformfunctions.__all__) __all__.extend(selectionfunctions.__all__) __all__.extend(stateful.__all__) __all__.extend(distributionfunctions.__all__) diff --git a/psyneulink/core/components/functions/function.py b/psyneulink/core/components/functions/function.py index a63a5a23976..d2f832b74fd 100644 --- a/psyneulink/core/components/functions/function.py +++ b/psyneulink/core/components/functions/function.py @@ -534,13 +534,16 @@ class Function_Base(Function): Attributes ---------- - variable: value + variable: number format and default value can be specified by the :keyword:`variable` argument of the constructor; otherwise, they are specified by the Function's :keyword:`class_defaults.variable`. function : function called by the Function's `owner ` when it is executed. + value : number + the result returned by calling the Function. + COMMENT: enable_output_type_conversion : Bool : False specifies whether `function output type conversion ` is enabled. 
@@ -554,7 +557,7 @@ class Function_Base(Function): specifies whether the return value of the function is different than the shape of either is outermost dimension (axis 0) of its its `variable `, or any of the items in the next dimension (axis 1). Used to determine whether the shape of the inputs to the `Component` to which the function is assigned - should be based on the `variable ` of the function or its `value `. + should be based on the `variable ` of the function or its `value `. COMMENT owner : Component diff --git a/psyneulink/core/components/functions/nonstateful/timerfunctions.py b/psyneulink/core/components/functions/nonstateful/timerfunctions.py new file mode 100644 index 00000000000..b1539a1c365 --- /dev/null +++ b/psyneulink/core/components/functions/nonstateful/timerfunctions.py @@ -0,0 +1,1225 @@ +# +# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy of the License at: +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. +# +# +# ******************************************* TIMER FUNCTIONS ***************************************************** +""" + +* `TimerFunction` +* `LinearTimer` +* `AcceleratingTimer` +* `DeceleratingTimer` +* `AsymptoticTimer` + +Overview +-------- + +Functions for which `initial ` and `final ` values and a `duration +` can be specified, for use with a `TimerMechanism`. + +.. 
_TimerFunction_Types: + +Types +~~~~~ + +There are four types that implement different functional forms, each of which is rising if `initial +` is less than `final ` and declining for the reverse: + +* **LinearTimer** - progresses linearly from `initial ` to `final ` value. + (see `interactive graph `_). + +* **AcceleratingTimer** - advances from initial ` to `final ` value + by progressively larger amounts at an adjustable exponential `rate ` + (see `interactive graph `_). + +* **DeceleratingTimer** - advances from initial ` to `final ` value + by progressively smaller amounts at an adjustable exponential `rate ` + (see `interactive graph `_). + +* **AsymptoticTimer** - progresses at a fixed exponential `rate ` from `initial + ` to within `tolerance ` of `final ` + (see `interactive graph `_). + + +.. _TimerFunction_StandardAttributes: + +Standard Attributes +~~~~~~~~~~~~~~~~~~~ + +TimerFunctions have the following Parameters: + +.. _TimerFunction_Initial: +* **initial**: specifies the `value ` that the function has when its `variable + ` is 0. + +.. _TimerFunction_Final: +* **final**: specifies the `value ` that the function has when its `variable + ` is equal to `duration `. + +.. _TimerFunction_Duration: +* **duration**: specifies the value of the `variable ` at which the`value + ` of the function is equal to `final `. + +.. _TimerFunction_Rate: +* **rate**: specifies the rate at which the progression of the `value ` of the function changes. 
+ +TimerFunction Class References +------------------------------ + +""" + +from math import e + +import numpy as np +try: + import torch +except ImportError: + torch = None +from beartype import beartype + +from psyneulink._typing import Optional + +from psyneulink.core import llvm as pnlvm +from psyneulink.core.components.functions.nonstateful.transferfunctions import TransferFunction +from psyneulink.core.globals.context import handle_external_context +from psyneulink.core.globals.parameters import Parameter, check_user_specified +from psyneulink.core.globals.utilities import (ValidParamSpecType) +from psyneulink.core.globals.preferences.basepreferenceset import \ + REPORT_OUTPUT_PREF, PreferenceEntry, PreferenceLevel, ValidPrefSet +from psyneulink.core.globals.keywords import \ + (ADDITIVE_PARAM, ACCELERATING_TIMER_FUNCTION, ASYMPTOTIC_TIMER_FUNCTION, DECELERATING_TIMER_FUNCTION, + DURATION, FINAL, INITIAL, LINEAR_TIMER_FUNCTION, MULTIPLICATIVE_PARAM, OFFSET, PREFERENCE_SET_NAME, + RATE, TIMER_FUNCTION, TIMER_FUNCTION_TYPE, TOLERANCE) + +__all__ = ['LinearTimer','AcceleratingTimer','DeceleratingTimer','AsymptoticTimer'] + + +class TimerFunction(TransferFunction): # -------------------------------------------------------------------------------- + """Subclass of TransferFunction that allows a initial, final and duration value to be specified; + for use with a `TimerMechanism`. + + Attributes + ---------- + + variable : number or array + contains value to be transformed. + + initial : float (>0) + determines the value of the function when `variable ` = 0. + + final : float + determines the value of the function when `variable ` = `duration `. + + duration : float (>0) + determines the value of `variable ` at which the value of the function is equal + to `final `. + + rate : float (>1.0) + determines the rate at which the value of the function accelerates. 
+ + """ + componentType = TIMER_FUNCTION_TYPE + componentName = TIMER_FUNCTION + + class Parameters(TransferFunction.Parameters): + """ + Attributes + ---------- + + duration + see `duration ` + + :default value: None + :type: 'float' + + final + see `final ` + + :default value: None + :type: 'float' + + initial + see `initial ` + + :default value: None + :type: 'float' + + rate + see `rate ` + + :default value: None + :type: 'float' + """ + initial = Parameter(1.0, modulable=True) + final = Parameter(0.0, modulable=True) + duration = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM]) + rate = Parameter(1.0, modulable=True) + + def _validate_duration(self, duration): + if duration <= 0: + return f"must be greater than 0." + + def _validate_rate(self, rate): + if rate < 1: + return f"must be greater than or equal to 1.0." + + +class LinearTimer(TimerFunction): + """LinearTimer( \ + default_variable, \ + initial=0.0, \ + final=1.0, \ + duration=1.0, \ + params=None, \ + owner=None, \ + name=None, \ + prefs=None \ + ) + + .. _LinearTimer: + | + `function ` returns linear transform of `variable `. + + .. math:: + \\left(\\frac{final-initial}{duration}\\right) \\cdot variable + initial + + such that: + + .. math:: + value=initial \\ for\\ variable=0 + + value=final\\ for\\ variable=duration + + where: + + **initial** determines the `value ` of the function + when its `variable ` = 0. + + **final** determines the `value ` of the function + when its `variable ` = duration. + + **duration** determines the value of `variable ` + at which the value of the function = final. + + `derivative ` returns the derivative of the LinearTimer Function: + + .. math:: + \\frac{final-initial}{duration} + + See `graph `_ for interactive plot of the function using `Desmos + `_. + + Arguments + --------- + + default_variable : number or array : default class_defaults.variable + specifies a template for the value to be transformed. 
+ + initial : float : default 1.0 + specifies the value the function should have when `variable ` = 0; + must be greater than or equal to 0. + + final : float : default 1.0 + specifies the value the function should have when `variable ` = `duration + `; must be greater than `initial `. + + duration : float : default 1.0 + specifies the value of `variable ` at which the value of the function + should equal `final `; must be greater than 0. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters + in arguments of the constructor. + + owner : Component + `component ` to which to assign the Function. + + name : str : default see `name ` + specifies the name of the Function. + + prefs : PreferenceSet or specification dict : default Function.classPreferences + specifies the `PreferenceSet` for the Function (see `prefs ` for details). + + Attributes + ---------- + + variable : number or array + contains value to be transformed. + + initial : float (>0) + determines the value of the function when `variable ` = 0. + + final : float + determines the value of the function when `variable ` = `duration `. + + duration : float (>0) + determines the value of `variable ` at which the value of the function is equal + to `final `. + + owner : Component + `component ` to which the Function has been assigned. + + name : str + the name of the Function; if it is not specified in the **name** argument of the constructor, a default is + assigned by FunctionRegistry (see `Registry_Naming` for conventions used for default and duplicate names). 
+ + prefs : PreferenceSet or specification dict : Function.classPreferences + the `PreferenceSet` for function; if it is not specified in the **prefs** argument of the Function's + constructor, a default is assigned using `classPreferences` defined in __init__.py (see `Preferences` + for details). + """ + + componentName = LINEAR_TIMER_FUNCTION + + classPreferences = { + PREFERENCE_SET_NAME: 'LinearTimerClassPreferences', + REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), + } + + _model_spec_class_name_is_generic = True + + # FIX: REINSTATE Parameters AND VALIDATE INITIAL, FINAL AND DURATION + + @check_user_specified + @beartype + def __init__(self, + default_variable=None, + initial: Optional[ValidParamSpecType] = None, + final: Optional[ValidParamSpecType] = None, + duration: Optional[ValidParamSpecType] = None, + params=None, + owner=None, + prefs: Optional[ValidPrefSet] = None): + super().__init__( + default_variable=default_variable, + initial=initial, + final=final, + duration=duration, + params=params, + owner=owner, + prefs=prefs, + ) + + def _function(self, + variable=None, + context=None, + params=None, + ): + """ + + Arguments + --------- + + variable : number or array : default class_defaults.variable + a single value or array to be exponentiated. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. 
+ + Returns + ------- + + LinearTimer rise transform of variable : number or array + + """ + initial = self._get_current_parameter_value(INITIAL, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + result = ((final - initial) / duration) * variable + initial + return self.convert_output_type(result) + + @handle_external_context() + def derivative(self, input, output=None, context=None): + """Derivative of `function ` at **input**: + + .. math:: + \\frac{final-initial}{duration} + + Arguments + --------- + + input : number + value of the input to the LinearTimer transform at which derivative is to be taken. + + Returns + ------- + derivative : number or array + """ + initial = self._get_current_parameter_value(INITIAL, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + return (final - initial) / duration + + # FIX: + def _gen_llvm_transfer(self, builder, index, ctx, vi, vo, params, state, *, tags:frozenset): + ptri = builder.gep(vi, [ctx.int32_ty(0), index]) + ptro = builder.gep(vo, [ctx.int32_ty(0), index]) + + duration_ptr = ctx.get_param_or_state_ptr(builder, self, DURATION, param_struct_ptr=params) + initial_ptr = ctx.get_param_or_state_ptr(builder, self, INITIAL, param_struct_ptr=params) + final_ptr = ctx.get_param_or_state_ptr(builder, self, FINAL, param_struct_ptr=params) + offset_ptr = ctx.get_param_or_state_ptr(builder, self, OFFSET, param_struct_ptr=params) + + initial = pnlvm.helpers.load_extract_scalar_array_one(builder, initial_ptr) + final = pnlvm.helpers.load_extract_scalar_array_one(builder, final_ptr) + duration = pnlvm.helpers.load_extract_scalar_array_one(builder, duration_ptr) + offset = pnlvm.helpers.load_extract_scalar_array_one(builder, offset_ptr) + + exp_f = ctx.get_builtin("exp", [ctx.float_ty]) + val =
builder.load(ptri) + val = builder.fmul(val, duration) + val = builder.fadd(val, initial) + val = builder.call(exp_f, [val]) + + if "derivative" in tags: + # f'(x) = s*r*e^(r*x + b) + val = builder.fmul(val, final) + val = builder.fmul(val, duration) + else: + # f(x) = s*e^(r*x + b) + o + val = builder.fmul(val, final) + val = builder.fadd(val, offset) + + builder.store(val, ptro) + + # FIX: + def _gen_pytorch_fct(self, device, context=None): + final = self._get_pytorch_fct_param_value(FINAL, device, context) + initial = self._get_pytorch_fct_param_value(INITIAL, device, context) + duration = self._get_pytorch_fct_param_value(DURATION, device, context) + return lambda x : ((final - initial) / duration) * x + initial + + +class AcceleratingTimer(TimerFunction): + """ + AcceleratingTimer( \ + default_variable, \ + initial=0.0, \ + final=1.0, \ + duration=1.0, \ + rate=1.0, \ + params=None, \ + owner=None, \ + name=None, \ + prefs=None \ + ) + + .. _AcceleratingTimer: + | + `function ` returns acceleratingTimer rise transform of `variable + `; this is the inverse of the `AcceleratingTimer` Function. + + .. math:: + initial+\\left(final-initial\\right)\\left(\\frac{variable}{duration}\\right)^{rate}e^{\\left(\\left( + \\frac{variable}{duration}\\right)^{rate}-1\\right)} + + such that: + + .. math:: + value=initial \\ for\\ variable=0 + + value=final\\ for\\ variable=duration + + where: + + **initial** determines the `value ` of the function + when its `variable ` = 0. + + **final** determines the `value ` of the function + when its `variable ` = duration. + + **duration** determines the value of `variable ` + at which the value of the function = final. + + **rate** determines the `rate ` of acceleration of the function. + + `derivative ` returns the derivative of the AcceleratingTimer Function: + + .. 
math:: + (final-initial) \\cdot \\left[ rate \\cdot \\left(\\frac{variable}{duration}\\right)^{rate-1} + \\cdot \\frac{1}{duration} \\cdot e^{\\left(\\left(\\frac{variable}{duration}\\right)^{rate}-1\\right)} + + \\left(\\frac{variable}{duration}\\right)^{rate} \\cdot e^{\\left(\\left(\\frac{variable}{duration}\\right)^{ + rate}-1\\right)} \\cdot rate \\cdot \\frac{1}{duration}\\right] + + See `graph `_ for interactive plot of the function using `Desmos + `_. + + Arguments + --------- + + default_variable : number or array : default class_defaults.variable + specifies a template for the value to be transformed. + + initial : float : default 1.0 + specifies the value the function should have when `variable ` = 0; + must be greater than or equal to 0. + + final : float : default 1.0 + specifies the value the function should have when `variable ` = `duration + `; must be greater than `initial `. + + duration : float : default 1.0 + specifies the value of `variable ` at which the value of the function + should equal `final `; must be greater than 0. + + rate : float : default 1.0 + specifies the rate at which the value of the function accelerates; must be greater than 0. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters + in arguments of the constructor. + + owner : Component + `component ` to which to assign the Function. + + name : str : default see `name ` + specifies the name of the Function. + + prefs : PreferenceSet or specification dict : default Function.classPreferences + specifies the `PreferenceSet` for the Function (see `prefs ` for details). + + Attributes + ---------- + + variable : number or array + contains value to be transformed. + + initial : float (>0) + determines the value of the function when `variable ` = 0. 
+ + final : float + determines the value of the function when `variable ` = `duration `. + + duration : float (>0) + determines the value of `variable ` at which the value of the function is equal + to `final `. + + rate : float (>1.0) + determines the rate at which the value of the function accelerates. + + owner : Component + `component ` to which the Function has been assigned. + + name : str + the name of the Function; if it is not specified in the **name** argument of the constructor, a default is + assigned by FunctionRegistry (see `Registry_Naming` for conventions used for default and duplicate names). + + prefs : PreferenceSet or specification dict : Function.classPreferences + the `PreferenceSet` for function; if it is not specified in the **prefs** argument of the Function's + constructor, a default is assigned using `classPreferences` defined in __init__.py (see `Preferences` + for details). + """ + + componentName = ACCELERATING_TIMER_FUNCTION + + classPreferences = { + PREFERENCE_SET_NAME: 'AcceleratingTimerClassPreferences', + REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), + } + + _model_spec_class_name_is_generic = True + + # FIX: REINSTATE Parameters AND VALIDATE INITIAL, FINAL AND DURATION + + @check_user_specified + @beartype + def __init__(self, + default_variable=None, + initial: Optional[ValidParamSpecType] = None, + final: Optional[ValidParamSpecType] = None, + duration: Optional[ValidParamSpecType] = None, + params=None, + owner=None, + prefs: Optional[ValidPrefSet] = None): + super().__init__( + default_variable=default_variable, + initial=initial, + final=final, + duration=duration, + params=params, + owner=owner, + prefs=prefs, + ) + + def _function(self, + variable=None, + context=None, + params=None, + ): + """ + + Arguments + --------- + + variable : number or array : default class_defaults.variable + a single value or array to be exponentiated. 
+ + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. + + Returns + ------- + + AcceleratingTimer rise transform of variable : number or array + + """ + initial = self._get_current_parameter_value(INITIAL, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + rate = self._get_current_parameter_value(RATE, context) + + result = (initial + (final - initial) * np.power((variable / duration),rate) + * np.exp(np.power((variable / duration),rate) - 1)) + + return self.convert_output_type(result) + + @handle_external_context() + def derivative(self, input, output=None, context=None): + """Derivative of `function ` at **input**: + + .. math:: + (final - initial) * \\left(\\frac{(1 + duration * e^{variable})}{duration * e^{duration}}\\right) + + Arguments + --------- + + input : number + value of the input to the AcceleratingTimer transform at which derivative is to be taken. 
+ + Returns + ------- + derivative : number or array + """ + initial = self._get_current_parameter_value(INITIAL, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + rate = self._get_current_parameter_value(RATE, context) + + return ((final - initial) * + (rate * np.power((input / duration), (rate - 1)) + * ((1 / duration) + * (np.exp(np.power((input / duration), rate) - 1) + + (np.power((input / duration), rate)) + * np.exp(np.power((input / duration), rate) - 1) * rate * 1 / duration)))) + + # FIX: + def _gen_llvm_transfer(self, builder, index, ctx, vi, vo, params, state, *, tags:frozenset): + ptri = builder.gep(vi, [ctx.int32_ty(0), index]) + ptro = builder.gep(vo, [ctx.int32_ty(0), index]) + + duration_ptr = ctx.get_param_or_state_ptr(builder, self, DURATION, param_struct_ptr=params) + initial_ptr = ctx.get_param_or_state_ptr(builder, self, INITIAL, param_struct_ptr=params) + final_ptr = ctx.get_param_or_state_ptr(builder, self, FINAL, param_struct_ptr=params) + offset_ptr = ctx.get_param_or_state_ptr(builder, self, OFFSET, param_struct_ptr=params) + + duration = pnlvm.helpers.load_extract_scalar_array_one(builder, duration_ptr) + initial = pnlvm.helpers.load_extract_scalar_array_one(builder, initial_ptr) + final = pnlvm.helpers.load_extract_scalar_array_one(builder, final_ptr) + offset = pnlvm.helpers.load_extract_scalar_array_one(builder, offset_ptr) + + exp_f = ctx.get_builtin("exp", [ctx.float_ty]) + val = builder.load(ptri) + val = builder.fmul(val, duration) + val = builder.fadd(val, initial) + val = builder.call(exp_f, [val]) + + if "derivative" in tags: + # f'(x) = s*r*e^(r*x + b) + val = builder.fmul(val, final) + val = builder.fmul(val, duration) + else: + # f(x) = s*e^(r*x + b) + o + val = builder.fmul(val, final) + val = builder.fadd(val, offset) + + builder.store(val, ptro) + + # FIX: + def _gen_pytorch_fct(self, device, context=None): + final = 
self._get_pytorch_fct_param_value(FINAL, device, context) + initial = self._get_pytorch_fct_param_value(INITIAL, device, context) + duration = self._get_pytorch_fct_param_value(DURATION, device, context) + rate = self._get_pytorch_fct_param_value(RATE, device, context) + + return lambda x : (initial + (final - initial) * torch.pow((x / duration),rate) + * torch.exp(torch.pow((x / duration),rate) - 1)) + + +class DeceleratingTimer(TimerFunction): # --------------------------------------------------------------------------- + """ + DeceleratingTimer( \ + default_variable, \ + initial=1.0, \ + duration=1.0, \ + final=0.01, \ + rate=1.0, \ + params=None, \ + owner=None, \ + name=None, \ + prefs=None \ + ) + + .. _DeceleratingTimer: + | + `function ` returns exponentially decaying transform of `variable + `: + + .. math:: + \\frac{\\left(initial-final-direction\\right)}{e^{\\ln\\left(-direction\\left(initial\\ -\\ + final-direction\\right)\\right)\\left(\\frac{variable}{duration}\\right)^{ + rate}}}+final+direction + + such that: + + .. math:: + value = initial + offset\\ for\\ variable=0 + + value = (initial * final) + offset\\ for\\ variable=duration + + where: + + **initial**, together with `offset `, determines the value of the function when + `variable ` = 0, and is used together with `final ` + to determine the value of the function when `variable ` = `duration + `. + + **duration** determines the value of `variable ` at which + the value of the function should equal :math:`initial * final + offset`. + + **final** is the fraction of `initial ` when, added to `offset + `, is used to determine the value of the function when `variable + ` should equal `duration `. + + **rate** determines the `rate ` of deceleration of the function. + + **direction** is +1 if final > initial, otherwise -1, and is used to determine the direction of the + progression (rising or decaying) of the TimerFunction.
+ + `derivative ` returns the derivative of the DeceleratingTimer Function: + + .. math:: + \\frac{direction \\cdot rate \\cdot(initial-final-direction)\\cdot\\ln(direction(final-initial+direction)) \\cdot \\left(\\frac{ + variable}{duration}\\right)^{rate-1}}{duration\\cdot e^{\\ln(direction(final-initial+direction))\\left(\\frac{variable}{ + duration}\\right)^{rate}}} + + See `graph `_ for interactive plot of the function using `Desmos + `_. + + + Arguments + --------- + + default_variable : number or array : default class_defaults.variable + specifies a template for the value to be transformed. + + initial : float : default 1.0 + specifies, together with `offset `, the value of the function when `variable + ` = 0; must be greater than 0. + + final : float : default 0.01 + specifies the fraction of `initial ` when added to `offset `, + that determines the value of the function when `variable ` = `duration + `; must be between 0 and 1. + + duration : float : default 1.0 + specifies the value of `variable ` at which the `value of the function + should equal `initial ` * `final ` + `offset + `; must be greater than 0. + + rate : float : default 1.0 + specifies the rate at which the value of the function decelerates; must be greater than 0. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters + in arguments of the constructor. + + owner : Component + `component ` to which to assign the Function. + + name : str : default see `name ` + specifies the name of the Function. + + prefs : PreferenceSet or specification dict : default Function.classPreferences + specifies the `PreferenceSet` for the Function (see `prefs ` for details). + + Attributes + ---------- + + variable : number or array + contains value to be transformed. 
+ + initial : float + determines, together with `offset `, the value of the function when `variable + ` = 0. + + final : float + determines the fraction of `initial ` when added to `offset `, + that determines the value of the function when `variable ` = `duration + `. + + duration : float (>0) + determines the value of `variable ` at which the value of the function should + equal `initial ` * `final ` + `offset `. + + rate : float (>1.0) + determines the rate at which the value of the function decelerates. + + owner : Component + `component ` to which the Function has been assigned. + + name : str + the name of the Function; if it is not specified in the **name** argument of the constructor, a default is + assigned by FunctionRegistry (see `Registry_Naming` for conventions used for default and duplicate names). + + prefs : PreferenceSet or specification dict : Function.classPreferences + the `PreferenceSet` for function; if it is not specified in the **prefs** argument of the Function's + constructor, a default is assigned using `classPreferences` defined in __init__.py (see `Preferences` + for details). 
+ """ + + componentName = DECELERATING_TIMER_FUNCTION + + classPreferences = { + PREFERENCE_SET_NAME: 'DeceleratingTimerClassPreferences', + REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), + } + + _model_spec_class_name_is_generic = True + + @check_user_specified + @beartype + def __init__(self, + default_variable=None, + initial: Optional[ValidParamSpecType] = None, + final: Optional[ValidParamSpecType] = None, + duration: Optional[ValidParamSpecType] = None, + rate: Optional[ValidParamSpecType] = None, + params=None, + owner=None, + prefs: Optional[ValidPrefSet] = None): + super().__init__( + default_variable=default_variable, + initial=initial, + final=final, + duration=duration, + rate=rate, + params=params, + owner=owner, + prefs=prefs, + ) + + def _function(self, + variable=None, + context=None, + params=None, + ): + """ + + Arguments + --------- + + variable : number or array : default class_defaults.variable + amount by which to increment timer on current execution; if this is not specified, the timer is incremented + by the value of `increment `. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. 
+ + Returns + ------- + + Exponentially decayed value of variable : number or array + + """ + initial = self._get_current_parameter_value(INITIAL, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + rate = self._get_current_parameter_value(RATE, context) + + direction = 1 if final > initial else -1 + + result = ((initial - final - direction) / + np.exp((np.log(-direction * (initial - final - direction)) * np.power((variable / duration),rate))) + + final + direction) + + return self.convert_output_type(result) + + @handle_external_context() + def derivative(self, input, output=None, context=None): + """Derivative of `function ` at **input**: + + .. math:: + \\frac{direction \\cdot rate \\cdot(initial-final-direction)\\cdot\\ln(direction( + final-initial+direction))\\cdot \\left(\\frac{variable}{duration}\\right)^{rate-1}}{duration\\cdot e^{\\ln( + direction(final-initial+direction))\\left(\\frac{variable}{duration}\\right)^{rate}}} + + + Arguments + --------- + + input : number + value of the input to the DeceleratingTimer transform at which derivative is to be taken. + + Derivative of `function ` at **input**. 
+ + Returns + ------- + derivative : number or array + """ + + initial = self._get_current_parameter_value(INITIAL, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + rate = self._get_current_parameter_value(RATE, context) + direction = 1 if final > initial else -1 + + return (direction * rate * (initial - final - direction) * np.log(direction * (final - initial + direction)) * + np.power((input / duration), (rate - 1)) + / (duration * np.exp(np.log(direction * (final - initial + direction)) * + np.power((input / duration), rate)))) + + # FIX: + def _gen_llvm_transfer(self, builder, index, ctx, vi, vo, params, state, *, tags:frozenset): + ptri = builder.gep(vi, [ctx.int32_ty(0), index]) + ptro = builder.gep(vo, [ctx.int32_ty(0), index]) + + duration_ptr = ctx.get_param_or_state_ptr(builder, self, DURATION, param_struct_ptr=params) + initial_ptr = ctx.get_param_or_state_ptr(builder, self, INITIAL, param_struct_ptr=params) + final_ptr = ctx.get_param_or_state_ptr(builder, self, FINAL, param_struct_ptr=params) + offset_ptr = ctx.get_param_or_state_ptr(builder, self, OFFSET, param_struct_ptr=params) + + duration = pnlvm.helpers.load_extract_scalar_array_one(builder, duration_ptr) + initial = pnlvm.helpers.load_extract_scalar_array_one(builder, initial_ptr) + final = pnlvm.helpers.load_extract_scalar_array_one(builder, final_ptr) + offset = pnlvm.helpers.load_extract_scalar_array_one(builder, offset_ptr) + + exp_f = ctx.get_builtin("exp", [ctx.float_ty]) + val = builder.load(ptri) + val = builder.fmul(val, duration) + val = builder.fadd(val, initial) + val = builder.call(exp_f, [val]) + + if "derivative" in tags: + # f'(x) = s*r*e^(r*x + b) + val = builder.fmul(val, final) + val = builder.fmul(val, duration) + else: + # f(x) = s*e^(r*x + b) + o + val = builder.fmul(val, final) + val = builder.fadd(val, offset) + + builder.store(val, ptro) + + def _gen_pytorch_fct(self, device, 
context=None):
+        final = self._get_pytorch_fct_param_value(FINAL, device, context)
+        initial = self._get_pytorch_fct_param_value(INITIAL, device, context)
+        duration = self._get_pytorch_fct_param_value(DURATION, device, context)
+        rate = self._get_pytorch_fct_param_value(RATE, device, context)
+        direction = 1 if final > initial else -1
+
+        # Mirrors the numpy implementation in _function:
+        #   (initial - final - direction)
+        #       / exp(log(-direction * (initial - final - direction)) * (x / duration) ** rate)
+        #       + final + direction
+        # Fixes: '-direction(...)' was a call on an int (missing '*'); the denominator was missing
+        # the exp() wrapper; torch.power does not exist (torch.pow).
+        return lambda x : ((initial - final - direction) /
+                           torch.exp(torch.log(-direction * (initial - final - direction))
+                                     * torch.pow((x / duration),rate))
+                           + final + direction)
+
+
+class AsymptoticTimer(TimerFunction): # ---------------------------------------------------------------------------
+    """
+    AsymptoticTimer(         \
+         default_variable,   \
+         initial=1.0,        \
+         final=0,            \
+         duration=1.0,       \
+         tolerance=0.01,     \
+         params=None,        \
+         owner=None,         \
+         name=None,          \
+         prefs=None          \
+         )
+
+    .. _AsymptoticTimer:
+    |
+    `function ` returns exponentially progressing transform of `variable
+    ` toward an asymptotic value that reaches `duration `
+    when it falls within the specified `tolerance ` of `final
+    `:
+
+    .. math::
+        (initial - final) * \\frac{\\ln(tolerance)}{duration} *e^{\\left(\\frac{variable * \\ln(tolerance)}
+        {duration}\\right)} + final
+
+    such that:
+
+    .. math::
+        value = initial for\\ variable=0
+
+        value = ((initial - final) \\cdot tolerance) for\\ variable=duration
+
+    where:
+
+        **initial**, determines the value of the function when `variable ` = 0,
+        and is used together with `final ` and `tolerance
+        ` to determine the value of the function at which `variable
+        ` = `duration `.
+
+        **final** is the asymptotic value toward which the function decays.
+
+        **tolerance** is the fraction of `initial ` - `final
+        ` used to determine the value of the function when `variable
+        ` is equal to `duration `.
+
+        **duration** determines the value of `variable ` at which
+        the value of the function is equal to :math:`initial \\cdot final`.
+
+    .. note::
+        The function rises if `final ` > `initial `,
+        and decays if `final ` < `initial `.
+ + `derivative ` returns the derivative of the AsymptoticTimer Function: + + .. math:: + \\frac{initial\\cdot\\ln(tolerance)}{duration}\\cdot e^{\\frac{variable\\cdot\\ln(tolerance)}{duration}} + + See `graph `_ for interactive plot of the function using `Desmos + `_. + + Arguments + --------- + + default_variable : number or array : default class_defaults.variable + specifies a template for the value to be transformed. + + initial : float : default 1.0 + specifies the value of the function when `variable`=0; must be greater than 0. + + final : float : default 0.0 + specifies the asymptoticTimer value toward which the function decays. + + tolerance : float : default 0.01 + specifies the fraction of `initial `-`final ` + that determines the value of the function when `variable ` = `duration; must be + between 0 and 1. + + duration : float : default 1.0 + specifies the value of `variable ` at which the `value of the function + should equal `initial ` * `final `; + must be greater than 0. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters + in arguments of the constructor. + + owner : Component + `component ` to which to assign the Function. + + name : str : default see `name ` + specifies the name of the Function. + + prefs : PreferenceSet or specification dict : default Function.classPreferences + specifies the `PreferenceSet` for the Function (see `prefs ` for details). + + Attributes + ---------- + + variable : number or array + contains value to be transformed. + + initial : float (>0) + determines the value of the function when `variable ` = 0. + + final : float + determines the asymptoticTimer value toward which the function decays. + + tolerance : float (0,1) + determines the fraction of `initial ` - final + that determines the value of the function when `variable ` = `duration + `. 
+ + duration : float (>0) + determines the value of `variable ` at which the value of the function should + equal `initial ` * `final `. + + bounds : (None, None) + + owner : Component + `component ` to which the Function has been assigned. + + name : str + the name of the Function; if it is not specified in the **name** argument of the constructor, a default is + assigned by FunctionRegistry (see `Registry_Naming` for conventions used for default and duplicate names). + + prefs : PreferenceSet or specification dict : Function.classPreferences + the `PreferenceSet` for function; if it is not specified in the **prefs** argument of the Function's + constructor, a default is assigned using `classPreferences` defined in __init__.py (see `Preferences` + for details). + """ + + componentName = ASYMPTOTIC_TIMER_FUNCTION + + classPreferences = { + PREFERENCE_SET_NAME: 'AsymptoticTimerClassPreferences', + REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), + } + + _model_spec_class_name_is_generic = True + + class Parameters(TimerFunction.Parameters): + """ + Attributes + ---------- + rate + see `tolerance ` + + :default value: None + :type: ``float`` + + tolerance + see `tolerance ` + + :default value: 0.01 + :type: ``float`` + """ + rate = Parameter(None) + tolerance = Parameter(0.01, modulable=True) + + def _validate_rate(self, rate): + if rate is not None: + return f"is not used and should be left as None." + + def _validate_tolerance(self, tolerance): + if tolerance <= 0 or tolerance >= 1: + return f"must be between 0 and 1." 
+ + @check_user_specified + @beartype + def __init__(self, + default_variable=None, + initial: Optional[ValidParamSpecType] = None, + final: Optional[ValidParamSpecType] = None, + tolerance: Optional[ValidParamSpecType] = None, + duration: Optional[ValidParamSpecType] = None, + params=None, + owner=None, + prefs: Optional[ValidPrefSet] = None): + super().__init__( + default_variable=default_variable, + initial=initial, + duration=duration, + final=final, + tolerance=tolerance, + params=params, + owner=owner, + prefs=prefs, + ) + + def _function(self, + variable=None, + context=None, + params=None, + ): + """ + + Arguments + --------- + + variable : number or array : default class_defaults.variable + amount by which to increment timer on current execution; if this is not specified, the timer is incremented + by the value of `increment `. + + params : Dict[param keyword: param value] : default None + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. + + Returns + ------- + + Exponentially decayed value of variable : number or array + + """ + initial = self._get_current_parameter_value(INITIAL, context) + duration = self._get_current_parameter_value(DURATION, context) + tolerance = self._get_current_parameter_value(TOLERANCE, context) + final = self._get_current_parameter_value(FINAL, context) + + result = (initial - final) * np.exp(variable * np.log(tolerance) / duration) + final + + return self.convert_output_type(result) + + @handle_external_context() + def derivative(self, input, output=None, context=None): + """Derivative of `function ` at **input**: + + .. math:: + \\frac{initial\\cdot\\ln(tolerance)}{duration}\\cdot e^{\\frac{variable\\cdot\\ln(tolerance)}{duration}} + + Arguments + --------- + + input : number + value of the input to the AsymptoticTimer transform at which derivative is to be taken. 
+ + Derivative of `function ` at **input**. + + Returns + ------- + derivative : number or array + """ + + initial = self._get_current_parameter_value(INITIAL, context) + tolerance = self._get_current_parameter_value(TOLERANCE, context) + final = self._get_current_parameter_value(FINAL, context) + duration = self._get_current_parameter_value(DURATION, context) + + return (initial * np.log(tolerance) / duration) * np.exp(input * np.log(tolerance) / duration) + + # FIX: + def _gen_llvm_transfer(self, builder, index, ctx, vi, vo, params, state, *, tags:frozenset): + ptri = builder.gep(vi, [ctx.int32_ty(0), index]) + ptro = builder.gep(vo, [ctx.int32_ty(0), index]) + + duration_ptr = ctx.get_param_or_state_ptr(builder, self, DURATION, param_struct_ptr=params) + initial_ptr = ctx.get_param_or_state_ptr(builder, self, INITIAL, param_struct_ptr=params) + final_ptr = ctx.get_param_or_state_ptr(builder, self, FINAL, param_struct_ptr=params) + + duration = pnlvm.helpers.load_extract_scalar_array_one(builder, duration_ptr) + initial = pnlvm.helpers.load_extract_scalar_array_one(builder, initial_ptr) + final = pnlvm.helpers.load_extract_scalar_array_one(builder, final_ptr) + + exp_f = ctx.get_builtin("exp", [ctx.float_ty]) + val = builder.load(ptri) + val = builder.fmul(val, duration) + val = builder.fadd(val, initial) + val = builder.call(exp_f, [val]) + + if "derivative" in tags: + # f'(x) = s*r*e^(r*x + b) + val = builder.fmul(val, final) + val = builder.fmul(val, duration) + else: + # f(x) = s*e^(r*x + b) + o + val = builder.fmul(val, final) + + builder.store(val, ptro) + + def _gen_pytorch_fct(self, device, context=None): + initial = self._get_pytorch_fct_param_value(INITIAL, device, context) + tolerance = self._get_pytorch_fct_param_value(TOLERANCE, device, context) + final = self._get_pytorch_fct_param_value(FINAL, device, context) + duration = self._get_pytorch_fct_param_value(DURATION, device, context) + + return lambda x : (initial - final) * torch.exp(x * 
torch.log(tolerance) / duration) + final diff --git a/psyneulink/core/components/functions/nonstateful/transferfunctions.py b/psyneulink/core/components/functions/nonstateful/transferfunctions.py index 26c96cd3a8c..92c4cb13dfc 100644 --- a/psyneulink/core/components/functions/nonstateful/transferfunctions.py +++ b/psyneulink/core/components/functions/nonstateful/transferfunctions.py @@ -10,19 +10,24 @@ # ******************************************* TRANSFER FUNCTIONS ***************************************************** """ -* `Identity` -* `Linear` -* `Exponential` -* `Logistic` -* `Tanh` -* `ReLU` -* `Angle` -* `Gaussian` -* `GaussianDistort` -* `BinomialDistort` -* `Dropout` -* `SoftMax` -* `TransferWithCosts` +**Deterministic** + * `Identity` + * `Linear` + * `Exponential` + * `Logistic` + * `Tanh` + * `ReLU` + +**Probabilistic** + * `Angle` + * `Gaussian` + * `GaussianDistort` + * `BinomialDistort` + * `Dropout` + * `SoftMax` + +**Other** + * `TransferWithCosts` Overview -------- @@ -93,15 +98,17 @@ (ADAPTIVE, ADDITIVE_PARAM, ALL, ANGLE_FUNCTION, BIAS, BINOMIAL_DISTORT_FUNCTION, DROPOUT_FUNCTION, EXPONENTIAL_FUNCTION, GAIN, GAUSSIAN_DISTORT_FUNCTION, GAUSSIAN_FUNCTION, IDENTITY_FUNCTION, INTERCEPT, LEAK, LINEAR_FUNCTION, LOGISTIC_FUNCTION, - TANH_FUNCTION, MAX_INDICATOR, MAX_VAL, MULTIPLICATIVE_PARAM, - OFF, OFFSET, ON, OUTPUT_TYPE, PER_ITEM, PROB, PRODUCT, PROB_INDICATOR, - RATE, RELU_FUNCTION, SCALE, SLOPE, SOFTMAX_FUNCTION, STANDARD_DEVIATION, SUM, - TRANSFER_FUNCTION_TYPE, TRANSFER_WITH_COSTS_FUNCTION, VARIANCE, VARIABLE, X_0, PREFERENCE_SET_NAME) + MAX_INDICATOR, MAX_VAL, MULTIPLICATIVE_PARAM, OFF, OFFSET, ON, OUTPUT_TYPE, + PER_ITEM, PROB, PRODUCT, PROB_INDICATOR, RATE, RELU_FUNCTION, + SCALE, SLOPE, SOFTMAX_FUNCTION, STANDARD_DEVIATION, SUM, + TANH_FUNCTION, TRANSFER_FUNCTION_TYPE, TRANSFER_WITH_COSTS_FUNCTION, + VARIANCE, VARIABLE, X_0, PREFERENCE_SET_NAME) from psyneulink.core.globals.parameters import \ FunctionParameter, Parameter, 
get_validator_by_function, check_user_specified, copy_parameter_value from psyneulink.core.globals.preferences.basepreferenceset import \ REPORT_OUTPUT_PREF, PreferenceEntry, PreferenceLevel, ValidPrefSet -from psyneulink.core.globals.utilities import ValidParamSpecType, convert_all_elements_to_np_array, safe_len, is_matrix_keyword +from psyneulink.core.globals.utilities import ( + ValidParamSpecType, convert_all_elements_to_np_array, safe_len, is_matrix_keyword) __all__ = ['Angle', 'BinomialDistort', 'Dropout', 'Exponential', 'Gaussian', 'GaussianDistort', 'Identity', 'Linear', 'Logistic', 'ReLU', 'SoftMax', 'Tanh', 'TransferFunction', 'TransferWithCosts' @@ -789,10 +796,11 @@ def _gen_pytorch_fct(self, device, context=None): # ********************************************************************************************************************** -# Logistic +# Logistic # ********************************************************************************************************************** -class Logistic(TransferFunction): # ------------------------------------------------------------------------------------ + +class Logistic(TransferFunction): # ----------------------------------------------------------------------------------- """ Logistic( \ default_variable, \ @@ -1029,7 +1037,7 @@ def derivative(self, input=None, output=None, context=None): """ derivative(input=None, output=None) - Derivative of `function ` at either **input** or **output**. + Derivative of `function ` at either **input** or **output**. COMMENT: RESTORE WHEN TEST IN DERIVATIVE IS RESTORED Either **input** or **output** must be specified. @@ -1148,7 +1156,7 @@ class Tanh(TransferFunction): # ----------------------------------------------- .. _Tanh_Function: - `function ` returns hyperbolic tangent of `variable `: + `function ` returns hyperbolic tangent of `variable `: .. math:: @@ -1156,8 +1164,8 @@ class Tanh(TransferFunction): # ----------------------------------------------- .. 
note:: - The `Logistic` function is an offset and scaled version of this function. - The parameters used here have the same meaning as those used for the `Logistic` Function. + The `Tanh` function is an offset and scaled version of this function. + The parameters used here have the same meaning as those used for the `Tanh` Function. `derivative ` returns the derivative of the hyperbolic tangent at its **input**: @@ -1172,19 +1180,19 @@ class Tanh(TransferFunction): # ----------------------------------------------- specifies template for the value to be transformed. gain : float : default 1.0 - specifies value by which to multiply `variable ` before logistic transformation + specifies value by which to multiply `variable ` before Tanh transformation bias : float : default 0.0 specifies value to add to each element of `variable ` before applying `gain ` - and before logistic transformation. This argument is identical to x_0, with the opposite sign. + and before Tanh transformation. This argument is identical to x_0, with the opposite sign. x_0 : float : default 0.0 specifies value to subtract from each element of `variable ` before applying `gain ` - and before logistic transformation. This argument is identical to bias, with the opposite sign. + and before Tanh transformation. This argument is identical to bias, with the opposite sign. offset : float : default 0.0 specifies value to add to each element of `variable ` after applying `gain ` - but before logistic transformation. + but before Tanh transformation. scale : float : default 1.0 specifies value by which to multiply each element after applying Tanh transform. @@ -1223,7 +1231,7 @@ class Tanh(TransferFunction): # ----------------------------------------------- offset : float : default 0.0 value to added to each element of `variable ` after applying `gain ` - but before logistic transformation. + but before tanh transformation. 
scale : float : default 1.0 value by which element is multiplied after applying Tanh transform. diff --git a/psyneulink/core/components/functions/stateful/integratorfunctions.py b/psyneulink/core/components/functions/stateful/integratorfunctions.py index eab72c5cdf4..bcd614ef687 100644 --- a/psyneulink/core/components/functions/stateful/integratorfunctions.py +++ b/psyneulink/core/components/functions/stateful/integratorfunctions.py @@ -29,6 +29,11 @@ import warnings import numpy as np +from math import e +try: + import torch +except ImportError: + torch = None from beartype import beartype from psyneulink._typing import Callable, Mapping, Optional, Union @@ -36,21 +41,19 @@ from psyneulink.core import llvm as pnlvm from psyneulink.core.components.component import DefaultsFlexibility from psyneulink.core.components.functions.nonstateful.distributionfunctions import DistributionFunction, NormalDist -from psyneulink.core.components.functions.function import ( - DEFAULT_SEED, FunctionError, _random_state_getter, - _seed_setter, _noise_setter -) +from psyneulink.core.components.functions.function import (DEFAULT_SEED, FunctionError, _random_state_getter, + _seed_setter, _noise_setter) from psyneulink.core.components.functions.stateful.statefulfunction import StatefulFunction from psyneulink.core.globals.context import ContextFlags, handle_external_context from psyneulink.core.globals.keywords import \ - ACCUMULATOR_INTEGRATOR_FUNCTION, ADAPTIVE_INTEGRATOR_FUNCTION, ADDITIVE_PARAM, \ - DECAY, DEFAULT_VARIABLE, DRIFT_DIFFUSION_INTEGRATOR_FUNCTION, DRIFT_ON_A_SPHERE_INTEGRATOR_FUNCTION, \ - DUAL_ADAPTIVE_INTEGRATOR_FUNCTION, FITZHUGHNAGUMO_INTEGRATOR_FUNCTION, FUNCTION, \ - INCREMENT, INITIALIZER, INPUT_PORTS, INTEGRATOR_FUNCTION, INTEGRATOR_FUNCTION_TYPE, \ - INTERACTIVE_ACTIVATION_INTEGRATOR_FUNCTION, LEAKY_COMPETING_INTEGRATOR_FUNCTION, \ - MULTIPLICATIVE_PARAM, NOISE, OFFSET, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, OUTPUT_PORTS, PRODUCT, \ - RATE, REST, 
SIMPLE_INTEGRATOR_FUNCTION, SUM, TIME_STEP_SIZE, THRESHOLD, VARIABLE, MODEL_SPEC_ID_MDF_VARIABLE, \ - PREVIOUS_VALUE + (ACCUMULATOR_INTEGRATOR_FUNCTION, ADAPTIVE_INTEGRATOR_FUNCTION, ADDITIVE_PARAM, + DECAY, DEFAULT_VARIABLE, DRIFT_DIFFUSION_INTEGRATOR_FUNCTION, DRIFT_ON_A_SPHERE_INTEGRATOR_FUNCTION, + DUAL_ADAPTIVE_INTEGRATOR_FUNCTION, FITZHUGHNAGUMO_INTEGRATOR_FUNCTION, FUNCTION, + INCREMENT, INITIALIZER, INPUT_PORTS, INTEGRATOR_FUNCTION, INTEGRATOR_FUNCTION_TYPE, + INTERACTIVE_ACTIVATION_INTEGRATOR_FUNCTION, LEAKY_COMPETING_INTEGRATOR_FUNCTION, + MODEL_SPEC_ID_MDF_VARIABLE, MULTIPLICATIVE_PARAM, NOISE, OFFSET, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, + OUTPUT_PORTS, PREVIOUS_VALUE, PRODUCT, RATE, REST, SCALE, SIMPLE_INTEGRATOR_FUNCTION, SUM, TIME_STEP_SIZE, + THRESHOLD, VARIABLE) from psyneulink.core.globals.parameters import Parameter, check_user_specified from psyneulink.core.globals.preferences.basepreferenceset import ValidPrefSet from psyneulink.core.globals.utilities import ValidParamSpecType, all_within_range, is_numeric_scalar, \ @@ -58,8 +61,8 @@ __all__ = ['SimpleIntegrator', 'AdaptiveIntegrator', 'DriftDiffusionIntegrator', 'DriftOnASphereIntegrator', 'OrnsteinUhlenbeckIntegrator', 'FitzHughNagumoIntegrator', 'AccumulatorIntegrator', - 'LeakyCompetingIntegrator', 'DualAdaptiveIntegrator', 'InteractiveActivationIntegrator', - 'S_MINUS_L', 'L_MINUS_S', 'IntegratorFunction' + 'LeakyCompetingIntegrator', 'DualAdaptiveIntegrator', + 'InteractiveActivationIntegrator', 'S_MINUS_L', 'L_MINUS_S', 'IntegratorFunction' ] @@ -400,10 +403,6 @@ def _gen_llvm_load_param(self, ctx, builder, params, index, param, *, state=None return pnlvm.helpers.load_extract_scalar_array_one(builder, value_p) - -# *********************************************** INTEGRATOR FUNCTIONS ************************************************* - - class AccumulatorIntegrator(IntegratorFunction): # -------------------------------------------------------------------- """ 
AccumulatorIntegrator(                  \
@@ -420,7 +419,7 @@ class AccumulatorIntegrator(IntegratorFunction): # ----------------------------
 
     .. _AccumulatorIntegrator:
 
     Accumulates at a constant rate, that is either linear or exponential, depending on `rate
-    `. `function ` ignores `variable
+    `; `function ` ignores `variable
     ` and returns:
 
     .. math::
@@ -710,6 +709,7 @@ class SimpleIntegrator(IntegratorFunction): # ---------------------------------
 
     .. _SimpleIntegrator:
 
+    Accumulates at a rate determined by its `variable ` and `rate
     `; `function ` returns:
 
     .. math::
diff --git a/psyneulink/core/components/mechanisms/mechanism.py b/psyneulink/core/components/mechanisms/mechanism.py
index 3170e94f876..4e37262018a 100644
--- a/psyneulink/core/components/mechanisms/mechanism.py
+++ b/psyneulink/core/components/mechanisms/mechanism.py
@@ -1246,8 +1246,8 @@ class Mechanism_Base(Mechanism):
 
     function : Function : default Linear
         specifies the function used to generate the Mechanism's `value `;
-        can be a PsyNeuLink `Function` or a `UserDefinedFunction`; it `value ` is used to determine
-        the shape of the `primary outputPort ` of the Mechanism.
+        can be a PsyNeuLink `Function` or a `UserDefinedFunction`; its `value ` is used to
+        determine the shape of the `primary outputPort ` of the Mechanism.
 
     output_ports : str, list or np.ndarray : default None
         specifies the OutputPorts for the Mechanism; if it is not specified, a single OutputPort is created
@@ -1347,14 +1347,14 @@ class Mechanism_Base(Mechanism):
         name of a parameter of the function, and its value is the parameter's value.
 
     value : 2d np.array [array(float64)]
-        result of the Mechanism's `execute` method, which is usually (but not always) the `value `
-        of it `function `. It is always at least a 2d np.array, with the
-        items of axis 0 corresponding to the values referenced by the corresponding `index `
-        attribute of the Mechanism's `OutputPorts `.
The first item is generally referenced by the - Mechanism's `primary OutputPort ` (i.e., the one in the its `output_port - ` attribute), as well as the first item of `output_values - `. The `value ` is `None` until the Mechanism - has been executed at least once. + result of the Mechanism's `execute` method, which is usually (but not always) the `value ` + of it `function ` (it is not if the Mechanism implements any auxiliary function(s) + after calling its primary function). It is always at least a 2d np.array, with the items of axis 0 corresponding + to the values referenced by the corresponding `index ` attribute of the Mechanism's + `OutputPorts `. The first item is generally referenced by the Mechanism's `primary OutputPort + ` (i.e., the one in the its `output_port ` attribute), as well + as the first item of `output_values `. The `value ` is + None until the Mechanism has been executed at least once. .. note:: the `value ` of a Mechanism is not necessarily the same as its diff --git a/psyneulink/core/components/mechanisms/processing/integratormechanism.py b/psyneulink/core/components/mechanisms/processing/integratormechanism.py index e87174cd023..4a5650c42f7 100644 --- a/psyneulink/core/components/mechanisms/processing/integratormechanism.py +++ b/psyneulink/core/components/mechanisms/processing/integratormechanism.py @@ -8,8 +8,8 @@ # ************************************** IntegratorMechanism ************************************************* -""" +""" Contents -------- @@ -19,7 +19,6 @@ * `IntegratorMechanism_Execution` * `IntegratorMechanism_Class_Reference` - .. 
_IntegratorMechanism_Overview: Overview diff --git a/psyneulink/core/components/mechanisms/processing/transfermechanism.py b/psyneulink/core/components/mechanisms/processing/transfermechanism.py index 32e72ee825c..9d3a2160245 100644 --- a/psyneulink/core/components/mechanisms/processing/transfermechanism.py +++ b/psyneulink/core/components/mechanisms/processing/transfermechanism.py @@ -317,7 +317,7 @@ ` is set to True a TransferMechanism will execute until it terminates, using a `convergence criterion `. However, the Mechanism's method of termination can be configured using its `termination_measure ` and `termination_comparison_op -` `Parameters` can be used to congifure other termination conditions. +` `Parameters` can be used to configure other termination conditions. There are two broad types of termination condition: convergence and boundary termination. .. _TransferMechanism_Convergence_Termination: @@ -546,7 +546,7 @@ ` (``0.3``) takes precendence, and is assigned as the value of the TransferMechanism's `integration_rate `, overriding the specified value (``0.1``). The same applies for the specification of the TransferMechanism's **initial_value** argument and the **initializer** -for its `integration_function `. Notice also that two values are reported for +for its `integrator_function `. Notice also that two values are reported for the Mechanism's `integration_rate `. This is because this is a `modulable Parameter `. The ``integration_rate.base`` is the one that is assigned; ``integration_rate.modulated`` reports the value that was actually used when the Mechanism was last executed; diff --git a/psyneulink/core/globals/keywords.py b/psyneulink/core/globals/keywords.py index 7543fda313d..99e6e77f80e 100644 --- a/psyneulink/core/globals/keywords.py +++ b/psyneulink/core/globals/keywords.py @@ -24,25 +24,28 @@ # that are the ones actually used by the code. 
__all__ = [ - 'ACCUMULATOR_INTEGRATOR', 'ACCUMULATOR_INTEGRATOR_FUNCTION', - 'ADAPTIVE', 'ADAPTIVE_INTEGRATOR_FUNCTION', 'ADAPTIVE_MECHANISM', 'ADD_INPUT_PORT', 'ADD_OUTPUT_PORT', 'ADDITIVE', - 'ADDITIVE_PARAM', 'AFTER', 'ALL', 'ALLOCATION_SAMPLES', 'ALLOW_PROBES', 'ANGLE', 'ANGLE_FUNCTION','ANY', + 'ACCELERATING_TIMER_FUNCTION', 'ACCUMULATOR_INTEGRATOR', 'ACCUMULATOR_INTEGRATOR_FUNCTION', + 'ADAPTIVE', 'ADAPTIVE_INTEGRATOR_FUNCTION', 'ADAPTIVE_MECHANISM', + 'ADD_INPUT_PORT', 'ADD_OUTPUT_PORT', 'ADDITIVE', 'ADDITIVE_PARAM', + 'AFTER', 'ALL', 'ALLOCATION_SAMPLES', 'ALLOW_PROBES', 'ANGLE', 'ANGLE_FUNCTION','ANY', 'ARG_MAX', 'ARG_MAX_ABS', 'ARG_MAX_INDICATOR', 'ARG_MAX_ABS_INDICATOR', 'ARG_MIN', 'ARG_MIN_ABS', 'ARG_MIN_INDICATOR', 'ARG_MIN_ABS_INDICATOR', - 'ARGUMENT_THERAPY_FUNCTION', 'ARRANGEMENT', 'ASSERT', 'ASSIGN', 'ASSIGN_VALUE', 'AUTO','AUTO_ASSIGN_MATRIX', - 'AUTO_ASSOCIATIVE_PROJECTION', 'HAS_INITIALIZERS', 'AUTOASSOCIATIVE_LEARNING_MECHANISM', + 'ARGUMENT_THERAPY_FUNCTION', 'ARRANGEMENT', 'ASSERT', 'ASSIGN', 'ASSIGN_VALUE', 'ASYMPTOTIC_TIMER_FUNCTION', + 'AUTO','AUTO_ASSIGN_MATRIX', 'AUTO_ASSOCIATIVE_PROJECTION', 'AUTOASSOCIATIVE_LEARNING_MECHANISM', 'AUTODIFF_COMPOSITION', 'AUTODIFF_RESULTS', 'BACKPROPAGATION_FUNCTION', 'BINOMIAL_DISTORT_FUNCTION', 'BEFORE', 'BETA', 'BIAS', 'BOLD', 'BOTH', 'BOUNDS', 'BUFFER_FUNCTION', - 'CHANGED', 'CLAMP_INPUT', 'COMBINATION_FUNCTION_TYPE', 'COMBINE', 'COMBINE_MEANS_FUNCTION', - 'COMBINE_OUTCOME_AND_COST_FUNCTION', 'COMMAND_LINE', 'comparison_operators', 'COMPARATOR_MECHANISM', 'COMPONENT', - 'COMPONENT_INIT', 'COMPONENT_PREFERENCE_SET', 'COMPOSITION', 'COMPOSITION_FUNCTION_APPROXIMATOR', - 'COMPOSITION_INTERFACE_MECHANISM', 'CONCATENATE', 'CONCATENATE_FUNCTION', 'CONDITION', 'CONDITIONS', 'CONSTANT', + 'CHANGED', 'CLAMP_INPUT', 'TIMER_MECHANISM', 'COMBINATION_FUNCTION_TYPE', + 'COMBINE', 'COMBINE_MEANS_FUNCTION', 'COMBINE_OUTCOME_AND_COST_FUNCTION', 'COMMAND_LINE', + 'comparison_operators', 'COMPARATOR_MECHANISM', 
'COMPONENT', 'COMPONENT_INIT', 'COMPONENT_PREFERENCE_SET', + 'COMPOSITION', 'COMPOSITION_FUNCTION_APPROXIMATOR', 'COMPOSITION_INTERFACE_MECHANISM', + 'CONCATENATE', 'CONCATENATE_FUNCTION', 'CONDITION', 'CONDITIONS', 'CONSTANT', 'ContentAddressableMemory_FUNCTION', 'CONTEXT', 'CONTROL', 'CONTROL_MECHANISM', 'CONTROL_PATHWAY', 'CONTROL_PROJECTION', 'CONTROL_PROJECTION_PARAMS', 'CONTROL_PROJECTIONS', 'CONTROL_SIGNAL', 'CONTROL_SIGNAL_SPECS', 'CONTROL_SIGNALS', 'CONTROLLED_PARAMS', 'CONTROLLER', 'CONTROLLER_OBJECTIVE', 'CORRELATION', 'CPU', 'COSINE', 'COSINE_SIMILARITY', 'COST_FUNCTION', 'COUNT', 'CROSS_ENTROPY', 'CURRENT_EXECUTION_TIME', 'CUSTOM_FUNCTION', 'CUDA', 'CYCLE', - 'DDM_MECHANISM', 'DECAY', 'DEFAULT', 'DEFAULT_CONTROL_MECHANISM', 'DEFAULT_INPUT', 'DEFAULT_MATRIX', + 'DDM_MECHANISM', 'DECAY', 'DECELERATING_TIMER_FUNCTION', + 'DEFAULT', 'DEFAULT_CONTROL_MECHANISM', 'DEFAULT_INPUT', 'DEFAULT_MATRIX', 'DEFAULT_PREFERENCE_SET_OWNER', 'DEFAULT_PROCESSING_MECHANISM', 'DEFAULT_VARIABLE', 'DEFERRED_ASSIGNMENT', 'DEFERRED_DEFAULT_NAME', 'DEFERRED_INITIALIZATION', 'DETERMINISTIC', 'DICT', 'DictionaryMemory_FUNCTION', 'DIFFERENCE', 'DIFFERENCE', @@ -50,13 +53,14 @@ 'DIST_FUNCTION_TYPE', 'DIST_MEAN', 'DIST_SHAPE', 'DISTANCE_FUNCTION', 'DISTANCE_METRICS', 'DISTRIBUTION_FUNCTION_TYPE', 'DIVISION', 'DOT_PRODUCT', 'DRIFT_DIFFUSION_INTEGRATOR_FUNCTION', 'DRIFT_ON_A_SPHERE_INTEGRATOR_FUNCTION', 'DROPOUT_FUNCTION', - 'DUAL_ADAPTIVE_INTEGRATOR_FUNCTION', + 'DUAL_ADAPTIVE_INTEGRATOR_FUNCTION', 'DURATION', 'EFFERENTS', 'EID_SIMULATION', 'EID_FROZEN', 'EITHER', 'ENABLE_CONTROLLER', 'ENABLED', 'ENERGY', 'ENTROPY', - 'EM_COMPOSITION', 'EM_STORAGE_FUNCTION', 'EM_STORAGE_MECHANISM', 'EPISODIC_MEMORY_MECHANISM', 'EPOCH', 'EPOCHS', + 'EM_COMPOSITION', 'EM_STORAGE_FUNCTION', 'EM_STORAGE_MECHANISM', 'END', + 'EPISODIC_MEMORY_MECHANISM', 'EPOCH', 'EPOCHS', 'EQUAL', 'ERROR_DERIVATIVE_FUNCTION', 'EUCLIDEAN', 'EVC_MECHANISM', 'EVC_SIMULATION', 'EXAMPLE_FUNCTION_TYPE', 'EXECUTE_UNTIL_FINISHED', 
'EXECUTING', - 'EXECUTION', 'EXECUTION_COUNT', 'EXECUTION_ID', 'EXECUTION_MODE', 'EXECUTION_PHASE', - 'EXPONENTIAL', 'EXPONENT', 'EXPONENTIAL_DIST_FUNCTION', 'EXPONENTIAL_FUNCTION', 'EXPONENTS', + 'EXECUTION', 'EXECUTION_COUNT', 'EXECUTION_ID', 'EXECUTION_MODE', 'EXECUTION_PHASE', 'EXPONENT', 'EXPONENTS', + 'EXPONENTIAL', 'EXPONENTIAL_DIST_FUNCTION', 'EXPONENTIAL_FUNCTION', 'FEEDBACK', 'FITZHUGHNAGUMO_INTEGRATOR_FUNCTION', 'FINAL', 'FLAGS', 'FULL', 'FULL_CONNECTIVITY_MATRIX', 'FUNCTION', 'FUNCTIONS', 'FUNCTION_COMPONENT_CATEGORY','FUNCTION_CHECK_ARGS', 'FUNCTION_OUTPUT_TYPE', 'FUNCTION_OUTPUT_TYPE_CONVERSION', 'FUNCTION_PARAMS', @@ -64,9 +68,10 @@ 'GATING_PROJECTION_PARAMS', 'GATING_PROJECTIONS', 'GATING_SIGNAL', 'GATING_SIGNAL_SPECS', 'GATING_SIGNALS', 'GAUSSIAN', 'GAUSSIAN_FUNCTION', 'GILZENRAT_INTEGRATOR_FUNCTION', 'GREATER_THAN', 'GREATER_THAN_OR_EQUAL', 'GRADIENT_OPTIMIZATION_FUNCTION', 'GRID_SEARCH_FUNCTION', - 'HARD_CLAMP', 'HEBBIAN_FUNCTION', 'HETERO', 'HIGH', 'HOLLOW_MATRIX', 'IDENTITY_MATRIX', 'INCREMENT', 'INDEX', + 'HARD_CLAMP', 'HAS_INITIALIZERS', 'HEBBIAN_FUNCTION', 'HETERO', 'HIGH', 'HOLLOW_MATRIX', + 'IDENTITY_MATRIX', 'INCREMENT', 'INDEX', 'INIT_EXECUTE_METHOD_ONLY', 'INIT_FULL_EXECUTE_METHOD', 'INIT_FUNCTION_METHOD_ONLY', - 'INITIALIZE', 'INITIALIZED', 'INITIALIZER', 'INITIALIZE_CYCLE', 'INITIALIZE_CYCLE_VALUES', + 'INITIAL', 'INITIALIZE', 'INITIALIZED', 'INITIALIZER', 'INITIALIZE_CYCLE', 'INITIALIZE_CYCLE_VALUES', 'INITIALIZING', 'INITIALIZATION', 'INITIALIZATION_STATUS', 'INPUT', 'INPUTS', 'INPUT_CIM_NAME', 'INPUT_LABELS_DICT', 'INPUT_PORT', 'INPUT_PORTS', 'INPUT_PORT_PARAMS', 'INPUT_PORT_VARIABLES', 'INPUTS_DIM', 'INSET', 'CURRENT_VALUE', 'INTEGRATION_TYPE', @@ -77,7 +82,8 @@ 'LEARNING', 'LEARNING_FUNCTION', 'LEARNING_FUNCTION_TYPE', 'LEARNING_OBJECTIVE', 'LEARNING_MECHANISM', 'LEARNING_MECHANISMS', 'LEARNING_PATHWAY', 'LEARNING_PROJECTION', 'LEARNING_PROJECTION_PARAMS', 'LEARNING_RATE', 'LEARNING_SCALE', 'LEARNING_SCALE_LITERALS', 
'LEARNING_SCALE_NAMES', 'LEARNING_SIGNAL', 'LEARNING_SIGNAL_SPECS', - 'LEARNING_SIGNALS', 'LESS_THAN', 'LESS_THAN_OR_EQUAL', 'LINEAR', 'LINEAR_COMBINATION_FUNCTION', 'LINEAR_FUNCTION', + 'LEARNING_SIGNALS', 'LESS_THAN', 'LESS_THAN_OR_EQUAL', + 'LINEAR', 'LINEAR_COMBINATION_FUNCTION', 'LINEAR_FUNCTION', 'LINEAR_TIMER_FUNCTION', 'LOG_ENTRIES', 'LOGISTIC_FUNCTION', 'Loss', 'LOSSES', 'LOW', 'LVOC_CONTROL_MECHANISM', 'MAPPING_PROJECTION', 'MAPPING_PROJECTION_PARAMS', 'MASKED_MAPPING_PROJECTION', 'MATRIX', 'MATRIX_KEYWORD_NAMES', 'MATRIX_KEYWORD_SET', 'MATRIX_KEYWORD_VALUES', 'MATRIX_KEYWORDS', @@ -125,12 +131,12 @@ 'SAMPLE', 'SAVE_ALL_VALUES_AND_POLICIES', 'SCALAR', 'SCALE', 'SCHEDULER', 'SELF', 'SENDER', 'SEPARATE', 'SEPARATOR_BAR', 'SHADOW_INPUT_NAME', 'SHADOW_INPUTS', 'SIMPLE', 'SIMPLE_INTEGRATOR_FUNCTION', 'SIMULATIONS', 'SINGLE', 'SINGLETON', 'INPUT_SHAPES', 'SLOPE', 'SOFT_CLAMP', 'SOFTMAX_FUNCTION', 'SOURCE', 'STABILITY_FUNCTION', - 'STANDARD_ARGS', 'STANDARD_DEVIATION', 'STANDARD_OUTPUT_PORTS', 'STORE', 'SUBTRACTION', 'SUM', + 'STANDARD_ARGS', 'STANDARD_DEVIATION', 'STANDARD_OUTPUT_PORTS', 'START', 'STORE', 'SUBTRACTION', 'SUM', 'TARGET', 'TARGET_MECHANISM', 'TARGET_LABELS_DICT', 'TERMINAL', 'TARGETS', 'TERMINATION_MEASURE', 'TERMINATION_THRESHOLD', 'TERMINATION_COMPARISION_OP', 'TERSE', 'TEXT', 'THRESHOLD', - 'TIME', 'TIME_STEP_SIZE', 'TIME_STEPS_DIM', 'TRAINED_OUTPUTS', 'TRAINING_SET', - 'TRANSFER_FUNCTION_TYPE', 'TRANSFER_MECHANISM', 'TRANSFER_WITH_COSTS_FUNCTION', - 'TRIAL', 'TRIALS_DIM', + 'TIME', 'TIME_STEP_SIZE', 'TIME_STEPS_DIM', 'TIMER_FUNCTION', 'TIMER_FUNCTION_TYPE', 'TOLERANCE', + 'TRAINED_OUTPUTS', 'TRAINING_SET', 'TRAJECTORY', + 'TRANSFER_FUNCTION_TYPE', 'TRANSFER_MECHANISM', 'TRANSFER_WITH_COSTS_FUNCTION', 'TRIAL', 'TRIALS_DIM', 'UNCHANGED', 'UNIFORM_DIST_FUNCTION', 'UPDATE', 'USER_DEFINED_FUNCTION', 'USER_DEFINED_FUNCTION_TYPE', 'VALUES', 'VALIDATE', 'VALIDATION', 'VALUE', 'VALUE_ASSIGNMENT', 'VALUE_FUNCTION', 'VARIABLE', 'VARIANCE', 'VECTOR', 
'WALD_DIST_FUNCTION', 'WEIGHT', 'WEIGHTS', 'X_0', 'ZEROS_MATRIX', 'SHARED_COMPONENT_TYPES', @@ -492,6 +498,7 @@ class Loss(Enum): EXECUTION_PHASE = 'execution_phase' EXECUTION_MODE = 'execution_mode' SOURCE = 'source' +INITIAL = 'initial' INITIALIZE = "initialize" # Used as instruction to some methods INITIALIZING = " INITIALIZING " # Used as status and context for Log INITIALIZED = " INITIALIZED " # Used as status @@ -515,8 +522,12 @@ class Loss(Enum): RANDOM = 'random' FIRST= 'first' LAST = 'last' +FINAL = 'final' BEFORE = 'before' AFTER = 'after' +START = 'start' +END = 'end' +DURATION = 'duration' LOW = 'low' HIGH = 'high' MAX = 'max' @@ -574,7 +585,6 @@ class Loss(Enum): EXECUTION = 'EXECUTION' PROCESSING = 'PROCESSING' VALUE_ASSIGNMENT = 'VALUE_ASSIGNMENT' -FINAL = 'FINAL' #endregion @@ -711,6 +721,7 @@ class Loss(Enum): MEMORY_FUNCTION_TYPE = "MEMORY FUNCTION TYPE" INTEGRATOR_FUNCTION_TYPE = "INTEGRATOR FUNCTION TYPE" TRANSFER_FUNCTION_TYPE = "TRANSFER FUNCTION TYPE" +TIMER_FUNCTION_TYPE = "TIMER FUNCTION TYPE" LEABRA_FUNCTION_TYPE = "LEABRA FUNCTION TYPE" DISTRIBUTION_FUNCTION_TYPE = "DISTRIBUTION FUNCTION TYPE" OBJECTIVE_FUNCTION_TYPE = "OBJECTIVE FUNCTION TYPE" @@ -745,6 +756,7 @@ class Loss(Enum): INTEGRATOR_MECHANISM = "IntegratorMechanism" DDM_MECHANISM = "DDM" EPISODIC_MEMORY_MECHANISM = "EpisodicMemoryMechanism" +TIMER_MECHANISM = 'TimerMechanism' COMPOSITION_INTERFACE_MECHANISM = "CompositionInterfaceMechanism" PROCESSING_MECHANISM = "ProcessingMechanism" @@ -765,6 +777,7 @@ class Loss(Enum): LINEAR_FUNCTION = "Linear Function" LEABRA_FUNCTION = "Leabra Function" EXPONENTIAL_FUNCTION = "Exponential Function" +LOGARITHMIC_FUNCTION = "Logarithmic Function" LOGISTIC_FUNCTION = "Logistic Function" TANH_FUNCTION = "Tanh Function" RELU_FUNCTION = "ReLU Function" @@ -776,6 +789,13 @@ class Loss(Enum): SOFTMAX_FUNCTION = 'SoftMax Function' TRANSFER_WITH_COSTS_FUNCTION = "TransferWithCosts Function" +# TimerFunctions: +TIMER_FUNCTION = "TimerFunction" 
+LINEAR_TIMER_FUNCTION = "LinearTimer Function" +ACCELERATING_TIMER_FUNCTION = "AcceleratingTimer Function" +DECELERATING_TIMER_FUNCTION = "DeceleratingTimer Function" +ASYMPTOTIC_TIMER_FUNCTION = "AsymptoticTimer Function" + # SelectionFunctions: ONE_HOT_FUNCTION = "OneHot Function" @@ -1079,7 +1099,6 @@ class Loss(Enum): #region ---------------------------------------------- FUNCTION --------------------------------------------------- - # General ------------------------------------------------ FUNCTION_PARAMETER_PREFIX = 'func_' @@ -1185,14 +1204,19 @@ class Loss(Enum): OPERATION = "operation" OFFSET = "offset" NORMALIZE = "normalize" +TOLERANCE = "tolerance" REWARD = 'reward' NETWORK = 'network' GAMMA = 'gamma' +TRAJECTORY = 'trajectory' + #endregion +#region ---------------------------------------------- MODEL_SPEC -------------------------------------------------- + # model spec keywords MODEL_SPEC_ID_TYPE = 'type' MODEL_SPEC_ID_PSYNEULINK = 'PNL' @@ -1218,3 +1242,5 @@ class Loss(Enum): MODEL_SPEC_ID_INPUT_PORT_COMBINATION_FUNCTION = 'input_combination_function' SHARED_COMPONENT_TYPES = 'shared_component_types' + +#endregion diff --git a/psyneulink/library/components/mechanisms/processing/integrator/__init__.py b/psyneulink/library/components/mechanisms/processing/integrator/__init__.py index 028a4806c80..9d0ba52592e 100644 --- a/psyneulink/library/components/mechanisms/processing/integrator/__init__.py +++ b/psyneulink/library/components/mechanisms/processing/integrator/__init__.py @@ -1,8 +1,11 @@ from . import ddm from . import episodicmemorymechanism +from . 
import timermechanism from .ddm import * from .episodicmemorymechanism import * +from .timermechanism import * __all__ = list(ddm.__all__) __all__.extend(episodicmemorymechanism.__all__) +__all__.extend(timermechanism.__all__) diff --git a/psyneulink/library/components/mechanisms/processing/integrator/collapsingboundmechanism.py b/psyneulink/library/components/mechanisms/processing/integrator/collapsingboundmechanism.py deleted file mode 100644 index e87174cd023..00000000000 --- a/psyneulink/library/components/mechanisms/processing/integrator/collapsingboundmechanism.py +++ /dev/null @@ -1,300 +0,0 @@ -# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. You may obtain a copy of the License at: -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed -# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and limitations under the License. - - -# ************************************** IntegratorMechanism ************************************************* - -""" - -Contents --------- - - * `IntegratorMechanism_Overview` - * `IntegratorMechanism_Creation` - * `IntegratorMechanism_Structure` - * `IntegratorMechanism_Execution` - * `IntegratorMechanism_Class_Reference` - - -.. _IntegratorMechanism_Overview: - -Overview --------- - -An IntegratorMechanism integrates its input, possibly based on its prior values. The input can be a single -scalar value or an array of scalars (list or 1d np.array). If it is a list or array, then each value is -independently integrated. 
The default function (`IntegratorFunction`) can be parametrized to implement either a simple -increment rate, additive accumulator, or an (exponentially weighted) time-averaging of its input. It can also be -assigned a custom function. - -.. _IntegratorMechanism_Creation: - -Creating an IntegratorMechanism -------------------------------- - -An IntegratorMechanism can be created directly by calling its constructor, or using the `mechanism` command and -specifying *INTEGRATOR_MECHANISM* as its **mech_spec** argument. Its function is specified in the **function** -argument, which can be parametrized by calling its constructor with parameter values:: - - >>> import psyneulink as pnl - >>> my_time_averaging_mechanism = pnl.IntegratorMechanism(function=pnl.AdaptiveIntegrator(rate=0.5)) - -The **default_variable** argument specifies the format of its input (i.e., whether it is a single scalar or an -array), as well as the value to use if none is provided when Mechanism is executed. Alternatively, the **input_shapes** -argument can be used to specify the length of the array, in which case it will be initialized with all zeros. - -.. _IntegratorMechanism_Structure: - -Structure ---------- - -An IntegratorMechanism has a single `InputPort`, the `value ` of which is -used as the `variable ` for its `function `. -The default for `function ` is `AdaptiveIntegrator(rate=0.5)`. However, -a custom function can also be specified, so long as it takes a numeric value, or a list or np.ndarray of numeric -values as its input, and returns a value of the same type and format. The Mechanism has a single `OutputPort`, -the `value ` of which is assigned the result of the call to the Mechanism's -`function `. - -.. _IntegratorMechanism_Execution: - -Execution ---------- - -When an IntegratorMechanism is executed, it carries out the specified integration, and assigns the result to the -`value ` of its `primary OutputPort `. 
For the default function -(`IntegratorFunction`), if the value specified for **default_variable** is a list or array, or **input_shapes** is greater -than 1, each element of the array is independently integrated. If its `rate ` parameter is a -single value, that rate is used for integrating each element. If the `rate ` parameter is a -list or array, then each element is used as the rate for the corresponding element of the input (in this case, `rate -` must be the same length as the value specified for **default_variable** or **input_shapes**). -Integration can be reset to the value of its `function `\\s `initializer by setting -its `reset ` parameter to a non-zero value, as described below. - -.. _IntegratorMechanism_Reset: - -*Resetting the IntegratorMechanism* -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -An IntegatorMechanism has a `modulable ` `reset ` parameter -that can be used to reset its value to the value of its `function `\\s `initializer -`. This also clears the `value ` `history `, -thus effectively setting the `previous_value ` of its `function -` to None. - -The `reset ` parameter can be used to reset the IntegratorMechanism under the control of a -`ControlMechanism`. This simplest way to do this is to specify the `reset ` parameter of -the IntgeratorMechanism in the **control** argument of the ControlMechanism's constructor, and to specify *OVERRIDE* -in its **modulation** argument, as in the following example:: - - >>> my_integrator = IntegratorMechanism() - >>> ctl_mech = pnl.ControlMechanism(modulation=pnl.OVERRIDE, control=(pnl.RESET, my_integrator)) - -In this case, any non-zero value of the ControlMechanism's `ControlSignal` will reset the IntegratorMechanism. 
-*OVERRIDE* must be used as its `modulation ` parameter (instead of its default value -of *MULTIPLICATIVE*), so that the value of the ControlMechanism's `ControlSignal` is assigned directly to the -IntegratorMechanism's `reset ` parameter (otherwise, since the default of the `reset -` parameter is 0, the ControlSignal's value has no effect). An alternative is to specify -the **reset_default** agument in the IntegratorMechanism constructor with a non-zero value, and while allowing the -ControlMechanism to use its default value for `modulation ` (i.e., *MULTIPLICATIVE*):: - - >>> my_integrator = IntegratorMechanism(reset_default=1) - >>> ctl_mech = pnl.ControlMechanism(control=(pnl.RESET, my_integrator)) - -In this case, a ControlSignal with a zero value suppresses a reset by multiplying the `reset -` parameter by 0, whereas a ControlSignal with a non-zero value multiples the `reset -` parameter's non-zero default value, resulting in a non-zero value that elicits a reset. - -.. _IntegratorMechanism_Class_Reference: - -Class Reference ---------------- - -""" -from collections.abc import Iterable - -from beartype import beartype - -from psyneulink._typing import Optional, Union -import numpy as np - -from psyneulink.core.components.functions.function import Function -from psyneulink.core.components.functions.stateful.integratorfunctions import AdaptiveIntegrator -from psyneulink.core.components.mechanisms.processing.processingmechanism import ProcessingMechanism_Base -from psyneulink.core.components.mechanisms.mechanism import Mechanism, MechanismError -from psyneulink.core.globals.keywords import \ - DEFAULT_VARIABLE, INTEGRATOR_MECHANISM, VARIABLE, PREFERENCE_SET_NAME, RESET -from psyneulink.core.globals.parameters import Parameter, check_user_specified -from psyneulink.core.globals.preferences.basepreferenceset import ValidPrefSet, REPORT_OUTPUT_PREF -from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel - -__all__ = [ - 
'DEFAULT_RATE', 'IntegratorMechanism', 'IntegratorMechanismError' -] - -# IntegratorMechanism parameter keywords: -DEFAULT_RATE = 0.5 - -class IntegratorMechanismError(MechanismError): - pass - - -class IntegratorMechanism(ProcessingMechanism_Base): - """ - IntegratorMechanism( \ - function=AdaptiveIntegrator(rate=0.5)) - - Subclass of `ProcessingMechanism ` that integrates its input. - See `Mechanism ` for additional arguments and attributes. - - Arguments - --------- - - function : IntegratorFunction : default IntegratorFunction - specifies the function used to integrate the input. Must take a single numeric value, or a list or np.array - of values, and return one of the same form. - - reset_default : number, list or np.ndarray : default 0 - specifies the default value used for the `reset ` parameter. - - Attributes - ---------- - - reset : int, float or 1d array of length 1 : default 0 - if non-zero, the IntegratorMechanism's `reset ` method is called, which resets the - `value ` of the IntegratorMechanism to its initial value (see - `IntegratorMechanism_Reset` for additional details). 
- - """ - - componentType = INTEGRATOR_MECHANISM - - classPreferenceLevel = PreferenceLevel.TYPE - # These will override those specified in TYPE_DEFAULT_PREFERENCES - classPreferences = { - PREFERENCE_SET_NAME: 'IntegratorMechanismCustomClassPreferences', - REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)} - - class Parameters(ProcessingMechanism_Base.Parameters): - """ - Attributes - ---------- - - function - see `function ` - - :default value: `AdaptiveIntegrator`(initializer=numpy.array([0]), rate=0.5) - :type: `Function` - - reset - see `reset ` - - :default value: None - :type: 'list or np.ndarray' - """ - function = Parameter(AdaptiveIntegrator(rate=0.5), stateful=False, loggable=False) - reset = Parameter([0], modulable=True, constructor_argument='reset_default') - - # - @check_user_specified - @beartype - def __init__(self, - default_variable=None, - input_shapes=None, - input_ports:Optional[Union[list, dict]]=None, - function=None, - reset_default=0, - output_ports:Optional[Union[str, Iterable]]=None, - params=None, - name=None, - prefs: Optional[ValidPrefSet] = None, - **kwargs): - """Assign type-level preferences, default input value (SigmoidLayer_DEFAULT_BIAS) and call super.__init__ - """ - - super(IntegratorMechanism, self).__init__(default_variable=default_variable, - input_shapes=input_shapes, - function=function, - reset_default=reset_default, - params=params, - name=name, - prefs=prefs, - input_ports=input_ports, - output_ports=output_ports, - **kwargs) - - # IMPLEMENT: INITIALIZE LOG ENTRIES, NOW THAT ALL PARTS OF THE MECHANISM HAVE BEEN INSTANTIATED - - # def _parse_function_variable(self, variable, context=None, context=None): - # super()._parse_function_variable(variable, context, context) - - def _handle_default_variable(self, default_variable=None, input_shapes=None, input_ports=None, function=None, params=None): - """If any parameters with len>1 have been specified for the Mechanism's function, and Mechanism's - 
default_variable has not been specified, reshape Mechanism's variable to match function's, - but make sure function's has the same outer dimensionality as the Mechanism's - """ - - # Get variable for Mechanism - user_specified = False - if default_variable is not None: - variable = np.atleast_1d(default_variable) - user_specified = True - else: - variable = self.parameters.variable.default_value - user_specified = self.parameters.variable._user_specified - - # Only bother if an instantiated function was specified for the Mechanism - if isinstance(function, Function): - function_variable = function.parameters.variable.default_value - function_variable_len = function_variable.shape[-1] - variable_len = variable.shape[-1] - - # Raise error if: - # - the length of both Mechanism and function variable are greater than 1 and they don't match, or - # - the Mechanism's variable length is 1 and the function's is > 1 (in which case would like to assign - # shape of function's variable to that of Mechanism) but Mechanism's variable is user-specified. 
- if ((variable_len>1 and function_variable_len>1 and variable_len!=function_variable_len) or - (function_variable_len>1 and variable_len==1 and user_specified)): - raise IntegratorMechanismError(f"Shape of {repr(VARIABLE)} for function specified for {self.name} " - f"({function.name}: {function.variable.shape}) does not match " - f"the shape of the {repr(DEFAULT_VARIABLE)} specified for the " - f"{repr(Mechanism.__name__)}.") - - # If length of Mechanism's variable is 1 but the function's is longer, - # reshape Mechanism's inner dimension to match function - elif variable_len==1 and function_variable_len>1: - variable_shape = list(variable.shape) - variable_shape[-1] = function_variable.shape[-1] - # self.parameters.variable.default_value = np.zeros(tuple(variable_shape)) - variable = np.zeros(tuple(variable_shape)) - else: - variable = default_variable - else: - variable = default_variable - - # IMPLEMENTATON NOTE: - # Don't worry about case in which length of function's variable is 1 and Mechanism's is > 1 - # as the reshaping of the function's variable will be taken care of in _instantiate_function - - return super()._handle_default_variable(default_variable=variable, - input_shapes=input_shapes, - input_ports=input_ports, - function=function, - params=params) - - def _execute(self, variable=None, context=None, runtime_params=None, **kwargs): - """Override to check for call to reset by ControlSignal""" - # IMPLEMENTATION NOTE: - # This could be augmented to use reset parameter value as argument to reset() - # if it is the same shape an an initializer for the Mechanism - value = super()._execute(variable=variable, context=context, runtime_params=runtime_params, **kwargs) - # No need to reset during initialization (which will occur if **reset_default** != 0) - if not self.is_initializing: - if np.array(self._get_current_parameter_value(RESET,context)).squeeze(): - self.reset(context=context) - value = self.parameters.value._get(context).reshape(value.shape) - 
return value diff --git a/psyneulink/library/components/mechanisms/processing/integrator/timermechanism.py b/psyneulink/library/components/mechanisms/processing/integrator/timermechanism.py new file mode 100644 index 00000000000..de44b333d8b --- /dev/null +++ b/psyneulink/library/components/mechanisms/processing/integrator/timermechanism.py @@ -0,0 +1,412 @@ +# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy of the License at: +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. + + +# ************************************** TimerMechanism ************************************************* + +""" + +Contents +-------- + + * `TimerMechanism_Overview` + * `TimerMechanism_Creation` + * `TimerMechanism_Structure` + * `TimerMechanism_Execution` + * `TimerMechanism_Examples` + * `TimerMechanism_Class_Reference` + + +.. _TimerMechanism_Overview: + +Overview +-------- + +A TimerMechanism is a type of `IntegratorMechanism` the `value ` of which begins at a specified +`start ` value, is changed monotonically each time it is executed, until it reaches a specified +`end ` value. The number of executions it takes to do so is determined by a combination of its +`duration ` and `increment ` parameters, and whether or not it +receives any input; and the path that its `value ` takes is determined by its `trajectory +` Function. It can be reset to its starting value by calling its `reset +` method, or by modulating its `reset ` Parameter with a `ControlSignal`. 
+ +A TimerMechanism can be used to implement a variety of time-based processes, such as the collapse of a boundary over +time, or the rise of a value to a threshold. It can also be configured to execute multiple such processes in parallel, +each with its own starting and ending values, as well as direction, increment and input (on a given execution), all of +which use the same `trajectory ` Function. + +.. _TimerMechanism_Creation: + +Creating a TimerMechanism +------------------------- + +A TimerMechanism can be created directly by calling its constructor, or using the `mechanism` command and specifying +*TIMER_MECHANISM* as its **mech_spec** argument. It can be created with or without a source of input. By default, a +TimerMechanism increments linearly, starting at 0, incrementing by 0.01 each time it is executed, and stopping when it +reaches 1. However, the shape, starting, ending and rate of increment can all be configured. The shape of the timer's +progression is specified by its **trajectory** argument, which must be a `TimerFunction` or an appropriately configured +`UserDefinedFunction` (see `below ` for details); the starting and ending `values +` of the timer are specified by its **start** and **end** arguments, respectively, and the amount +it progresses each time the Mechanism is executed (in the absence of input) is specified by its **increment** argument. + +.. _TimerMechanism_Structure: + +Structure +--------- + +A TimerMechanism may or may not have a source of input, and has a single `OutputPort`. Its `function +` is a `SimpleIntegrator` `Function` that determines the input to the TimerMechanism's +`trajectory ` Function which, in turn, determines the `value ` of the +TimerMechanism.
The TimerMechanism's `function ` is always a `SimpleIntegrator` that always +starts at 0 and, each time it is executed, increments the value of the TimerMechanism based either on its input or, if +it receives none, then its `increment ` Parameter (see `TimerMechanism_Execution` for +additional details). + + .. technical_note:: + The TimerMechanism's `function ` cannot be modified; its `rate + ` is set to the value specified for the TimerMechanism's `increment + ` Parameter, and its `initializer `, + `noise ` and `offset ` Parameters are fixed at 0 + +.. _TimerMechanism_Trajectory_Function: + +Its `trajectory ` Function must be either a `TimerFunction` or a `UserDefinedFunction` that +has **initial**, **final** and **duration** parameters, and takes a single numeric value, a list or an np.array as its +`variable `. + + .. technical_note:: + The `trajectory ` Function is an auxiliary function that takes the result of the + TimerMechanism's `function ` and transforms it to implement the specified trajectory + of the timer's `value `. The value of the `start ` parameter is + assigned as the `initial ` Parameter of its `trajectory ` + Function; its `end ` Parameter is assigned as the `final ` Parameter of + the `trajectory ` Function, and its `duration ` Parameter + is assigned as the `duration ` Parameter of the `trajectory `; + which is also used to set the TimerMechanism's `finished ` attribute. + +.. _TimerMechanism_Modulation: + +A TimerMechanism's `start `, `end ` and `increment ` +parameters can be `modulated ` by a `ControlMechanism`. + +.. _TimerMechanism_Execution: + +Execution +--------- + +When a TimerMechanism is executed, it advances its value by adding either its `input ` +or its `increment ` Parameter to the `previous_value ` +of its `function `, that is then passed to the `Function` specified by its `trajectory +` Parameter, the result of which is assigned to the `value ` of the +TimerMechanism's `primary OutputPort `.
If its `default_variable ` +is a scalar or of length 1, or its **input_shapes** is specified as 1, the TimerMechanism generates a scalar value +as its output; otherwise, it generates an array of values, each element of which is independently advanced. If its +`increment ` Parameter is a single value, that is used for advancing all elements; if the +`increment ` Parameter is a list or array, then each element is used to increment the +corresponding element of the timer array. + +The TimerMechanism stops advancing after the `value ` of its `function ` +equals the TimerMechanism's `duration ` Parameter. If the TimerMechanism receives no input, +then it will stop advancing after the number of executions = `duration ` / `increment +`. If the TimerMechanism receives input, then the number of executions is determined by the +number of times it is executed, and the amount of input it receives on each execution. When the TimerMechanism stops +advancing, it sets its `finished ` attribute to `True`, and its `value ` +remains equal to its `end ` on any further executions, unless it is reset. If the TimerMechanism +is reset, its `value ` is set to its `start ` value. + + .. hint:: + A TimerMechanism's `finished ` attribute can be used together with a `Scheduler` + `Condition` to make the processing of other `Mechanisms ` contingent on the TimerMechanism + reaching its end value. + + .. note:: + The TimerMechanism's `finished ` attribute is not used to determine when it stops + advancing; rather, it is set to `True` when the TimerMechanism's `value ` reaches its + `end ` value. + +If a TimerMechanism continues to be executed after its `finished ` attribute is True, its +`value ` remains equal to its `end ` value. The TimerMechanism can be reset +to its `start ` value by setting its `reset ` Parameter to a non-zero value, +as described for a standard `IntegratorMechanism `. + +.. 
_TimerMechanism_Examples: + +Examples +-------- + +The following example creates a TimerMechanism with a linear decay from 1 to 0: + + >>> import psyneulink as pnl + >>> my_timer_mechanism = pnl.TimerMechanism(trajectory=pnl.LinearTimer, start=1, end=0) + + +.. _TimerMechanism_Class_Reference: + +Class Reference +--------------- + +""" +from collections.abc import Iterable + +from beartype import beartype + +from psyneulink._typing import Optional, Union +import numpy as np + +from psyneulink.core.components.functions.nonstateful.timerfunctions import TimerFunction, LinearTimer +from psyneulink.core.components.functions.stateful.integratorfunctions import IntegratorFunction, SimpleIntegrator +from psyneulink.core.components.mechanisms.processing.integratormechanism import IntegratorMechanism +from psyneulink.core.components.mechanisms.mechanism import MechanismError +from psyneulink.core.globals.keywords import TIMER_MECHANISM, PREFERENCE_SET_NAME, RESET, TRAJECTORY +from psyneulink.core.globals.parameters import Parameter, FunctionParameter, check_user_specified +from psyneulink.core.globals.preferences.basepreferenceset import ValidPrefSet, REPORT_OUTPUT_PREF +from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel + +__all__ = [ + 'DEFAULT_RATE', 'TimerMechanism', 'TimerMechanismError' +] + +# TimerMechanism parameter keywords: +DEFAULT_RATE = 1 + + +class TimerMechanismError(MechanismError): + pass + + +class TimerMechanism(IntegratorMechanism): + """ + TimerMechanism( \ + start=0, \ + increment=1, \ + end=1, \ + function=SimpleIntegrator(rate=1), \ + trajectory=LinearTimer, \ + duration=1) + + Subclass of `IntegratorMechanism` that advances its input until it reaches a specified value. + See `IntegratorMechanism ` for additional arguments and attributes.
+ + Arguments + --------- + + start : scalar, list or array : default 0 + specifies the starting `value ` of the timer; if a list or array, the length must be + the same as specified for **default_variable** or **input_shapes** (see `TimerMechanism_Execution` for + additional details). + + increment : scalar, list or array : default 1 + specifies the amount by which the `previous_value ` of the + TimerMechanism's `function ` is incremented each time the TimerMechanism is + executed in the absence of input; if a list or array, the length must be the same as specified for + **default_variable** or **input_shapes** (see `TimerMechanism_Execution` for additional details). + + function : IntegratorFunction : default SimpleIntegrator(rate=1) + specifies the function used to increment the input to the TimerMechanism's `trajectory + ` Function; must take a single numeric value, or a list or np.array + of values, and return one of the same shape. + + trajectory : TransferFunction or UserDefinedFunction : default LinearTimer + specifies the shape of the timer's trajectory; must be a supported `TransferFunction` + (see `trajectory function `) + + end : scalar, list or array : default 1 + specifies the value of its `trajectory ` function at which the timer stops advancing; + if a list or array, the length must be the same as specified for **default_variable** or **input_shapes** (see + `TimerMechanism_Execution` for additional details). + + duration : scalar, list or array : default 1 + specifies the value of its `variable ` at which the timer stops advancing; if a list + or array, the length must be the same as specified for **default_variable** or **input_shapes** (see + `TimerMechanism_Execution` for additional details). + + reset_default : number, list or np.ndarray : default 0 + specifies the default value used for the `reset ` parameter. 
+ + Attributes + ---------- + + start : scalar, list or array + determines the starting `value ` of the timer; assigned as the `start` Parameter of the + TimerMechanism's `trajectory ` if it has one; otherwise, it is computed if possible + or, an error is raised (see `TimerMechanism_Execution` for additional details). + + increment : scalar, list or array + determines the amount by which the `previous_value ` of the + TimerMechanism's `function ` is incremented each time the TimerMechanism is + executed in the absence of input; assigned as the `rate ` of the TimerMechanism's + `function ` (see `TimerMechanism_Execution` for additional details). + + function : IntegratorFunction + determines the function used to advance the input to the TimerMechanism's `trajectory + ` Function; if the TimerMechanism receives an external input, that is + used to advance the timer; otherwise, the `increment ` Parameter is used. + + trajectory : TimerFunction or UserDefinedFunction + determines the `Function` used to transform the output of the TimerMechanism's `function + ` to generate its output; this determines the shape of the trajectory + of the TimerMechanism's `value `. + + end : scalar, list or array : default 1 + determines the value of its `trajectory ` function at which the timer stops + advancing; if a list or array, the length must be the same as specified for **default_variable** or + **input_shapes** (see `TimerMechanism_Execution` for additional details). + + duration : scalar, list or array + determines the value at which the timer stops advancing, after which it sets its `finished + ` Parameter to `True` and `value ` remains equal to + its `duration ` value. + + finished : bool + indicates whether the TimerMechanism has reached its `duration ` value + (see `TimerMechanism_Execution` for additional details).
+ + reset : int, float or 1d array of length 1 : default 0 + if non-zero, the TimerMechanism's `reset ` method is called, which resets the + `value ` of the TimerMechanism to its initial value (see + `TimerMechanism_Reset` for additional details). + + """ + componentType = TIMER_MECHANISM + + classPreferenceLevel = PreferenceLevel.TYPE + # These will override those specified in TYPE_DEFAULT_PREFERENCES + classPreferences = { + PREFERENCE_SET_NAME: 'TimerMechanismCustomClassPreferences', + REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)} + + class Parameters(IntegratorMechanism.Parameters): + """ + Attributes + ---------- + + finished + see `finished ` + + :default value: False + :type: `bool` + + duration + see `duration ` + + :default value: 1 + :type: `float` + + function + see `function ` + + :default value: `SimpleIntegrator`(initializer=numpy.array([0]), rate=1) + :type: `Function` + + increment + see `increment ` + + :default value: 1 + :type: `float` + + start + see `start ` + + :default value: 0 + :type: `float` + + end + see `end ` + + :default value: 1 + :type: `float` + + trajectory + see `trajectory ` + + :default value: `LinearTimer` + :type: `Function` + """ + function = Parameter(SimpleIntegrator, stateful=False, loggable=False) + trajectory = Parameter(LinearTimer, stateful=False, loggable=False) + start = FunctionParameter(0, function_name='trajectory', function_parameter_name='initial', primary=True) + increment = FunctionParameter(.01, function_name='function', function_parameter_name='rate', primary=True) + end = FunctionParameter(1, function_name='trajectory', function_parameter_name='final', primary=True ) + duration = FunctionParameter(1, function_name='trajectory', function_parameter_name='duration', primary=True ) + finished = Parameter(False, stateful=True, loggable=True) + + def _validate_trajectory(self, trajectory): + if not (isinstance(trajectory, TimerFunction) + or (isinstance(trajectory, type) and 
issubclass(trajectory, TimerFunction))): + return f'must be a TimerFunction' + + def _validate_start(self, start): + if not isinstance(start, (int, float, list, np.ndarray)): + return f'must be an int, float or a list or array of either' + + def _validate_increment(self, increment): + if not isinstance(increment, (int, float, list, np.ndarray)): + return f'must be an int, float or a list or array of either' + + def _validate_end(self, end): + if not isinstance(end, (int, float, list, np.ndarray)): + return f'must be an int, float or a list or array of either' + + def _validate_duration(self, duration): + if not isinstance(duration, (int, float, list, np.ndarray)): + return f'must be an int, float or a list or array of either' + + @check_user_specified + @beartype + def __init__(self, + input_shapes=None, + start=None, + increment=None, + function=None, + trajectory=None, + end=None, + duration=None, + params=None, + name=None, + prefs: Optional[ValidPrefSet] = None, + **kwargs): + """Assign type-level preferences and call super.__init__ + """ + + super(TimerMechanism, self).__init__(default_variable=1, + input_shapes=input_shapes, + start=start, + increment=increment, + function=function, + trajectory=trajectory, + end=end, + duration=duration, + finished=False, + params=params, + name=name, + prefs=prefs, + **kwargs) + + def _execute(self, variable=None, context=None, runtime_params=None, **kwargs): + """Override to check for call to reset by ControlSignal""" + if not self.is_initializing and self.parameters.finished.get(context): + return self.trajectory(self.function.parameters.previous_value._get(context)) + + x = super()._execute(variable=variable, context=context, runtime_params=runtime_params, **kwargs) + y = self.trajectory(x) + + # No need to reset during initialization (which will occur if **reset_default** != 0) + if not self.is_initializing: + + if np.allclose(x,self.parameters.duration.get(context)): +
self.parameters.finished._set(True, context) + + # If reset Parameter has been set, reset() TimerMechanism to start value + if np.array(self._get_current_parameter_value(RESET,context)).squeeze(): + self.reset(self.parameters.start._get(context), context=context) + y = self.parameters.value._get(context).reshape(y.shape) + + return y + + def reset(self, *args, force=False, context=None, **kwargs): + super().reset(*args, force=force, context=context, **kwargs) + self.finished = False diff --git a/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py b/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py index b05f2d77859..8d66dbf7a3b 100644 --- a/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py +++ b/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py @@ -6,7 +6,7 @@ # See the License for the specific language governing permissions and limitations under the License. # NOTES: -# * NOW THAT NOISE AND INTEGRATION_RATE ARE PROPRETIES THAT DIRECTLY REFERERENCE integrator_function, +# * NOW THAT NOISE AND INTEGRATION_RATE ARE PROPERTIES THAT DIRECTLY REFERERENCE integrator_function, # SHOULD THEY NOW BE VALIDATED ONLY THERE (AND NOT IN TransferMechanism)?? # * ARE THOSE THE ONLY TWO integrator PARAMS THAT SHOULD BE PROPERTIES?? 
diff --git a/tests/mechanisms/test_timer_mechanism.py b/tests/mechanisms/test_timer_mechanism.py new file mode 100644 index 00000000000..0cedc1e88ae --- /dev/null +++ b/tests/mechanisms/test_timer_mechanism.py @@ -0,0 +1,73 @@ +import pytest +import numpy as np +import psyneulink as pnl +from psyneulink.library.components.mechanisms.processing.integrator.timermechanism import TimerMechanism +from psyneulink.core.components.functions.nonstateful.timerfunctions import ( + LinearTimer, AcceleratingTimer, DeceleratingTimer, AsymptoticTimer) + + +class TestTimer: + + arg_names = ('timer_function', 'start','end','duration','increment','expected_no_input', 'expected_w_input') + # Note: the values below have been independently verified against those in the Desmos graphing calculator + timer_test_data = [ + (LinearTimer, .1, .4, .3, .1, (.2, .3, .4, .4), + (.35, .45)), + (LinearTimer, -1, -4, 3, 1, (-2, -3, -4, -4), + (-3.5, -4.5)), + (AcceleratingTimer, .1, .3, .3, .1, (0.13422781, 0.19553751, .3, .3), + (0.24108029, 0.37565076)), + (AcceleratingTimer, -1, -3, 3, 1, (-1.3422781, -1.9553751, -3, -3), + (-2.41080287, -3.7565076)), + (DeceleratingTimer, -.1, -.3, .3, .1, (-0.17075676534276596, -0.2373414308173889, + -0.30000000000000004, -0.30000000000000004), + (-0.26914668, -0.32992988)), + (DeceleratingTimer, 1, 3, 3, 1, (1.919916176948096, 2.5577504296925917, 3.0, 3.0), + (2.79906304, 3.16731682)), + (AsymptoticTimer, .1, .3, .3, .1, (0.25691130619936233, 0.29071682233277443, 0.298, 0.298, 0.298), + (0.29569113, 0.29907168)), + (AsymptoticTimer, -1, -3, 3, 1, (-2.5691130619936233, -2.9071682233277443, -2.98, -2.98, -2.98), + (-2.95691131, -2.9907168)) + ] + + @pytest.mark.mechanism + @pytest.mark.integrator_mechanism + @pytest.mark.parametrize(arg_names, + timer_test_data, + ids=[x[0] for x in timer_test_data] + ) + def test_timer_functions(self, timer_function, start, end, duration, increment, + expected_no_input, expected_w_input): + timer = 
TimerMechanism(trajectory=timer_function, start=start, + end=end, + increment=increment, + duration=duration) + # Test without input: + for i in range(len(expected_no_input) - 1): + np.testing.assert_allclose(timer.execute(), expected_no_input[i]) + + # Test with input on 2nd execution: + assert timer.value != 0 + timer.reset() + assert timer.value == 0 + np.testing.assert_allclose(timer.execute(), expected_no_input[0]) + np.testing.assert_allclose(timer.execute(1.5), expected_w_input[0]) + np.testing.assert_allclose(timer.execute(), expected_w_input[1]) + + def test_timer_in_composition(self): + for with_input in {True, False}: + input_mech = pnl.ProcessingMechanism() + if with_input: + timer = TimerMechanism(trajectory=LinearTimer, start=1, end=3, increment=1, duration=2) + inputs = {input_mech: 2.5, timer: 1.7} + expected = [[2.5], [2.7]] + else: + timer = TimerMechanism(trajectory=LinearTimer, + start=1, end=3, increment=1, duration=2, + input_ports={pnl.NAME: 'STEPS', + pnl.DEFAULT_INPUT: pnl.DEFAULT_VARIABLE}) + inputs = {input_mech: 2.5} + expected = [[2.5], [2]] + comp = pnl.Composition(nodes=[input_mech, timer]) + result = comp.run(inputs=inputs) + np.testing.assert_allclose(result, expected)