Add simple solver for ConsPrefShock
This is where we really start to see that subfunctions for the solvers should be used, as this one is almost entirely a copy-paste from ConsIndShock. The idea behind the OO solver encapsulation wasn't bad, but the resulting code was horrible.
mnwhite committed Mar 11, 2024
1 parent 936930a commit fb10736
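As a hedged illustration of the refactoring the commit message points toward, the end-of-period marginal value step that ConsIndShock and ConsPrefShock compute identically could be factored into one shared helper. The helper name and signature below are hypothetical and are not part of this commit or of HARK:

    from HARK.distribution import expected


    def calc_EndOfPrd_vP(IncShkDstn, aNrmGrid, Rfree, PermGroFac, CRRA, DiscFacEff, vPfuncNext):
        """End-of-period marginal value of assets on aNrmGrid (hypothetical shared helper)."""

        def calc_vPnext(S, a, R):
            # Next period's normalized market resources, then its marginal value,
            # adjusted for permanent income growth and the permanent shock.
            mNrmNext = R / (PermGroFac * S["PermShk"]) * a + S["TranShk"]
            return S["PermShk"] ** (-CRRA) * vPfuncNext(mNrmNext)

        vPfacEff = DiscFacEff * Rfree * PermGroFac ** (-CRRA)
        return vPfacEff * expected(calc_vPnext, IncShkDstn, args=(aNrmGrid, Rfree))

Each one-period solver would then call the helper and keep only its model-specific pieces; for ConsPrefShock, that is the preference-shock scaling of consumption and the integration over PrefShkDstn.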
Showing 2 changed files with 282 additions and 11 deletions.
19 changes: 11 additions & 8 deletions HARK/ConsumptionSaving/ConsIndShockModel.py
@@ -68,7 +68,6 @@
AgentType,
NullFunc,
_log,
make_one_period_oo_solver,
set_verbosity_level,
)

@@ -547,7 +546,7 @@ def calc_vPPnext(S, a, R):
if CubicBool:
vPPfuncNow = MargMargValueFuncCRRA(cFuncNow, CRRA)
else:
vPPfuncNow = None # Dummy object
vPPfuncNow = NullFunc() # Dummy object

# Construct this period's value function if requested
if vFuncBool:
@@ -563,14 +562,14 @@ def calc_vPPnext(S, a, R):

# Construct the end-of-period value function
aNrm_temp = np.insert(aNrmNow, 0, BoroCnstNat)
EndOfPrdvNvrsFunc = CubicInterp(aNrm_temp, EndOfPrdvNvrs, EndOfPrdvNvrsP)
EndOfPrdvFunc = ValueFuncCRRA(EndOfPrdvNvrsFunc, CRRA)
EndOfPrd_vNvrsFunc = CubicInterp(aNrm_temp, EndOfPrdvNvrs, EndOfPrdvNvrsP)
EndOfPrd_vFunc = ValueFuncCRRA(EndOfPrd_vNvrsFunc, CRRA)

# Compute expected value and marginal value on a grid of market resources
mNrm_temp = mNrmMinNow + aXtraGrid
cNrm_temp = cFuncNow(mNrm_temp)
aNrm_temp = mNrm_temp - cNrm_temp
v_temp = uFunc(cNrm_temp) + EndOfPrdvFunc(aNrm_temp)
v_temp = uFunc(cNrm_temp) + EndOfPrd_vFunc(aNrm_temp)
vP_temp = uFunc.der(cNrm_temp)

# Construct the beginning-of-period value function
@@ -585,7 +584,7 @@ def calc_vPPnext(S, a, R):
)
vFuncNow = ValueFuncCRRA(vNvrsFuncNow, CRRA)
else:
vFuncNow = None # Dummy object
vFuncNow = NullFunc() # Dummy object

# Create and return this period's solution
solution_now = ConsumerSolution(
@@ -3241,7 +3240,9 @@ def calc_transition_matrix(self, shk_dstn=None):
if not hasattr(shk_dstn, "pmv"):
shk_dstn = self.IncShkDstn

self.cPol_Grid = [] # List of consumption policy grids for each period in T_cycle
self.cPol_Grid = (
[]
) # List of consumption policy grids for each period in T_cycle
self.aPol_Grid = [] # List of asset policy grids for each period in T_cycle
self.tran_matrix = [] # List of transition matrices

@@ -3622,7 +3623,9 @@ def J_from_F(F):
else:
peturbed_list = [getattr(self, shk_param) + dx] + (
params["T_cycle"] - 1
) * [getattr(self, shk_param)] # Sequence of interest rates the agent
) * [
getattr(self, shk_param)
] # Sequence of interest rates the agent

setattr(ZerothColAgent, shk_param, peturbed_list) # Set attribute to agent

274 changes: 271 additions & 3 deletions HARK/ConsumptionSaving/ConsPrefShockModel.py
@@ -9,7 +9,7 @@

import numpy as np

from HARK import make_one_period_oo_solver
from HARK import make_one_period_oo_solver, NullFunc
from HARK.ConsumptionSaving.ConsIndShockModel import (
ConsIndShockSolver,
ConsKinkedRsolver,
@@ -19,7 +19,7 @@
init_idiosyncratic_shocks,
init_kinked_R,
)
from HARK.distribution import MeanOneLogNormal
from HARK.distribution import MeanOneLogNormal, expected
from HARK.interpolation import (
CubicInterp,
LinearInterp,
@@ -28,6 +28,7 @@
MargValueFuncCRRA,
ValueFuncCRRA,
)
from HARK.rewards import UtilityFuncCRRA

# Make a dictionary to specify a preference shock consumer
init_preference_shocks = dict(
@@ -82,7 +83,7 @@ def __init__(self, **kwds):
params.update(kwds)

IndShockConsumerType.__init__(self, **params)
self.solve_one_period = make_one_period_oo_solver(ConsPrefShockSolver)
self.solve_one_period = solve_one_period_ConsPrefShock

def pre_solve(self):
self.update_solution_terminal()
@@ -289,6 +290,273 @@ def get_Rfree(self): # Specify which get_Rfree to use
###############################################################################


def solve_one_period_ConsPrefShock(
solution_next,
IncShkDstn,
PrefShkDstn,
LivPrb,
DiscFac,
CRRA,
Rfree,
PermGroFac,
BoroCnstArt,
aXtraGrid,
vFuncBool,
CubicBool,
):
"""
    Solves one period of a consumption-saving model with idiosyncratic shocks to
    permanent and transitory income, a multiplicative shock to utility, one risk
    free asset, and CRRA utility.

    Parameters
----------
solution_next : ConsumerSolution
The solution to the succeeding one period problem.
IncShkDstn : distribution.Distribution
A discrete approximation to the income process between the period being
solved and the one immediately following (in solution_next). Order:
permanent shocks, transitory shocks.
PrefShkDstn : distribution.Distribution
Discrete distribution of the multiplicative utility shifter. Order:
probabilities, preference shocks.
LivPrb : float
Survival probability; likelihood of being alive at the beginning of
the succeeding period.
DiscFac : float
Intertemporal discount factor for future utility.
CRRA : float
Coefficient of relative risk aversion.
Rfree : float
Risk free interest factor on end-of-period assets.
    PermGroFac : float
Expected permanent income growth factor at the end of this period.
BoroCnstArt: float or None
Borrowing constraint for the minimum allowable assets to end the
period with. If it is less than the natural borrowing constraint,
then it is irrelevant; BoroCnstArt=None indicates no artificial bor-
rowing constraint.
aXtraGrid: np.array
Array of "extra" end-of-period asset values-- assets above the
absolute minimum acceptable level.
vFuncBool: boolean
An indicator for whether the value function should be computed and
included in the reported solution.
CubicBool: boolean
An indicator for whether the solver should use cubic or linear inter-
        polation.

    Returns
-------
solution: ConsumerSolution
The solution to the single period consumption-saving problem. Includes
a consumption function cFunc (using linear splines), a marginal value
function vPfunc, a minimum acceptable level of normalized market re-
sources mNrmMin, normalized human wealth hNrm, and bounding MPCs MPCmin
and MPCmax. It might also have a value function vFunc. The consumption
function is defined over normalized market resources and the preference
shock, c = cFunc(m,PrefShk), but the (marginal) value function is defined
unconditionally on the shock, just before it is revealed.
"""
# Define the current period utility function and effective discount factor
uFunc = UtilityFuncCRRA(CRRA)
DiscFacEff = DiscFac * LivPrb # "effective" discount factor

# Unpack next period's income and preference shock distributions
ShkPrbsNext = IncShkDstn.pmv
PermShkValsNext = IncShkDstn.atoms[0]
TranShkValsNext = IncShkDstn.atoms[1]
PermShkMinNext = np.min(PermShkValsNext)
TranShkMinNext = np.min(TranShkValsNext)
PrefShkPrbs = PrefShkDstn.pmv
PrefShkVals = PrefShkDstn.atoms.flatten()

# Calculate the probability that we get the worst possible income draw
IncNext = PermShkValsNext * TranShkValsNext
WorstIncNext = PermShkMinNext * TranShkMinNext
WorstIncPrb = np.sum(ShkPrbsNext[IncNext == WorstIncNext])
# WorstIncPrb is the "Weierstrass p" concept: the odds we get the WORST thing

# Unpack next period's (marginal) value function
    vFuncNext = solution_next.vFunc  # This is a NullFunc when vFuncBool is False
    vPfuncNext = solution_next.vPfunc
    vPPfuncNext = solution_next.vPPfunc  # This is a NullFunc when CubicBool is False

# Update the bounding MPCs and PDV of human wealth:
PatFac = ((Rfree * DiscFacEff) ** (1.0 / CRRA)) / Rfree
try:
MPCminNow = 1.0 / (1.0 + PatFac / solution_next.MPCmin)
except:
MPCminNow = 0.0

Ex_IncNext = np.dot(ShkPrbsNext, TranShkValsNext * PermShkValsNext)
hNrmNow = PermGroFac / Rfree * (Ex_IncNext + solution_next.hNrm)
temp_fac = (WorstIncPrb ** (1.0 / CRRA)) * PatFac
MPCmaxNow = 1.0 / (1.0 + temp_fac / solution_next.MPCmax)

# Calculate the minimum allowable value of money resources in this period
PermGroFacEffMin = (PermGroFac * PermShkMinNext) / Rfree
BoroCnstNat = (solution_next.mNrmMin - TranShkMinNext) * PermGroFacEffMin

# Set the minimum allowable (normalized) market resources based on the natural
# and artificial borrowing constraints
if BoroCnstArt is None:
mNrmMinNow = BoroCnstNat

else:
mNrmMinNow = np.max([BoroCnstNat, BoroCnstArt])

# Set the upper limit of the MPC (at mNrmMinNow) based on whether the natural
# or artificial borrowing constraint actually binds
if BoroCnstNat < mNrmMinNow:
MPCmaxEff = 1.0 # If actually constrained, MPC near limit is 1
else:
MPCmaxEff = MPCmaxNow # Otherwise, it's the MPC calculated above

# Define the borrowing-constrained consumption function
cFuncNowCnst = LinearInterp(
np.array([mNrmMinNow, mNrmMinNow + 1.0]), np.array([0.0, 1.0])
)

# Construct the assets grid by adjusting aXtra by the natural borrowing constraint
aNrmNow = np.asarray(aXtraGrid) + BoroCnstNat

# Define local functions for taking future expectations
def calc_mNrmNext(S, a, R):
return R / (PermGroFac * S["PermShk"]) * a + S["TranShk"]

def calc_vNext(S, a, R):
return (S["PermShk"] ** (1.0 - CRRA) * PermGroFac ** (1.0 - CRRA)) * vFuncNext(

calc_mNrmNext(S, a, R)
)

def calc_vPnext(S, a, R):
return S["PermShk"] ** (-CRRA) * vPfuncNext(calc_mNrmNext(S, a, R))

def calc_vPPnext(S, a, R):
return S["PermShk"] ** (-CRRA - 1.0) * vPPfuncNext(calc_mNrmNext(S, a, R))

# Calculate end-of-period marginal value of assets at each gridpoint
vPfacEff = DiscFacEff * Rfree * PermGroFac ** (-CRRA)
EndOfPrdvP = vPfacEff * expected(calc_vPnext, IncShkDstn, args=(aNrmNow, Rfree))

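    # Invert the first-order condition eta * u'(c) = EndOfPrdvP for each preference
    # shock eta: with u'(c) = c**(-CRRA), this yields c = eta**(1.0/CRRA) * cNrm_base,
    # so the baseline EGM consumption is simply rescaled by eta**(1.0/CRRA) below.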
cNrm_base = uFunc.derinv(EndOfPrdvP, order=(1, 0))
PrefShkCount = PrefShkVals.size
PrefShk_temp = np.tile(
np.reshape(PrefShkVals ** (1.0 / CRRA), (PrefShkCount, 1)),
(1, cNrm_base.size),
)
cNrmNow = np.tile(cNrm_base, (PrefShkCount, 1)) * PrefShk_temp
mNrmNow = cNrmNow + np.tile(aNrmNow, (PrefShkCount, 1))

# Add the bottom point to the c and m arrays
m_for_interpolation = np.concatenate(
(BoroCnstNat * np.ones((PrefShkCount, 1)), mNrmNow), axis=1
)
c_for_interpolation = np.concatenate((np.zeros((PrefShkCount, 1)), cNrmNow), axis=1)

# Construct the consumption function as a cubic or linear spline interpolation
# for each value of PrefShk, interpolated across those values.
if CubicBool:
# This is not yet supported, not sure why we never got to it
        raise ValueError(
            "Cubic interpolation is not yet supported by the preference shock model!"
        )

# Make the preference-shock specific consumption functions
cFuncs_by_PrefShk = []
for j in range(PrefShkCount):
MPCmin_j = MPCminNow * PrefShkVals[j] ** (1.0 / CRRA)
cFunc_this_shk = LowerEnvelope(
LinearInterp(
m_for_interpolation[j, :],
c_for_interpolation[j, :],
intercept_limit=hNrmNow * MPCmin_j,
slope_limit=MPCmin_j,
),
cFuncNowCnst,
)
cFuncs_by_PrefShk.append(cFunc_this_shk)

# Combine the list of consumption functions into a single interpolation
cFuncNow = LinearInterpOnInterp1D(cFuncs_by_PrefShk, PrefShkVals)
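    # cFuncNow(m, PrefShk) interpolates linearly across the discrete preference shock
    # values, giving consumption conditional on the realized shock.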

# Make the ex ante marginal value function (before the preference shock)
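    # By the envelope condition, vP(m) = E_eta[eta * u'(c(m, eta))]: each shock's
    # marginal utility of consumption is weighted by its probability and by eta itself.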
m_grid = aXtraGrid + mNrmMinNow
vP_vec = np.zeros_like(m_grid)
for j in range(PrefShkCount): # numeric integration over the preference shock
vP_vec += (
uFunc.der(cFuncs_by_PrefShk[j](m_grid)) * PrefShkPrbs[j] * PrefShkVals[j]
)
vPnvrs_vec = uFunc.derinv(vP_vec, order=(1, 0))
vPfuncNow = MargValueFuncCRRA(LinearInterp(m_grid, vPnvrs_vec), CRRA)

# Define this period's marginal marginal value function
if CubicBool:
pass # This is impossible to reach right now

else:
vPPfuncNow = NullFunc() # Dummy object

# Construct this period's value function if requested
if vFuncBool:
# Calculate end-of-period value, its derivative, and their pseudo-inverse
EndOfPrdv = DiscFacEff * expected(calc_vNext, IncShkDstn, args=(aNrmNow, Rfree))
EndOfPrdvNvrs = uFunc.inv(

EndOfPrdv
) # value transformed through inverse utility
EndOfPrdvNvrsP = EndOfPrdvP * uFunc.derinv(EndOfPrdv, order=(0, 1))
EndOfPrdvNvrs = np.insert(EndOfPrdvNvrs, 0, 0.0)
EndOfPrdvNvrsP = np.insert(EndOfPrdvNvrsP, 0, EndOfPrdvNvrsP[0])

# This is a very good approximation, vNvrsPP = 0 at the asset minimum

# Construct the end-of-period value function
aNrm_temp = np.insert(aNrmNow, 0, BoroCnstNat)
EndOfPrd_vNvrsFunc = CubicInterp(aNrm_temp, EndOfPrdvNvrs, EndOfPrdvNvrsP)
EndOfPrd_vFunc = ValueFuncCRRA(EndOfPrd_vNvrsFunc, CRRA)

# Compute expected value and marginal value on a grid of market resources,
# accounting for all of the discrete preference shocks
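        # That is, v(m) = sum_j PrefShkPrbs[j] * (PrefShkVals[j] * u(c(m, PrefShkVals[j]))
        #                 + EndOfPrd_v(m - c(m, PrefShkVals[j])))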
mNrm_temp = mNrmMinNow + aXtraGrid
v_temp = np.zeros_like(mNrm_temp)
vP_temp = np.zeros_like(mNrm_temp)
for j in range(PrefShkCount):
this_shock = PrefShkVals[j]
this_prob = PrefShkPrbs[j]
cNrm_temp = cFuncNow(mNrm_temp, this_shock * np.ones_like(mNrm_temp))
aNrm_temp = mNrm_temp - cNrm_temp
v_temp += this_prob * (

this_shock * uFunc(cNrm_temp) + EndOfPrd_vFunc(aNrm_temp)
)
vP_temp += this_prob * this_shock * uFunc.der(cNrm_temp)

# Construct the beginning-of-period value function
# value transformed through inverse utility
vNvrs_temp = uFunc.inv(v_temp)
vNvrsP_temp = vP_temp * uFunc.derinv(v_temp, order=(0, 1))
mNrm_temp = np.insert(mNrm_temp, 0, mNrmMinNow)
vNvrs_temp = np.insert(vNvrs_temp, 0, 0.0)
vNvrsP_temp = np.insert(vNvrsP_temp, 0, MPCmaxEff ** (-CRRA / (1.0 - CRRA)))
MPCminNvrs = MPCminNow ** (-CRRA / (1.0 - CRRA))
vNvrsFuncNow = CubicInterp(

mNrm_temp, vNvrs_temp, vNvrsP_temp, MPCminNvrs * hNrmNow, MPCminNvrs
)
vFuncNow = ValueFuncCRRA(vNvrsFuncNow, CRRA)

else:
vFuncNow = NullFunc() # Dummy object

# Create and return this period's solution
solution_now = ConsumerSolution(
cFunc=cFuncNow,
vFunc=vFuncNow,
vPfunc=vPfuncNow,
vPPfunc=vPPfuncNow,
mNrmMin=mNrmMinNow,
hNrm=hNrmNow,
MPCmin=MPCminNow,
MPCmax=MPCmaxEff,
)
return solution_now


class ConsPrefShockSolver(ConsIndShockSolver):
"""
A class for solving the one period consumption-saving problem with risky

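For context, a minimal usage sketch of the new solver path (not part of the commit; it assumes the agent class modified above is PrefShockConsumerType and uses its default parameter dictionary):

    from HARK.ConsumptionSaving.ConsPrefShockModel import PrefShockConsumerType

    # Build an agent with the default init_preference_shocks parameters; solving it
    # now routes each period through solve_one_period_ConsPrefShock.
    agent = PrefShockConsumerType()
    agent.solve()

    # The consumption function is defined over (m, PrefShk); the value functions are not.
    cFunc = agent.solution[0].cFunc
    c = cFunc(1.5, 1.0)  # consumption at m = 1.5 under a neutral preference shock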