Commit e74e9477 authored by Simon Ciranka

added confirmatory/chosenunchosenAsym model; trial-wise model evaluation; new parameter recovery; updated figures

parent 04aa2c6c
#########
######## Define Parameter Boundaries
########
# parameter order: lrup; lrdown; itemTI; pairlr; pairTI; noisePair; noiseItem
ParamBoundaries=list()
#Only Itemlevel Asym (A)
ParamBoundaries[[1]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(0.2,0.2,1) # upper bounds
)
#Only Itemlevel Sym (B)
ParamBoundaries[[2]]<-tibble(
lb=c(0,0), # lower bounds
ub=c(0.2,1) # upper bounds
)
#Only Item TI Asym (C)
ParamBoundaries[[3]]<-tibble(
lb=c(0,0,0,0), # lower bounds
ub=c(0.2,0.2,10,1) # upper bounds
)
#Only Item TI Sym (D)
ParamBoundaries[[4]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(0.2,10,1) # upper bounds
)
#Only Pairs (E)
ParamBoundaries[[5]]<-tibble(
lb=c(0,0), # lower bounds
ub=c(1,1) # upper bounds
)
#Pair TI (F)
ParamBoundaries[[6]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(1,100,1) # upper bounds
)
# mixture Item TI & Pair _NO_ TI & ASYM learning rate for items (G)
ParamBoundaries[[7]]<-tibble(
lb=c(0,0,0,0,0,0), # lower bounds
ub=c(0.2,0.2,10,1,1,1) # upper bounds
)
# mixture Item TI & Pair _NO_ TI & SYM learning rate for items (H)
ParamBoundaries[[8]]<-tibble(
lb=c(0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,1,1) # upper bounds
)
# mixture Item TI & Pair TI & ASYM learning rate for items (I)
ParamBoundaries[[9]]<-tibble(
lb=c(0,0,0,0,0,0,0), # lower bounds
ub=c(0.2,0.2,10,1,100,1,1) # upper bounds
)
# mixture Item TI & Pair TI & SYM learning rate for items (J)
ParamBoundaries[[10]]<-tibble(
lb=c(0,0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,100,1,1) # upper bounds
)
# confirmatory chosen/unchosen asymmetric learning-rate model, added in this commit (K)
ParamBoundaries[[11]]<-tibble(
lb=c(0,0,0,0), # lower bounds
ub=c(0.2,0.2,1,1) # upper bounds
)
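# Hedged illustration (assumption): if the parameter order named at the top of this
# file (lrup; lrdown; itemTI; pairlr; pairTI; noisePair; noiseItem) applies to the
# full mixture model I, its bounds in ParamBoundaries[[9]] can be read as a table.
# For orientation only; not used by the fitting code below.
if (FALSE) { # not run
  tibble(
    param = c("lrup","lrdown","itemTI","pairlr","pairTI","noisePair","noiseItem"),
    lb    = ParamBoundaries[[9]]$lb,
    ub    = ParamBoundaries[[9]]$ub
  )
}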
FitModel<-function(model,data,modelcode,cv){
print(model)
# select the boundaries from the list that defines the models used for model comparison
data_long_Choice10=data
lb=ParamBoundaries[[model]]$lb
ub=ParamBoundaries[[model]]$ub
print(lb)
print(ub)
MLE_TI=modelcode
#browser()
parametersAll<-foreach(sub = unique(data_long_Choice10$subject),.combine = 'rbind')%dopar% {
library(DEoptim)
library(pracma)
library(tidyverse)
# sub=9
#vector that tells the algorithm whether it should fit the asym (0) or the sym (1) model
asymParam=c(0,1,0,1,1,1,0,1,0,1,0,1)
#for (sub in unique(data_long_Choice10$subject)){
#concatenate trial values (stimulus pair shown on each trial)
trials=cbind(data_long_Choice10[data_long_Choice10$subject==sub,]$stim1,
data_long_Choice10[data_long_Choice10$subject==sub,]$stim2)
#array of feedback
###
### code whether the feedback was correct or not, assuming that subjects update correctly
###
fb=data_long_Choice10[data_long_Choice10$subject==sub,]$FBtru
#get the recoded choice
if (is.null(cv)){# do you fit subject data?
choice=data_long_Choice10[data_long_Choice10$subject==sub,]$choice_right1wrong0
}else{
#select the simulated data column corresponding to the model of interest.
choice=data_long_Choice10[data_long_Choice10$subject==sub,]%>%select(cv)
}
data=data.frame(
trials,
FBtru=fb,
subcho=choice,
asym=asymParam[model]
)%>%magrittr::set_colnames(c("X1","X2","FBtru","subcho","asym"))
print(sub)
set.seed(1234) # seed before DEoptim so the stochastic optimization is reproducible
opt<-DEoptim(MLE_TI,lower=lb,upper=ub,# give reasonable parameter boundaries (or fix them)
data=data#,control=DEoptim.control(itermax = 50)
)
# check whether results look different with another optimizer (commented alternatives below)
#browser()
parameters<-opt$optim$bestmem%>%as_tibble()%>%
magrittr::set_colnames(c("est"))%>%
mutate(subject=sub,
Gsq=opt$optim$bestval,
model=model,
par=row_number()
)
#browser()
#starting values Berni used (original MATLAB fmincon call, kept for reference):
#opt<- fmincon(@(b),MLE_TI(b,trials,FBtru,svals,subcho),startingvals,NULL,NULL,NULL,NULL,lb[c(1,2,6)],ub[c(1,2,6)])
#unlike Berni, starting values here were set to a parameter bound:
#opt<-fmincon(x0=(lb),fn=MLE_TI,data=data,lb=lb,ub=ub,maxiter = 10000,maxfeval=10000)
#browser()
#leftover arguments from an earlier call signature, kept for reference:
# trials=trials,
# FBtru=fb,
# svals=c(1,1),#uninformative prior on betas
# subcho=choice
# # control=
# )
# opt<- optim(par=lb,MLE_TI, data=data,
# method = c("L-BFGS-B"),
# lower = lb, upper = ub
# )
#browser()
# parameters<-opt$par%>%as_tibble()%>%
# magrittr::set_colnames(c("est"))%>%
# mutate(subject=sub,
# Gsq=opt$value,
# model=model,
# par=row_number()
# )
return(parameters)
}
return(parametersAll)
}
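# Usage sketch (assumptions: a parallel backend has been registered for %dopar%,
# and an objective function -- referred to as MLE_TI inside FitModel -- is defined
# elsewhere in this repository with the signature function(par, data), returning
# the value DEoptim should minimize). The column name "simChoice1" is hypothetical.
if (FALSE) { # not run
  library(doParallel)
  registerDoParallel(cores = 4)
  # fit subjects' observed choices with model 1
  fitsA  <- FitModel(model = 1, data = data_long_Choice10, modelcode = MLE_TI, cv = NULL)
  # fit choices simulated from model 1 (parameter/model recovery)
  recovA <- FitModel(model = 1, data = data_long_Choice10, modelcode = MLE_TI, cv = "simChoice1")
}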
#########
######## Define Parameter Boundaries
########
# parameter order: lrup; lrdown; itemTI; pairlr; pairTI; noisePair; noiseItem
ParamBoundaries=list()
#Only Itemlevel Asym (A)
ParamBoundaries[[1]]<-tibble(
lb=c(0,0), # lower bounds
ub=c(0.2,1) # upper bounds
)
#Only Itemlevel Sym (B)
ParamBoundaries[[2]]<-tibble(
lb=c(0,0), # lower bounds
ub=c(0.2,1) # upper bounds
)
#Only Item TI Asym (C)
ParamBoundaries[[3]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(0.2,10,1) # upper bounds
)
#Only Item TI Sym (D)
ParamBoundaries[[4]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(0.2,10,1) # upper bounds
)
#Only Pairs (E)
ParamBoundaries[[5]]<-tibble(
lb=c(0,0), # lower bounds
ub=c(1,1) # upper bounds
)
#Pair TI (F)
ParamBoundaries[[6]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(1,100,1) # upper bounds
)
# mixture Item TI & Pair _NO_ TI & ASYM learning rate for items (G)
ParamBoundaries[[7]]<-tibble(
lb=c(0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,1,1) # upper bounds
)
# mixture Item TI & Pair _NO_ TI & SYM learning rate for items (H)
ParamBoundaries[[8]]<-tibble(
lb=c(0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,1,1) # upper bounds
)
# mixture Item TI & Pair TI & ASYM learning rate for items (I)
ParamBoundaries[[9]]<-tibble(
lb=c(0,0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,100,1,1) # upper bounds
)
# mixture Item TI & Pair TI & SYM learning rate for items (J)
ParamBoundaries[[10]]<-tibble(
lb=c(0,0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,100,1,1) # upper bounds
)
FitModel<-function(model,data,modelcode,cv){
print(model)
# select the boundaries from the list that defines the models used for model comparison
data_long_Choice10=data
lb=ParamBoundaries[[model]]$lb
ub=ParamBoundaries[[model]]$ub
print(lb)
print(ub)
MLE_TI=modelcode
#browser()
parametersAll<-foreach(sub = unique(data_long_Choice10$subject),.combine = 'rbind')%dopar% {
library(DEoptim)
library(pracma)
library(tidyverse)
# sub=9
#vector that tells the algorithm whether it should fit the asym (0) or the sym (1) model
asymParam=c(0,1,0,1,1,1,0,1,0,1,0)
#for (sub in unique(data_long_Choice10$subject)){
#concatenate trial values (stimulus pair shown on each trial)
trials=cbind(data_long_Choice10[data_long_Choice10$subject==sub,]$stim1,
data_long_Choice10[data_long_Choice10$subject==sub,]$stim2)
#array of feedback
###
### code whether the feedback was correct or not, assuming that subjects update correctly
###
fb=data_long_Choice10[data_long_Choice10$subject==sub,]$FBtru
#get the recoded choice
if (is.null(cv)){# do you fit subject data?
choice=data_long_Choice10[data_long_Choice10$subject==sub,]$choice_right1wrong0
}else{
#select the simulated data column corresponding to the model of interest.
choice=data_long_Choice10[data_long_Choice10$subject==sub,]%>%select(cv)
}
data=data.frame(
trials,
FBtru=fb,
subcho=choice,
asym=asymParam[model]
)%>%magrittr::set_colnames(c("X1","X2","FBtru","subcho","asym"))
print(sub)
set.seed(1234) # seed before DEoptim so the stochastic optimization is reproducible
opt<-DEoptim(MLE_TI,lower=lb,upper=ub,# give reasonable parameter boundaries (or fix them)
data=data#,control=DEoptim.control(itermax = 50)
)
# check whether results look different with another optimizer (commented alternatives below)
#browser()
parameters<-opt$optim$bestmem%>%as_tibble()%>%
magrittr::set_colnames(c("est"))%>%
mutate(subject=sub,
Gsq=opt$optim$bestval,
model=model,
par=row_number()
)
#browser()
#starting values Berni used (original MATLAB fmincon call, kept for reference):
#opt<- fmincon(@(b),MLE_TI(b,trials,FBtru,svals,subcho),startingvals,NULL,NULL,NULL,NULL,lb[c(1,2,6)],ub[c(1,2,6)])
#unlike Berni, starting values here were set to a parameter bound:
#opt<-fmincon(x0=(lb),fn=MLE_TI,data=data,lb=lb,ub=ub,maxiter = 10000,maxfeval=10000)
#browser()
#leftover arguments from an earlier call signature, kept for reference:
# trials=trials,
# FBtru=fb,
# svals=c(1,1),#uninformative prior on betas
# subcho=choice
# # control=
# )
# opt<- optim(par=lb,MLE_TI, data=data,
# method = c("L-BFGS-B"),
# lower = lb, upper = ub
# )
#browser()
# parameters<-opt$par%>%as_tibble()%>%
# magrittr::set_colnames(c("est"))%>%
# mutate(subject=sub,
# Gsq=opt$value,
# model=model,
# par=row_number()
# )
return(parameters)
}
return(parametersAll)
}
FitModelELOVAT<-function(model,data,modelcode,cv){
print(model)
# select the boundaries from the list that defines the models used for model comparison
data_long_Choice10=data
lb=ParamBoundaries[[model]]$lb
ub=ParamBoundaries[[model]]$ub
print(lb)
print(ub)
MLE_TI=modelcode
#browser()
parametersAll<-foreach(sub = unique(data_long_Choice10$subject),.combine = 'rbind')%dopar% {
# for(sub in unique(data_long_Choice10$subject)){
library(DEoptim)
library(pracma)
library(tidyverse)
# sub=9
#vector that tells the algorithm whether it should fit the asym (0) or the sym (1) model
asymParam=c(0,1,1,0,1,1,0)
#for (sub in unique(data_long_Choice10$subject)){
#concatenate trial values (stimulus pair shown on each trial)
trials=cbind(data_long_Choice10[data_long_Choice10$subject==sub,]$stim1,
data_long_Choice10[data_long_Choice10$subject==sub,]$stim2)
#array of feedback
###
### code whether the feedback was correct or not, assuming that subjects update correctly
###
fb=data_long_Choice10[data_long_Choice10$subject==sub,]$FBtru
#get the recoded choice
if (is.null(cv)){# do you fit subject data?
choice=data_long_Choice10[data_long_Choice10$subject==sub,]$choice_right1wrong0
}else{
#select the simulated data column corresponding to the model of interest.
choice=data_long_Choice10[data_long_Choice10$subject==sub,]%>%select(cv)
}
data=data.frame(
trials,
FBtru=fb,
subcho=choice,
asym=asymParam[model]
)%>%magrittr::set_colnames(c("X1","X2","FBtru","subcho","asym"))
print(sub)
set.seed(1234) # seed before DEoptim so the stochastic optimization is reproducible
opt<-DEoptim(MLE_TI,lower=lb,upper=ub,# give reasonable parameter boundaries (or fix them)
data=data#,control=DEoptim.control(itermax = 50)
)
# check whether results look different with another optimizer (commented alternatives below)
#browser()
parameters<-opt$optim$bestmem%>%as_tibble()%>%
magrittr::set_colnames(c("est"))%>%
mutate(subject=sub,
Gsq=opt$optim$bestval,
model=model,
par=row_number()
)
#browser()
#starting values Berni used (original MATLAB fmincon call, kept for reference):
#opt<- fmincon(@(b),MLE_TI(b,trials,FBtru,svals,subcho),startingvals,NULL,NULL,NULL,NULL,lb[c(1,2,6)],ub[c(1,2,6)])
#unlike Berni, starting values here were set to a parameter bound:
#opt<-fmincon(x0=(lb),fn=MLE_TI,data=data,lb=lb,ub=ub,maxiter = 10000,maxfeval=10000)
#browser()
#leftover arguments from an earlier call signature, kept for reference:
# trials=trials,
# FBtru=fb,
# svals=c(1,1),#uninformative prior on betas
# subcho=choice
# # control=
# )
# opt<- optim(par=lb,MLE_TI, data=data,
# method = c("L-BFGS-B"),
# lower = lb, upper = ub
# )
#browser()
# parameters<-opt$par%>%as_tibble()%>%
# magrittr::set_colnames(c("est"))%>%
# mutate(subject=sub,
# Gsq=opt$value,
# model=model,
# par=row_number()
# )
return(parameters)
}
return(parametersAll)
}
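# Usage sketch for FitModelELOVAT, analogous to FitModel above (assumptions: a
# registered parallel backend, an objective function passed via `modelcode`, and,
# for parameter recovery, a column of simulated choices whose name is passed as
# `cv`; the column name below is hypothetical).
if (FALSE) { # not run
  recovELO <- FitModelELOVAT(model = 3, data = data_long_Choice10,
                             modelcode = MLE_TI, cv = "simChoice_model3")
}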
#########
######## Define Parameter Boundaries
########
# parameter order: lrup; lrdown; itemTI; pairlr; pairTI; noisePair; noiseItem
ParamBoundaries=list()
#Only Itemlevel Asym (A)
ParamBoundaries[[1]]<-tibble(
lb=c(0,0,0,0), # lower bounds
ub=c(0.2,0.2,1,10) # upper bounds
)
#Only Itemlevel Sym (B)
ParamBoundaries[[2]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(0.2,1,10) # upper bounds
)
#Only Item TI Asym (C)
ParamBoundaries[[3]]<-tibble(
lb=c(0,0,0,0,0), # lower bounds
ub=c(0.2,0.2,10,1,10) # upper bounds
)
#Only Item TI Sym (D)
ParamBoundaries[[4]]<-tibble(
lb=c(0,0,0,0), # lower bounds
ub=c(0.2,10,1,10) # upper bounds
)
#Only Pairs (E)
ParamBoundaries[[5]]<-tibble(
lb=c(0,0), # lower bounds
ub=c(1,1) # upper bounds
)
#Pair TI (F)
ParamBoundaries[[6]]<-tibble(
lb=c(0,0,0), # lower bounds
ub=c(1,100,1) # upper bounds
)
# mixture Item TI & Pair _NO_ TI & ASYM learning rate for items (G)
ParamBoundaries[[7]]<-tibble(
lb=c(0,0,0,0,0,0,0), # lower bounds
ub=c(0.2,0.2,10,1,1,1,10) # upper bounds
)
# mixture Item TI & Pair _NO_ TI & SYM learning rate for items (H)
ParamBoundaries[[8]]<-tibble(
lb=c(0,0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,1,1,10) # upper bounds
)
# mixture Item TI & Pair TI & ASYM learning rate for items (I)
ParamBoundaries[[9]]<-tibble(
lb=c(0,0,0,0,0,0,0,0), # lower bounds
ub=c(0.2,0.2,10,1,100,1,1,10) # upper bounds
)
# mixture Item TI & Pair TI & SYM learning rate for items (J)
ParamBoundaries[[10]]<-tibble(
lb=c(0,0,0,0,0,0,0), # lower bounds
ub=c(0.2,10,1,100,1,1,10) # upper bounds
)
FitModel<-function(model,data,modelcode,cv){
print(model)
# select the boundaries from the list that defines the models used for model comparison
data_long_Choice10=data
lb=ParamBoundaries[[model]]$lb
ub=ParamBoundaries[[model]]$ub
print(lb)
print(ub)
MLE_TI=modelcode
#browser()
parametersAll<-foreach(sub = unique(data_long_Choice10$subject),.combine = 'rbind')%dopar% {
library(DEoptim)
library(pracma)
library(tidyverse)
# sub=9
#vector that tells the algorithm whether it should fit the asym (0) or the sym (1) model
asymParam=c(0,1,0,1,1,1,0,1,0,1,0)
#for (sub in unique(data_long_Choice10$subject)){
#concatenate trial values (stimulus pair shown on each trial)
trials=cbind(data_long_Choice10[data_long_Choice10$subject==sub,]$stim1,
data_long_Choice10[data_long_Choice10$subject==sub,]$stim2)
#array of feedback
###
### code whether the feedback was correct or not, assuming that subjects update correctly
###
fb=data_long_Choice10[data_long_Choice10$subject==sub,]$FBtru
#get the recoded choice
if (is.null(cv)){# do you fit subject data?
choice=data_long_Choice10[data_long_Choice10$subject==sub,]$choice_right1wrong0
}else{
#select the simulated data column corresponding to the model of interest.
choice=data_long_Choice10[data_long_Choice10$subject==sub,]%>%select(cv)
}
data=data.frame(
trials,
FBtru=fb,
subcho=choice,
asym=asymParam[model]
)%>%magrittr::set_colnames(c("X1","X2","FBtru","subcho","asym"))
print(sub)
set.seed(1234) # seed before DEoptim so the stochastic optimization is reproducible
opt<-DEoptim(MLE_TI,lower=lb,upper=ub,# give reasonable parameter boundaries (or fix them)
data=data#,control=DEoptim.control(itermax = 50)
)
# check whether results look different with another optimizer (commented alternatives below)
#browser()
parameters<-opt$optim$bestmem%>%as_tibble()%>%
magrittr::set_colnames(c("est"))%>%
mutate(subject=sub,
Gsq=opt$optim$bestval,
model=model,
par=row_number()
)
#browser()
#starting values Berni used (original MATLAB fmincon call below, kept for reference);
#unlike Berni, starting values here were set to a parameter bound
#opt<- fmincon(@(b),MLE_TI(b,trials,FBtru,svals,subcho),startingvals,NULL,NULL,NULL,NULL,lb[c(1,2,6)],ub[c(1,2,6)])