Posted By: Dalla
Zorro crashes without error message - advise long params array - 06/08/18 19:34
I'm trying to run the following script to train and test a Keras model through the R bridge. When training and sending the params array to R like this:
Zorro starts loading for a while, then just crashes/shuts down without any error message.
On the other hand, if I uncomment the code below
// generate the signals
And here is the R script
Code:
adviseShort(NEURAL+BALANCED,0,params,20);
Zorro starts loading for a while, then just crashes/shuts down without any error message.
On the other hand, if I uncomment the code below
// generate the signals
Code:
adviseShort(NEURAL+BALANCED,0,params[0], params[1],params[2],params[3],params[4],params[5], params[6],params[7],params[8],params[9], params[10],params[11],params[12],params[13], params[14],params[15],params[16],params[17], params[18],params[19]);
Code:
Then I'm not having any issues. I know I've made use of the params array before, but for some reason it's not working now. #include <r.h> var neural(int mode, int model, int numSignals, void* Data) { if(!wait(0)) return 0; // open an R script with the same name as the strategy script if(mode == NEURAL_INIT) { if(!Rstart(strf("%s.r",Script),0)) return 0; Rx("neural.init()"); return 1; } // export batch training samples to a file to be read by R if(mode == NEURAL_TRAIN) { string name = strf("Data\signals%i.csv",Core); file_write(name,Data,0); Rx(strf("XY <- read.csv('%s%s',header = F)",slash(ZorroFolder),slash(name))); if(!Rx(strf("neural.train(%i,XY)",model+1),2)) return 0; return 1; } // predict the target if(mode == NEURAL_PREDICT) { Rset("X",(double*)Data,numSignals); Rx(strf("Y <- neural.predict(%i,X)",model+1)); var pred = Rd("Y[1]"); return pred; } // save all trained models if(mode == NEURAL_SAVE) { print(TO_ANY,"nStore %s",strrchr(Data,'\')+1); return Rx(strf("neural.save('%s')",slash(Data)),3); } // load all trained models if(mode == NEURAL_LOAD) { printf("nLoad %s",strrchr(Data,'\')+1); timer(); int neuralLoadReturnCode = Rx(strf("neural.load('%s')",slash(Data)),3); printf("nNEURAL_LOAD call time = %.3f ms",timer()); return neuralLoadReturnCode; } return 1; } int timezone = CET; var changePrice(int n) { return scale((price(0) - price(n))/price(0),100)/100; } var range(int n) { return scale((HH(n) - LL(n))/priceClose(0),100)/100; } var vol(int n) { return scale((marketVol(0) - marketVol(n))/marketVol(0),100)/100; } /////////////////////////////////////////////////////////////////////// function run() { set(PLOTNOW+RULES+LOGFILE+OPENEND); LookBack = 200; BarPeriod = 60; // use slider StartDate = 2006; // start date period EndDate = 2018; // fixed simulation period Weekend = 1; BarOffset = 55; asset("EUR/USD"); NumCores = -1; Verbose = 7; WFOPeriod = 30*24*BarPeriod; // Define parameters int hours = 10; LifeTime = 2; // no trade costs... 
Spread = Slippage = Commission = RollLong = RollShort = 0; var params[20]; int i; for(i=1; i<21; i++) { params[i-1] = changePrice(i); } ´ if (Train) { adviseShort(NEURAL+BALANCED,0,params,20); enterShort(); } else { if(lhour(timezone) == hours) { if (adviseShort(NEURAL+BALANCED,0,params,20) > 0.5) { enterShort(); } } } // generate the signals /* if (Train) { adviseShort(NEURAL+BALANCED,0,params[0], params[1],params[2],params[3],params[4],params[5], params[6],params[7],params[8],params[9], params[10],params[11],params[12],params[13], params[14],params[15],params[16],params[17], params[18],params[19]); enterShort(); } else { if(lhour(timezone) == hours) { if (adviseShort(NEURAL+BALANCED,0,params[0], params[1],params[2],params[3],params[4],params[5], params[6],params[7],params[8],params[9], params[10],params[11],params[12],params[13], params[14],params[15],params[16],params[17], params[18],params[19]) > 0.5) { enterShort(); } } } */ }
And here is the R script:
Code:
### Keras ###

# Install any missing packages, then attach them.
required.packages <- c('keras', 'caret', 'ggplot2')
new.packages <- required.packages[!(required.packages %in% installed.packages()[, "Package"])]
if (length(new.packages)) install.packages(new.packages, repos = 'http://cran.us.r-project.org')
library(keras, quietly = TRUE)
library(caret, quietly = TRUE)
library(ggplot2, quietly = TRUE)

# Called once by Zorro (NEURAL_INIT): fix the RNG seed and reset the
# global list of trained models.
neural.init <- function() {
  set.seed(503)
  Models <<- vector("list")
}

# Called per WFO cycle (NEURAL_TRAIN): train model number `model` on the
# sample matrix XY (last column = target, remaining columns = signals).
neural.train <- function(model, XY) {
  # FIX: as.matrix() has no `ncol` argument; the original
  # `as.matrix(XY, ncol = ncol(XY))` silently discarded it via `...`.
  XY <- as.matrix(XY)
  X <- XY[, -ncol(XY)]
  Y <- XY[, ncol(XY)]
  # convert target to binary
  Y <- ifelse(Y > 0, 1, 0)
  # build network architecture
  mod <- keras_model_sequential()
  mod %>%
    layer_dense(units = 20, activation = 'relu', input_shape = c(ncol(X))) %>%
    layer_dropout(rate = 0.2) %>%
    layer_dense(units = 20, activation = 'relu') %>%
    layer_dropout(rate = 0.2) %>%
    layer_dense(units = 20, activation = 'relu') %>%
    layer_dropout(rate = 0.2) %>%
    layer_dense(units = 1, activation = 'sigmoid')
  mod %>% compile(
    loss = 'binary_crossentropy',
    optimizer = optimizer_rmsprop(),
    metrics = c('accuracy')
  )
  history <- mod %>% fit(
    X, Y,
    epochs = 100,
    batch_size = 1000,
    validation_split = 0,
    shuffle = FALSE
  )
  # Keras models are external pointers; serialize so they survive save()/load().
  Models[[model]] <<- serialize_model(mod, include_optimizer = TRUE)
}

# Called per bar (NEURAL_PREDICT): return the class-1 probability for a
# single signal vector X.
neural.predict <- function(model, X) {
  # FIX: as.matrix() ignores `nrow` (passed via `...`), so the original
  # `as.matrix(X, nrow = 1)` produced an n x 1 column matrix, while the
  # network expects one row of ncol(X) features -- the commented-out
  # predict_classes(t(X)) line below used t(X) for exactly that reason.
  X <- matrix(X, nrow = 1)
  m <- Models[[model]]
  #pred <- m %>% predict_classes(t(X))
  pred <- m %>% predict_proba(X)
  return(pred)
}

# Called at end of training (NEURAL_SAVE): store the serialized model list.
neural.save <- function(name) {
  save(Models, file = name)
  #save_model_weights_hdf5(Models,file=name)
  #model_json = model_to_json(Models)
  #write(model_json, file=name)
}

# Called at test start (NEURAL_LOAD): restore the serialized model list and
# unserialize each entry back into a live Keras model.
neural.load <- function(name) {
  #model_from_json(name, custom_objects = NULL)
  k_clear_session()
  load(name)  # restores the serialized `Models` into this frame
  for (i in seq_along(Models)) {
    print(i)
    Models[[i]] <<- unserialize_model(Models[[i]], custom_objects = NULL, compile = NULL)
  }
}

# Offline hyperparameter-tuning helper (not called by Zorro): trains on an
# exported signal file and writes the training-history plot to a PNG.
hp.tune <- function(dropout, num_epochs, filename) {
  neural.init()
  # load
  path <- "C:/Zorro/Data/"
  XY <<- read.csv(paste0(path, 'adv_69_export_dataEURUSD_L.csv'), header = F)
  XY <- data.frame(XY)  # convert to data frame. some models will need a matrix, but df OK for caret
  # convert target to factor
  XY[, ncol(XY)] <- ifelse(XY[, ncol(XY)] > 0, 1, 0)
  XY[, ncol(XY)] <- as.factor(XY[, ncol(XY)])
  # split into training and test sets
  training.samples <- createDataPartition(XY[, ncol(XY)], p = 0.75, list = FALSE)
  XY.train <- XY[training.samples, ]
  XY.test <- XY[-training.samples, ]
  X_train <- as.matrix(XY.train[, -ncol(XY)])
  Y_train <- as.matrix(XY.train[, ncol(XY)])
  X_test <- as.matrix(XY.test[, -ncol(XY)])
  Y_test <- as.matrix(XY.test[, ncol(XY)])
  # scale features (fit scaler on the training set only, apply to both)
  scaler <- preProcess(X_train, method = c("center", "scale"))
  X_train <- predict(scaler, X_train)
  X_test <- predict(scaler, X_test)
  # build network architecture
  model <- keras_model_sequential()
  model %>%
    layer_dense(units = 20, activation = 'relu', input_shape = c(ncol(X_train))) %>%
    layer_dropout(rate = dropout) %>%
    layer_dense(units = 20, activation = 'relu') %>%
    layer_dropout(rate = dropout) %>%
    layer_dense(units = 20, activation = 'relu') %>%
    layer_dropout(rate = dropout) %>%
    layer_dense(units = 1, activation = 'sigmoid')
  model %>% compile(
    loss = 'binary_crossentropy',
    optimizer = optimizer_rmsprop(),
    metrics = c('accuracy')
  )
  history <- model %>% fit(
    X_train, Y_train,
    epochs = num_epochs,
    batch_size = 1000,
    validation_split = 0.33,
    shuffle = FALSE
  )
  train_val <- plot(history)
  png(paste0(path, filename, ".png"))
  print(train_val)
  dev.off()
}

# Out-of-sample sanity check (not called by Zorro): train on the first 80%
# of an exported signal file, predict the remainder, print a confusion matrix.
TestOOS <- function() {
  neural.init()
  #XY <<- read.csv('Y:/Zorro/Data/OvernightEffectsDeepLearnUSDJPY_S.csv',header = F)
  XY <<- read.csv('Y:/Zorro/Data/AL6_Season_Signals_EURUSD.csv', header = F)
  # FIX: nrow(XY)*0.8 is usually fractional; floor() makes the implicit
  # truncation by head()/tail() explicit.
  splits <- floor(nrow(XY) * 0.8)
  XY.tr <<- head(XY, splits)
  XY.ts <<- tail(XY, -splits)
  neural.train(2, XY.tr)
  XY.ts <- as.matrix(XY.ts)
  X <<- XY.ts[, -ncol(XY.ts)]
  Y <<- XY.ts[, ncol(XY.ts)]
  Y.ob <<- ifelse(Y > 0, 1, 0)
  ar <- array(X, c(dim(X)[1], 10, 4))  # NOTE(review): unused below -- leftover from an RNN experiment? confirm
  mod <<- unserialize_model(Models[[2]], custom_objects = NULL, compile = NULL)
  Y <<- mod %>% predict_classes(X)
  Y.pr <<- ifelse(Y > 0.5, 1, 0)
  confusionMatrix(as.factor(Y.pr), as.factor(Y.ob))
}