' ============================================================================
' LAGSPACE.BAS -- fits a single-hidden-layer neural network to a scalar time
' series and estimates the sensitivity of the prediction to each time lag
' (the "lag space").  PowerBASIC source: DEFEXT => 80-bit extended reals;
' GRAPHIC/CONSOLE statements => PowerBASIC console compiler.  Global c()
' holds coefficients for the polynomial/rational activations (cases P/Q/R/S
' of phi/dphi below).
'
' NOTE(review): this copy of the file has been mangled in transit.  The
' original line breaks were lost (many statements are fused onto a few very
' long physical lines) and several spans of text -- apparently everything
' between a "<" comparison and a later ">" -- were eaten, leaving fragments
' such as "IF RND2)" further on.  The code is preserved verbatim; the added
' comments describe intent where it can still be inferred.  Recover an
' unmangled copy before attempting to compile.
'
' This first section: header comments and compiler directives, then PBMAIN
' sets the training defaults -- eta (annealing shrink rate for the search
' neighborhood), cmax& (trials per training cycle), re& (decades of error
' plotted), bt&/ct& (bias/constant-term toggles), dwm (maximum weight
' perturbation), ntest&/nsave& (points withheld / predictions saved),
' nudge (pull of parameters back toward zero), pruning& and sn& (sound) --
' takes the data filename from the command line, and prints the banner.
' The second physical line then prompts for run parameters (GOSUB 20..70),
' and labels 90/95 begin a training cycle: nc& = coefficient count for the
' chosen activation f$, working arrays are REDIMed (hard cap of 100
' neurons), the data file is read (FILESCAN counts its records), the last
' ntest& points are withheld for out-of-sample testing, log-log axes for
' the error-history graph are drawn, and the main DO loop starts each
' cycle from the best weights found so far; neurons&/dims&/countnd& grow
' the network toward n& and d& gradually, cycle by cycle.
'Program LAGSPACE.BAS fits neural net to time series and estimates sensitivities to each time lag 'Input data from text file '#DEBUG ERROR ON 'These are for debugging '#DEBUG DISPLAY ON #RESOURCE ICON, LagSpaceIcon, "lagspace.ico" DEFEXT a-z 'Do all calculations in extended (80-bit) precision GLOBAL c() AS EXT FUNCTION PBMAIN DIM c(6) REGISTER j&,i&,arg##,x## 'These variables are held in CPU registers for optimum speed FONT NEW "symbol", 9 TO symb& 'Need this for Greek characters 10 'restart ver$="1.3.5"'Program version number eta=.999 'Learning rate cmax&=1e7 'Number of iterations re&=8 'Range of error (decades) bt&=1 '0 for bias term; otherwise 1 ct&=0 '0 for constant term; otherwise 1 dwm=2 'Maximum perturbation ntest&=18 'Withhold this many points from training for testing nsave&=18 'Save this many future predicted points nudge=0.5## 'Amount to nudge the parameters back toward zero pruning&=0 'Pruning level (0 = no pruning) sn&=0 '0 for no sound; otherwise 1 fil$=COMMAND$ COLOR 15,1 CLS PRINT ,," Program LagSpace" PRINT ,,SPACE$(8-LEN(ver$)/2)+"Version "+ver$ PRINT ,,"(c)2010-"+RIGHT$(DATE$,4)+" J. C. 
Sprott" PRINT 'set parameters GOSUB 20 GOSUB 30 GOSUB 40 GOSUB 50 GOSUB 60 CLS GOSUB 70 90 'continue ddw=dwm: verybest=0##: ypverybest=0## 95 'continue SELECT CASE AS CONST$ f$ CASE "P": nc&=3 CASE "Q": nc&=6 CASE "R": nc&=6 CASE "S": nc&=3 CASE ELSE: nc&=0 END SELECT cmaxo&=1e3 ebest=50*ebest nts&=MAX(ntest&,nsave&) REDIM PRESERVE averybest(100,d&),prune&(100,d&),bverybest(100),cverybest(nc&) 'Maximum of 100 neurons REDIM a(n&,d&),b(n&),abest(n&,d&),bbest(n&),cbest(nc&) REDIM xpred(-d& TO nts&),sig(d&),sgnsig(d&),ypbest(640) REDIM xlast(d&),dx(d&),le(d&),letot(d&),levar(d&),ledev&(d&) OPEN fil$ FOR INPUT AS #1 FILESCAN #1, RECORDS TO nmax& REDIM x(nmax&),ex(nmax&),exbest(nmax&) FOR k&=1 TO nmax& INPUT#1,x(k&) NEXT k& CLOSE #1 file$=MID$(fil$,INSTR(-1,fil$,"\")+1) screen12(file$,ver$) PRINT" Trials Epsilon Error" file$=EXTRACT$(file$,".") nmax&=nmax&-ntest& 'Don't fit the last ntest& data points so they can be used to test the prediction xmax=0##: numle&=0 FOR k&=1 TO nmax&+ntest&: xmax=MAX(xmax,ABS(x(k&))): NEXT k& 'Calculate range of data GRAPHIC BOX(39,0)-(639,460) FOR i&=1 TO re& GRAPHIC LINE(40,459*i&/re&)-(50,459*i&/re&) GRAPHIC LINE(628,459*i&/re&)-(638,459*i&/re&) NEXT i& FOR i&=1 TO LOG10(cmax&) GRAPHIC LINE(39+599*i&/LOG10(cmax&),1)-(39+599*i&/LOG10(cmax&),11) GRAPHIC LINE(39+599*i&/LOG10(cmax&),449)-(39+599*i&/LOG10(cmax&),459) NEXT i& GRAPHIC SET POS(20,0) GRAPHIC PRINT"1" GRAPHIC SET POS(20,222) GRAPHIC PRINT"e" GRAPHIC SET POS(0,444) GRAPHIC PRINT USING$("##^^^^",10^-re&) GRAPHIC SET POS(40,462) GRAPHIC PRINT"1" GRAPHIC SET POS(322,462) GRAPHIC PRINT"Trials" GRAPHIC SET POS(600,462) GRAPHIC PRINT USING$("##^^^^",cmax&) RANDOMIZE TIMER tstart=TIMER IF ISFILE(file$+".txt") THEN KILL file$+".txt" q$="": xpcur&=-10 neurons&=n&: dims&=d&: countnd&=1 'Allows for introducing n and d gradually DO IF verybest=0## THEN ebest=1000 ELSE ebest=10*verybest: ddw=MIN(dwm,SQR(verybest)) bbest(0)=bverybest(0) FOR i&=1 TO n& FOR j&=0 TO d& abest(i&,j&)=averybest(i&,j&) 
' Main training loop, part 1 (inside DO ... FOR c&=1 TO cmax&).  Each trial
' first polls the keyboard: single-letter commands jump (GOTO label 90/95 or
' GOSUB 20-85) to the parameter-setting routines; the space bar prints this
' command menu and waits for a key; ESC exits the loop to the prediction
' screen.  After command handling, candidate weights a(i&,j&) and b(i&) are
' formed by perturbing the current best values with gauss2 noise of size
' ddw: each parameter changes with probability ~pc (pc is 1/sqrt of the
' count of unpruned parameters), is nudged back toward zero by "nudge", and
' the neighborhood for lag j& is shrunk by up to a factor of 32 (dj) so
' long lags are explored more cautiously.
' NOTE(review): the span from "IF RND2)" through "improved&=0 THEN ddw=-ddw"
' is corrupted -- the error-evaluation pass (accumulate e over the training
' points with exponent e&, compare against ebest, keep the better weights)
' has been eaten by the mangling; confirm against an unmangled copy.  What
' survives is the annealing bookkeeping: on a failed trial the RNG is
' reseeded and ddw shrinks by eta (or flips sign to try the opposite
' direction); on success ddw grows slightly.  The trial's best error is
' then plotted on the log-log history graph, and a red ellipse cursor is
' drawn/erased with XOR mix (%MIX_NOTXORSRC) to mark the current point.
NEXT j& bbest(i&)=bverybest(i&) NEXT i& FOR j&=0 TO nc&: cbest(j&)=cverybest(j&): NEXT j& IF cbest(0)=0## THEN cbest(0)=1## FOR c&=1 TO cmax& q$=GRAPHIC$(INKEY$) IF q$=$ESC THEN EXIT LOOP IF q$="" THEN q$=UCASE$(INKEY$): ELSE CONSOLE SET FOCUS IF q$<>"" THEN CLS SELECT CASE AS CONST$ q$ CASE "": EXIT SELECT CASE "A": ct&=(ct&+1) MOD 2: IF ct&=1 THEN GOTO 90 CASE "B": bt&=(bt&+1) MOD 2: IF bt&=1 THEN GOTO 90 CASE "C": EXIT SELECT CASE "D": PRINT: GOSUB 40: GOTO 95 CASE "E": PRINT: GOSUB 50: GOTO 95 CASE "F": PRINT: fil$="": GOSUB 20: GOTO 90 CASE "H","?": GOSUB 85: EXIT SELECT CASE "L": PRINT: GOSUB 70: GOTO 90 CASE "N": PRINT: GOSUB 30: GOTO 95 CASE "P": PRINT: GOSUB 71: EXIT SELECT CASE "R": GOTO 10 CASE "S": PRINT: GOSUB 74: EXIT SELECT CASE "T": PRINT: GOSUB 60: GOTO 90 CASE "U": PRINT: GOSUB 73: GOTO 95 CASE "W": PRINT: GOSUB 72: GOTO 95 CASE "X": PRINT: GOSUB 75: GOTO 95 CASE "Y": PRINT: GOSUB 80: GOTO 95 CASE $ESC: EXIT LOOP CASE" " CLS PRINT"LagSpace "+ver$ PRINT"Commands:"; PRINT,"A: Toggle constant term (now ";: IF ct&=0 THEN PRINT"on)" ELSE PRINT"off)" PRINT,"B: Toggle bias term (now ";: IF bt&=0 THEN PRINT"on)" ELSE PRINT"off)" PRINT,"C: Continue calculating" PRINT,"D: Change number of time lags" PRINT,"E: Change exponent of errors" PRINT,"F: Change data file" PRINT,"H: Help!" 
PRINT,"L: Change nonlinear function" PRINT,"N: Change number of neurons" PRINT,"P: Prune weak connections (now ";: IF pruning& THEN PRINT"on)" ELSE PRINT"off)" PRINT,"R: Restart" PRINT,"S: Toggle sound (now ";: IF sn& THEN PRINT"on)" ELSE PRINT"off)" PRINT,"T: Change number of training points" PRINT,"U: Change number of predicted points to save" PRINT,"W: Change number of data points withheld" PRINT,"X: Change range of trials" PRINT,"Y: Change error range" PRINT," End program" PRINT WHILE NOT (INSTAT OR GRAPHIC(INSTAT)): WEND END SELECT IF pruning&=0 THEN RESET prune&() q$="" e=0## prunes&=0 FOR i&=1 TO neurons&: FOR j&=ct& TO dims& IF a(i&,j&)=0## THEN INCR prunes& NEXT j&: NEXT i& pc=1##/SQR(neurons&*(dims&-ct&+1)+neurons&+nc&+1-prunes&) 'Probability of changing a given parameter at each trial IF bt&=0 THEN b(0)=bbest(0)+ddw*(gauss2-nudge*SGN(bbest(0))) ELSE b(0)=0## FOR i&=1 TO neurons& b(i&)=bbest(i&) IF RND<9*pc THEN b(i&)=b(i&)+ddw*(gauss2-nudge*SGN(bbest(i&))) FOR j&=ct& TO dims& 'Eliminates constant term if ct&=1 dj=1/2^(MIN(d&,5##)*j&/d&) 'Reduce neighborhood for large j& by a factor of 1-32 a(i&,j&)=abest(i&,j&) IF RND2) IF e0 AND improved&=0 THEN ddw=-ddw 'Try going in the opposite direction ELSE 'IF improved&>1 THEN PRINT improved& seed=TIMER+1##/SQR(e) 'Reseed the RNG if the trial failed IF improved&>0 THEN ddw=MIN(dwm,(1##+1e-3*improved&)*ABS(ddw)) improved&=0 ELSE ddw=eta*ABS(ddw) END IF END IF RANDOMIZE seed xp=39+599*LOG10(c&)/LOG10(cmax&) yp=-459*LOG10(ebest)/re& ypp=MIN(yp,459) 'Error has shrunk too small to plot IF c&=1 OR xpold<39 THEN GRAPHIC SET PIXEL(xp,ypp) xpold=xp: ypold=ypp ELSE IF ABS(xp-xpold)<1 AND ABS(ypp-ypold)<1 THEN EXIT IF rd&=5 GRAPHIC SET MIX %MIX_NOTXORSRC GRAPHIC ELLIPSE (xpcur&-rd&,ypcur&-rd&)-(xpcur&+rd&,ypcur&+rd&),%RED,%RED GRAPHIC SET MIX %MIX_COPYSRC GRAPHIC LINE(xpold,ypold)-(xp,ypp) xpcur&=xp: ypcur&=ypp GRAPHIC SET MIX %MIX_NOTXORSRC GRAPHIC ELLIPSE (xpcur&-rd&,ypcur&-rd&)-(xpcur&+rd&,ypcur&+rd&),%RED,%RED GRAPHIC 
' Main training loop, part 2 (expensive checks, run only when c& MOD 1000
' is zero -- "Testing is costly").  For each lag j0& the mean absolute
' sensitivity sig(j0&) of the network output to input x(k-j0&) is
' accumulated over the data via the chain rule, sum of
' abest(i,j0)*bbest(i)*dphi(arg); j0&=0 instead measures the constant term
' of the map (the output at x=0).  The sensitivities are drawn as a bar
' graph (at most 20 lags shown) with their signed mean values, plus the
' run settings (file, activation letter, error exponent, data/neuron/fit
' counts) and the current best error.  Then the largest Lyapunov exponent
' is estimated by iterating the fitted map along the actual data with a
' 1e-8 perturbation vector dx(), renormalizing every step and averaging
' LOG of the growth factor; it is shown as a Greek lambda via the symbol
' font.  If pruning is on, connections with |abest*bbest| < 10^-pruning&
' are flagged in prune&().  After the FOR c& trial loop, neurons& or dims&
' is incremented on alternate cycles so the network grows gradually.
' NOTE(review): "IF yp480 THEN GRAPHIC SET PIXEL" and adjacent text is
' corrupted (a "<"-comparison span was eaten) -- this region evidently
' iterated the map 100,000 steps into xt() and plotted the attractor.
' The surviving code writes those samples as a hand-built 8-bit mono
' 22050 Hz RIFF/WAVE file ("fmt " + "data" chunks via MKDWD$/MKWRD$/
' MKBYT$) and plays it when sound is on, then calls lespec() for the full
' Lyapunov spectrum and accumulates running mean/variance of each exponent
' (letot/levar) to report a percent error once numle& > 2.
SET MIX %MIX_COPYSRC xpold=xp: ypold=ypp END IF IF (c& MOD 1000) THEN ITERATE 'Testing is costly - don't do it too often reo&=MAX(6,CEIL(-LOG10(ebest))+1) reo&=MIN(reo&,20) 'More than this many digits are not significant ypverybest=MAX(ypverybest,yp) IF ypverybest AND verybest<>0## THEN GRAPHIC REDRAW: ITERATE maxsig=0 'Determine input strengths for each dimension FOR j&=0 TO nc&: c(j&)=cbest(j&): NEXT j& FOR j0&=ct& TO d& sig(j0&)=bbest(0): sgnsig(j0&)=0 FOR k&=d&+1 TO nmax& sig=0##: IF j0&=0 THEN sig=bbest(0) FOR i&=1 TO n& arg=abest(i&,0) FOR j&=1 TO d& arg=arg+abest(i&,j&)*x(k&-j&) NEXT j& IF j0&=0 THEN 'The constant term in the map (when x=0) sig=sig+bbest(i&)*phi(abest(i&,0),f$) ELSE sig=sig+abest(i&,j0&)*bbest(i&)*dphi(arg,f$) END IF NEXT i& sig(j0&)=sig(j0&)+ABS(sig) sgnsig(j0&)=sgnsig(j0&)+sig NEXT k& sig(j0&)=sig(j0&)/(nmax&-d&) maxsig=MAX(maxsig,sig(j0&)) NEXT j0& dmax&=MIN(d&,20) GRAPHIC BOX(56,410-16*dmax&)-(340,447-16*ct&),,%WHITE,%WHITE FOR j&=ct& TO dmax& 'Plot bar graph of input strengths for each dimension GRAPHIC SET POS(60,430-16*j&) GRAPHIC PRINT j&; GRAPHIC SET POS(290,430-16*j&) GRAPHIC PRINT USING$("##.#### ",SGN(sgnsig(j&))*sig(j&)); IF maxsig<=0## THEN ITERATE 'Nothing to plot (all sigmas are zero) xpp=80+200*sig(j&)/maxsig IF xpp<80## OR xpp>280## THEN ITERATE 'Graph is outside allowed range GRAPHIC BOX(80,443-16*j&)-(xpp,434-16*j&),,%BLACK,%BLACK NEXT j& GRAPHIC SET POS(60,430-16*dmax&-80) GRAPHIC PRINT " "+MCASE$(file$)+" "+f$+TRIM$(STR$(e&))+" " GRAPHIC SET POS(60,430-16*dmax&-64) GRAPHIC PRINT STR$(nmax&)+" points " GRAPHIC SET POS(60,430-16*dmax&-48) IF n&>1 THEN s$="s " ELSE s$=" " GRAPHIC PRINT STR$(n&)+" neuron"+s$ GRAPHIC SET POS(60,430-16*dmax&-32) GRAPHIC PRINT STR$(p&)+"-point fit " GRAPHIC SET POS(560,24) GRAPHIC PRINT"e ="+USING$("##.#^^^^ ",ebest) GRAPHIC REDRAW 'Calculate the largest Lyapunov exponent dr=1e-8## 'Perturbation size dr2=dr*dr FOR j&=1 TO d&: dx(j&)=dr/SQR(d&): NEXT j& ltot=0## FOR k&=d&+1 TO nmax& x=b(0) FOR 
i&=1 TO n& arg=a(i&,0) FOR j&=1 TO d& arg=arg+a(i&,j&)*x(k&-j&) NEXT j& x=x+b(i&)*phi(arg,f$) NEXT i& xe=b(0) FOR i&=1 TO n& arg=a(i&,0) FOR j&=1 TO d& arg=arg+a(i&,j&)*(x(k&-j&)+dx(j&)) NEXT j& xe=xe+b(i&)*phi(arg,f$) NEXT i& rs=0## FOR j&=d&-1 TO 1 STEP -1 rs=rs+dx(j&)*dx(j&) dx(j&+1)=dx(j&) NEXT j& dx(1)=xe-x rs=rs+dx(1)*dx(1) rs=SQR(rs/dr2) FOR j&=1 TO d& dx(j&)=dx(j&)/rs NEXT j& ltot=ltot+LOG(rs) NEXT k& le=ltot/(nmax&-d&) GRAPHIC SET POS(560,40) GRAPHIC SET FONT symb& GRAPHIC PRINT"l"; 'Greek lambda GRAPHIC SET FONT 1 GRAPHIC PRINT" ="+USING$("* .#### ",le) GRAPHIC REDRAW IF pruning& THEN 'Mark the weak connections for pruning FOR i&=1 TO n& FOR j&=0 TO d& IF abest(i&,j&)<>0## AND ABS(abest(i&,j&)*bbest(i&))<10^-pruning& THEN prune&(i&,j&)=-1 NEXT j& NEXT i& END IF NEXT c& FOR i&=xp TO 639: ypbest(i&)=ypverybest: NEXT i& IF countnd& MOD 2 THEN neurons&=MIN(neurons&+1,n&) 'Increase the number of neurons slowly ELSE dims&=MIN(dims&+1,d&) 'And then increase the number of dimensions END IF INCR countnd& IF yp480 THEN GRAPHIC SET PIXEL(xp,yp) NEXT t& GRAPHIC REDRAW 'Make wav file OPEN LCASE$(file$)+".wav" FOR OUTPUT AS #1 PRINT#1,"RIFF"+MKDWD$(1e5-36)+"WAVE"; PRINT#1,"fmt "+MKDWD$(16)+MKWRD$(1)+MKWRD$(1)+MKDWD$(22050)+MKDWD$(22050)+MKWRD$(1)+MKWRD$(8); PRINT#1,"data"+MKDWD$(1e5); FOR t&=1 TO 1e5 xt=255*(xt(t&)-xtmin)/(xtmax-xtmin) PRINT#1,MKBYT$(xt); NEXT t& CLOSE#1 IF sn& THEN PLAY WAVE file$+".wav" 'Play the sound of the attractor 'Calculate the LE spectrum FOR j&=0 TO nc&: c(j&)=cverybest(j&): NEXT j& CALL lespec(d&,n&,averybest(),bverybest(),nmax&,x(),f$,le(),dky) GRAPHIC SET POS(0, 481) GRAPHIC SET FONT symb& GRAPHIC PRINT"l"; 'Greek lambda GRAPHIC SET FONT 1 GRAPHIC SET WRAP GRAPHIC PRINT " = ("; INCR numle& 'Estimate error in LEs FOR k&=1 TO d& letot(k&)=letot(k&)+le(k&) leav=letot(k&)/numle& levar(k&)=levar(k&)+(le(k&)-leav)^2 ledev&(k&)=100##*SQR(levar(k&))/ABS(leav) 'This is the % error IF numle&>2 THEN ledev$=CHR$(177)+TRIM$(STR$(ledev&(k&)))+"%" ELSE 
' End of a training cycle: print the Lyapunov spectrum with error estimates
' (CHR$(177) is the +/- glyph) and the Kaplan-Yorke dimension, append the
' complete parameter set to <file>.txt (the c() coefficients when nc&>0,
' the bias, each neuron's input weights and output weight, and the signed
' lag sensitivities S(j)), and save the graphic window as <file>.bmp.
' NOTE(review): "IF k&xmax+100*(xmax-xmin) OR xt(1e5)0 THEN" is corrupted
' text (lost "<"/">" comparisons) -- apparently a divergence test on the
' iterated series xt(); confirm against an unmangled copy.
' After ESC exits the DO loop, the out-of-sample prediction screen is
' drawn: the withheld data points as red dots and the network's
' free-running prediction (seeded from the last d&+1 training points in
' xpred()) as a black line, on axes labelled -CEIL(xmax)..CEIL(xmax) and
' 0..nts&; the predicted values are also written to <file>.net.  Any key
' other than ESC resumes training at label 95.
' Labels 20-70 are the GOSUB input routines: 20 = data filename (rejects
' .txt/.net/.bmp extensions since the program itself writes those files),
' 30 = neuron count (capped at 100 to match the REDIMs), 40 = number of
' time lags, 50 = error exponent, 60 = training points, 70 = the
' activation-function menu (continues on the next block).
ledev$="" IF k&xmax+100*(xmax-xmin) OR xt(1e5)0 THEN PRINT #1, "c = ("; FOR j&=0 TO nc&-1 PRINT #1, STR$(c(j&))+", "; NEXT j& PRINT #1, STR$(c(nc&));") " END IF PRINT #1,"Bias =";bbest(0) FOR i&=1 TO n& PRINT #1,"Neuron"+STR$(i&)+":" FOR j&=0 TO d& PRINT #1,abest(i&,j&) NEXT j& PRINT #1,bbest(i&) PRINT #1,"" NEXT i& FOR j&=ct& TO d& PRINT #1,"S("+TRIM$(STR$(j&))+") ="+STR$(SGN(sgnsig(j&))*sig(j&)) NEXT j& PRINT #1,"" CLOSE #1 GRAPHIC REDRAW GRAPHIC SAVE LCASE$(file$)+".bmp" LOOP 'Plot and save the prediction GRAPHIC BOX(0,0)-(640,480),0,%WHITE,%WHITE PRINT"The graph shows the last "+TRIM$(STR$(ntest&))+" points from the time series (red dota) and the" PRINT"out-of-sample prediction of the neural network (black line). Don't expect a" PRINT"model optimized for near-term prediction to give a good long-term prediction" PRINT"for a chaotic system." PRINT PRINT" Press to exit the program or any other key to continue training."; FOR j&=1 TO d&+1: xpred(1-j&)=x(nmax&+1-j&): NEXT j& GRAPHIC BOX(40,0)-(639,459) FOR i&=1 TO 2*CEIL(xmax)-1 GRAPHIC LINE(40,230*i&/CEIL(xmax))-(50,230*i&/CEIL(xmax)) GRAPHIC LINE(628,230*i&/CEIL(xmax))-(638,230*i&/CEIL(xmax)) NEXT i& FOR i&=0 TO nts&-1 GRAPHIC LINE(40+599*i&/nts&,1)-(40+599*i&/nts&,11) GRAPHIC LINE(40+599*i&/nts&,449)-(40+599*i&/nts&,459) NEXT i& xm$=TRIM$(STR$(CEIL(xmax))) GRAPHIC TEXT SIZE xm$ TO xmw, xmh GRAPHIC SET POS(30-xmw,0) GRAPHIC PRINT xm$ GRAPHIC SET POS(20,222) GRAPHIC PRINT"X" xm$=TRIM$(STR$(-CEIL(xmax))) GRAPHIC TEXT SIZE xm$ TO xmw, xmh GRAPHIC SET POS(30-xmw,444) GRAPHIC PRINT TRIM$(STR$(-CEIL(xmax))) GRAPHIC SET POS(40,462) GRAPHIC PRINT"0" xm$="Time" GRAPHIC TEXT SIZE xm$ TO xmw, xmh GRAPHIC SET POS(340-xmw/2,462) GRAPHIC PRINT xm$ xm$=TRIM$(STR$(nts&)) GRAPHIC TEXT SIZE xm$ TO xmw, xmh GRAPHIC SET POS(640-xmw,462) GRAPHIC PRINT xm$ FOR j&=0 TO nc&: c(j&)=cverybest(j&): NEXT j& OPEN LCASE$(file$)+".net" FOR OUTPUT AS #1 'Save predictions to file FOR k&=0 TO nts& xreal=x(nmax&+k&) xpred=bverybest(0) FOR i&=1 TO 
n& arg=averybest(i&,0) FOR j&=1 TO d& arg=arg+averybest(i&,j&)*xpred(k&-j&) NEXT j& xpred=xpred+bverybest(i&)*phi(arg,f$) NEXT i& xpred(k&)=xpred xp=40+k&*(599)/nts& IF k&<=ntest& THEN 'Plot the out-of-sample data yp=230-230*xreal/CEIL(xmax) IF k&>0 THEN GRAPHIC ELLIPSE(xp-3,yp-3)-(xp+3,yp+3),%RED,%RED END IF yp=230-230*xpred/CEIL(xmax) IF k&=0 THEN GRAPHIC SET PIXEL(xp,yp) ELSE IF k&<=nsave& THEN GRAPHIC LINE(xpast,ypast)-(xp,yp) PRINT#1,xpred END IF END IF xpast=xp: ypast=yp NEXT k& CLOSE #1 GRAPHIC REDRAW q$=WAITKEY$ IF q$<>$ESC THEN GOTO 95 EXIT FUNCTION 20 'set file IF fil$="" THEN INPUT"Filename: ",fil$ IF fil$="" THEN PRINT"Usage: lagspace filename": WAITKEY$: EXIT FUNCTION IF ISFILE(fil$)=0 THEN PRINT"That file does not exist": fil$="": GOTO 20 IF UCASE$(RIGHT$(fil$,4))=".TXT" THEN PRINT"Data file cannot have a .txt extension": fil$="": GOTO 20 IF UCASE$(RIGHT$(fil$,4))=".NET" THEN PRINT"Data file cannot have a .net extension": fil$="": GOTO 20 IF UCASE$(RIGHT$(fil$,4))=".BMP" THEN PRINT"Data file cannot have a .bmp extension": fil$="": GOTO 20 RETURN 30 'set number of neurons INPUT"Number of neurons (default 6): ",q$ IF q$="" THEN n&=6 ELSE n&=VAL(q$) IF n&>100 THEN n&=100 'Maximum of 100 neurons allowed RETURN 40 'set number of time lags INPUT"Number of time lags (default 5): ",q$ IF q$="" THEN d&=5 ELSE d&=VAL(q$) RETURN 50 'set exponent of errors INPUT"Exponent of errors (default 2): ",q$ IF q$="" THEN e&=2 ELSE e&=VAL(q$) RETURN 60 'set number of training points INPUT"Training on points (default 1): ",q$ IF q$="" THEN p&=1 ELSE p&=VAL(q$) RETURN 70 'set nonlinear function PRINT"Choose a nonlinear function (default A):" PRINT ,"A: hyperbolic tangent" PRINT ,"B: signum" PRINT ,"C: sigmoid" PRINT ,"D: heaviside" PRINT ,"E: sine" PRINT ,"F: cosine" PRINT ,"G: gaussian" PRINT ,"H: gaussian derivative" PRINT ,"I: absolute value" PRINT ,"J: linear" PRINT ,"K: quadratic" PRINT ,"L: cubic" PRINT ,"M: logistic" PRINT ,"N: inverse" PRINT ,"O: quartic" PRINT ,"P: 
' Remainder of the activation-function menu (GOSUB 70): the chosen letter
' f$ sets nc& (coefficient count for the P/Q/R/S forms), REDIMs the global
' c(), and is validated by phi() -- an unknown letter makes phi return the
' 9999 sentinel and the menu repeats.  GOSUBs 71-80 read the remaining
' settings: 71 pruning level, 72 points withheld for testing, 73 predicted
' points saved, 74 sound toggle (replays the .wav when switched on),
' 75 range of trials (cmax& = 10^decades), 80 error-plot range in decades.
' Label 85 prints the built-in documentation screens (the prose lives in
' PRINT string literals and is emitted verbatim at runtime; note it
' contains the typos "dota"/"specrum"/"at the of each" -- fixing those
' would change program output, so they are left untouched here).
polynomial (3rd order)" PRINT ,"Q: polynomial (6th order)" PRINT ,"R: rational function" PRINT ,"S: special function" PRINT ,"T: binary shift" PRINT ,"U: piecewise linear" PRINT ,"V: arctangent" PRINT ,"W: exponential" f$=WAITKEY$ IF f$="" OR f$=$CR THEN f$="A" ELSE f$=UCASE$(f$) SELECT CASE AS CONST$ f$ CASE "P": nc&=3 CASE "Q": nc&=6 CASE "R": nc&=6 CASE "S": nc&=3 CASE ELSE: nc&=0 END SELECT REDIM c(nc&) IF phi(0,f$)=9999## THEN GOTO 70 RETURN 71 'set the pruning level PRINT"Input connections will be pruned if |a(i,j)*b(i)| < 10^-p" PRINT"Set p = 0 for no pruning" PRINT"Pruning is now set at";pruning& PRINT INPUT"Pruning level (default 0): ",q$ IF q$="" THEN pruning&=0 ELSE pruning&=VAL(q$) CLS RETURN 72 'set number of data points to withhold for testing INPUT"Number of data points to withhold for testing (default 18): ",q$ IF q$="" THEN ntest&=18 ELSE ntest&=VAL(q$) CLS RETURN 73 'set number of predicted points to save INPUT"Number of predicted points to save (default 18): ",q$ IF q$="" THEN nsave&=18 ELSE nsave&=VAL(q$) CLS: q$="" RETURN 74 'Toggle sound on and off sn&=(sn&+1) MOD 2 IF sn& THEN PRINT"Sound is now on" ELSE PRINT"Sound is now off" IF sn& THEN PLAY WAVE file$+".wav" RETURN 75 'set range of trials INPUT"Range of trials in decades (default 7): ",q$ IF q$="" THEN cmax&=10^7 ELSE cmax&=10^VAL(q$) CLS RETURN 80 'set error range INPUT"Error range in decades (default 8): ",q$ IF q$="" THEN re&=8 ELSE re&=VAL(q$) CLS RETURN 85 'get help LOCATE 1,28: PRINT"LagSpace Documentation" PRINT PRINT"LagSpace is a Windows program for finding the sensitivity of points in a scalar" PRINT"time series to earlier points in some embedding dimension d. The set of time" PRINT"lags for which the sensitivity is non-zero is called the 'lag space.' Finding" PRINT"the lag space is the first step in time series analysis." 
PRINT PRINT"It does this by training an artificial neural network on the data to optimize" PRINT"the next few step prediction in terms of the previous time lags. The training" PRINT"uses a random search method with a slowly shrinking neighborhood in parameter" PRINT"space similar to simulated annealing. A byproduct is an estimate of the" PRINT"Lyapunov exponent spectrum and Kaplan-Yorke dimension using the local Jacobian" PRINT"of the trained network. Since the method generates a nonlinear global" PRINT"deterministic model, it works especially well with short and/or noisy data" PRINT"sets, although it may train slowly or very poorly in many cases." PRINT PRINT"You must supply data to the program in the form of a text file containing a" PRINT"list of the time-ordered numerical values in ASCII format. It is recommended" PRINT"that you use an extension .dat for this file. When you launch the program, you" PRINT"will be prompted for the filename of the data file, the number of neurons, the" PRINT"number of time lags to consider, the exponent of the error, the number of" PRINT"points ahead to predict, and the activation function to use. If in doubt, use" PRINT"the default values. Increasing any of these values will further slow the" PRINT"training." PRINT PRINT"During training, the program displays the training history (mean square error" PRINT"versus number of trials) and the sensitivity of the prediction to each of the d" PRINT"time lags along with the attractor for the best-trained artificial neural" PRINT"network (in black) superimposed on the data (red circles with the size of the" PRINT"circles indicating the prediction error for each point) projected onto two" PRINT"dimensions. Don't expect a model optimized for near-term prediction to" PRINT"replicate the attractor. 
The largest Lyapunov exponent (lambda) is continually" PRINT"calculated, and at the of each training cycle, the entire Lyapunov exponent" PRINT"specrum is calculated along with the Kaplan-Yorke dimension." PRINT PRINT"The training continues for as long as you are willing to wait, but at the end" PRINT"of each training cycle, there is a sound whenever a better solution has been" PRINT"found, and the graphic window is saved to a file *.bmp where * is the file name" PRINT"of the data file (without the .dat extension), overwriting any existing file" PRINT"with the same name. The program also keeps a record of the progress, including" PRINT"all the parameters for the neural network in a file *.txt. Thus you cannot use" PRINT"an extension .txt for your data file. Each time a better solution is found, a" PRINT"text file *.net is generated giving a time series of 100,000 points from" PRINT"repeated iteration of the neural network." PRINT PRINT"This program is unsupported freeware and should be used at your own risk." PRINT"Further information, including links to sample data files and relevant" PRINT"publications, is at http://sprott.physics.wisc.edu/chaos/maus/lagspace.htm." 
' Tail of PBMAIN: the help screen waits for a key, clears, and RETURNs.
'
' FUNCTION gauss2: rejection-samples a point (V1,V2) inside the unit disk
' (excluding the origin) and combines the coordinates polar-Box-Muller
' style to return the product of two zero-mean, unit-variance Gaussian
' deviates.  NOTE(review): "(-2## + LOG(arg) / arg)" reads oddly for the
' polar method, whose factor is -2*LOG(arg)/arg; the "+" may be a casualty
' of the same mangling seen elsewhere in this copy -- confirm against an
' unmangled source before relying on it.
'
' FUNCTION phi(arg, f$): the neuron activation selected by single letter
' f$ (A tanh ... W exponential); cases P/Q/R/S evaluate polynomial /
' rational forms from the global coefficient array c() via Horner's rule.
' tanh (A) and the sigmoid (C) are clamped for large |arg| so EXP cannot
' overflow the extended-precision range.  An unrecognized letter beeps and
' returns the sentinel value 9999##, which GOSUB 70 uses to re-prompt.
' The tail of this physical line is the start of FUNCTION dphi, whose
' body continues on the next physical line (a mangling artifact).
PRINT PRINT" Press any key to continue"; LOCATE 1,1: CURSOR OFF WAITKEY$ CLS RETURN END FUNCTION FUNCTION gauss2 'Returns the product of two normally (Gaussian) distributed random deviates 'with mean of zero and standard deviation of 1.0 DO V1 = 2## * RND - 1## V2 = 2## * RND - 1## arg = V1 * V1 + V2 * V2 LOOP WHILE arg >= 1## OR arg = 0## GAUSS2 = V1 * V2 * (-2## + LOG(arg) / arg) END FUNCTION FUNCTION phi (arg, f$) 'Returns the nonlinear function f$ SELECT CASE AS CONST$ f$ CASE "A" 'hyperbolic tangent 'phi=tanh(arg) IF ABS(arg)<22## THEN phi = 1## - 2## / (EXP(2## * arg) + 1##) ELSE phi=SGN(arg) END IF CASE "B" 'signum phi=SGN(arg) CASE "C" 'sigmoid IF arg<-44## THEN phi=0## ELSEIF arg>44## THEN phi = 1## ELSE phi = 1## / (1## + EXP(-arg)) END IF CASE "D" 'heaviside phi=1##+SGN(arg) CASE "E" 'sine phi=SIN(arg) CASE "F" 'cosine phi=COS(arg) CASE "G" 'gaussian phi=EXP(-arg*arg) CASE "H" 'gaussian derivative phi=-arg*EXP(-arg*arg) CASE "I" 'absolute value phi=ABS(arg) CASE "J" 'linear phi=arg CASE "K" 'quadratic phi=arg*arg CASE "L" 'cubic phi=arg*arg*arg CASE "M" 'logistic phi=arg*(1##-arg) CASE "N" 'inverse phi=1##/arg CASE "O" 'quartic phi=arg*arg*arg*arg CASE "P" 'polynomial (3rd order) phi=c(0)+arg*(c(1)+arg*(c(2)+arg*c(3))) CASE "Q" 'polynomial (6th order) phi=c(0)+arg*(c(1)+arg*(c(2)+arg*(c(3)+arg*(c(4)+arg*(c(5)+arg*c(6)))))) CASE "R" 'rational function phi=(c(0)+arg*(c(1)+arg*(c(2)+arg*c(3))))/(1##+arg*(c(4)+arg*(c(5)+arg*c(6)))) CASE "S" 'Special function IF ABS(arg)<22## THEN phi = c(0)+arg*(c(1)+arg*c(2)) + c(3)*(1## - 2## / (EXP(2## * arg) + 1##)) ELSE phi=c(0)+arg*(c(1)+arg*c(2)) + c(3)*SGN(arg) END IF CASE "T" 'binary shift phi=arg MOD 1## CASE "U" 'piecewise linear IF ABS(arg)<1## THEN phi=arg ELSE phi=SGN(arg) CASE "V" 'arctangent phi=ATN(arg) CASE "W" 'exponential phi=EXP(arg) CASE ELSE BEEP CLS phi=9999## PRINT f$+" is not an allowed function!"': WAITKEY$ END SELECT END FUNCTION FUNCTION dphi (arg, f$) 'Returns the derivative of the nonlinear 
' Continuation of FUNCTION dphi(arg, f$): the analytic derivative of each
' activation in phi(), keyed by the same letters; used by the lag-space
' sensitivity calculation in PBMAIN and the Jacobian in DERIVS.  Cases
' P/Q/R/S differentiate the c()-coefficient polynomial/rational forms
' (case R uses the quotient rule with f/df/g/dg intermediates).
' NOTE(review): case "T" returns dphi=1## for the binary shift
' phi = arg MOD 1##, i.e. the derivative almost everywhere, ignoring the
' discontinuities at integers -- presumably intentional.
'
' FUNCTION sech: hyperbolic secant, computed directly for |arg| < 22 to
' avoid EXP overflow.  NOTE(review): the ELSE branch assigns arg=0##
' rather than sech=0##; the function result is then left at its default 0,
' so the value returned is the same, but the assignment looks like a typo
' for "sech=0##" -- verify against an unmangled copy.
'
' SUB screen12 (start; continues on the next physical line): sets up an
' 80-column scrolling console and a 640x480 REDRAW-mode graphic window
' titled with the executable name, version, and data-file name.
function f$ SELECT CASE AS CONST$ f$ CASE "A" 'hyperbolic secant squared dphi=sech(arg)^2 CASE "B" 'constant dphi=0## CASE "C" 'sigmoid derivative IF ABS(arg)>44## THEN dphi=0## ELSE dphi = EXP(arg)/(1##+EXP(arg))^2 END IF CASE "D" 'constant dphi=0## CASE "E" 'cosine dphi=COS(arg) CASE "F" '-sine dphi=-SIN(arg) CASE "G" 'derivative of gaussian dphi=-2##*arg*EXP(-arg*arg) CASE "H" 'derivative of gaussian derivative dphi=(2##*arg-1##)*EXP(-arg*arg) CASE "I" 'signum dphi=SGN(arg) CASE "J" 'constant dphi=1## CASE "K" 'linear dphi=2##*arg CASE "L" 'quadratic dphi=3##*arg*arg CASE "M" 'derivative of logistic dphi=1##-2##*arg CASE "N" 'inverse square dphi=-1##/(arg*arg) CASE "O" 'cubic dphi=4*arg*arg*arg CASE "P" 'derivative of polynomial (3rd order) dphi=c(1)+arg*(2##*c(2)+arg*3##*c(3)) CASE "Q" 'derivative of polynomial (6th order) dphi=c(1)+arg*(2##*c(2)+arg*(3##*c(3)+arg*(4##*c(4)+arg*(5##*c(5)+arg*6##*c(6))))) CASE "R" 'derivative of rational function f=c(0)+arg*(c(1)+arg*(c(2)+arg*c(3))) df=c(1)+arg*(2##*c(2)+arg*3##*c(3)) g=1##+arg*(c(4)+arg*(c(5)+arg*c(6))) dg=c(4)+arg*(2##*c(5)+arg*3##*c(6)) dphi=(g*df-f*dg)/(g*g) CASE "S" 'derivative of special function dphi=c(1)+arg*2##*c(2)+c(3)*sech(arg)^2 CASE "T" 'binary shift dphi=1## CASE "U" 'piecewise constant IF ABS(arg)<1## THEN dphi=1## ELSE dphi=0## CASE "V" dphi=1##/(1##+arg*arg) CASE "W" 'exponential dphi=EXP(arg) CASE ELSE BEEP CLS PRINT f$+" is not an allowed function!"': WAITKEY$ END SELECT END FUNCTION FUNCTION sech (arg) 'Returns hyperbolic secant of arg IF ABS(arg)<22## THEN sech = 2## / (EXP(arg) + EXP(-arg)) ELSE arg=0## END IF END FUNCTION SUB screen12(file$,ver$) 'Set up graphic and text windows (640x480 mode) CONSOLE SET SCREEN 60,80 '300 rows (scrolls) X 80 columns CLS GRAPHIC GET LOC TO x&, y& GRAPHIC REDRAW GRAPHIC WINDOW END GRAPHIC WINDOW EXE.NAME$+" "+ver$+" - "+file$,x&,y&,640,480+640 TO hwin??? 
' Tail of SUB screen12: attach the graphic window in REDRAW mode, set
' black-on-white drawing, name the console to match, and place the console
' beside the graphic window before giving it focus.
'
' SUB lespec: Lyapunov-spectrum calculation adapted from Wolf, Swift,
' Swinney & Vastano, Physica D 16, 285-317 (1985).  The state vector v()
' holds d& "nonlinear" variables plus d&*d& linearized (tangent-space)
' variables, nn& = d&*(d&+1) in total.  The nonlinear variables are forced
' to ride along the actual data (x(i&)=xdata(...)) rather than the
' iterated map, so the Jacobians are evaluated on the observed orbit.
' Each step DERIVS advances all variables; the d& tangent vectors are then
' re-orthonormalized by Gram-Schmidt, and LOG of each norm znorm(k&) is
' accumulated into ltot(k&) (guarded by znorm>0).  Finally le(k&) =
' ltot(k&)/t gives the spectrum, and the Kaplan-Yorke dimension dky is
' formed from the largest partial sum of exponents that stays positive.
'
' SUB DERIVS: one iteration of the fitted map plus its linearization.
' xnew(1) is the network output sum b(0) + SUM b(i)*phi(a(i,0)+SUM
' a(i,j)*x(j)); xnew(2..d) shift the delay line.  df(k&) is the Jacobian
' row SUM b(i)*a(i,k)*dphi(arg); the remaining d&*d& entries apply that
' row to each tangent vector and shift their delay lines likewise.
GRAPHIC ATTACH hwin???,0,REDRAW GRAPHIC COLOR %BLACK,%WHITE GRAPHIC CLEAR GRAPHIC REDRAW CONSOLE NAME EXE.NAME$+" "+ver$+" - "+file$ IF x&=0 THEN CONSOLE SET LOC x&+640,y& CONSOLE SET FOCUS END SUB SUB lespec(d&, n&, a(), b(), nmax&, xdata(), f$, le(), dky) 'Layapunov specrum calculation adapted from Wolf, et. al., Physica D 16, 285-317 (1985) nn&=d&*(d&+1) 'total number of variables (nonlinear + linear) DIM x(nn&),xnew(nn&),v(nn&),ltot(d&),znorm(d&),gsc(d&) irate&=1 'integration steps per reorthonormalization io&=nmax&-d& 'number of iterations of the map FOR i&=1 TO d& 'initial conditions for nonlinear maps v(i&)=0 'must be within the basin of attraction NEXT i& FOR i&=1 TO d& 'initial conditions for nonlinear map v(i&)=xdata(d&-i&+1) NEXT i& t=0## FOR i&=d&+1 TO nn& 'initial conditions for linearized maps v(i&)=0## 'Don't mess with these; they are problem independent! NEXT i& FOR i&=1 TO d& v((d&+1)*i&)=1## ltot(i&)=0## NEXT i& FOR m&=1 TO io& FOR j&=1 TO irate& FOR i&=1 TO nn& x(i&)=v(i&) NEXT i& FOR i&=1 TO d& 'Use actual data rather than iterated data x(i&)=xdata(d&-i&+m&) NEXT i& CALL DERIVS(x(), xnew(), d&, n&, a(), b(), f$) FOR i&=1 TO nn& v(i&)=xnew(i&) NEXT i& INCR t NEXT j& 'construct new orthonormal basis by gram-schmidt: znorm(1)=0## 'normalize first vector FOR j&=1 TO d& znorm(1)=znorm(1)+v(d&*j&+1)^2 NEXT j& znorm(1)=SQR(znorm(1)) FOR j&=1 TO d& v(d&*j&+1)=v(d&*j&+1)/znorm(1) NEXT j& 'generate new orthonormal set: FOR j&=2 TO d& 'make j-1 gsr coefficients FOR k&=1 TO j&-1 gsc(k&)=0## FOR l&=1 TO d& gsc(k&)=gsc(k&)+v(d&*l&+j&)*v(d&*l&+k&) NEXT l& NEXT k& FOR k&=1 TO d& 'construct a new vector FOR l&=1 TO j&-1 v(d&*k&+j&)=v(d&*k&+j&)-gsc(l&)*v(d&*k&+l&) NEXT l& NEXT k& znorm(j&)=0## 'calculate the vector's norm FOR k&=1 TO d& znorm(j&)=znorm(j&)+v(d&*k&+j&)^2 NEXT k& znorm(j&)=SQR(znorm(j&)) FOR k&=1 TO d& 'normalize the new vector v(d&*k&+j&)=v(d&*k&+j&)/znorm(j&) NEXT k& NEXT j& FOR k&=1 TO d& 'update running vector magnitudes IF znorm(k&)>0 THEN 
ltot(k&)=ltot(k&)+LOG(znorm(k&)) NEXT k& NEXT m& lsum=0##: kmax&=0 FOR k&=1 TO d& le=ltot(k&)/t le(k&)=le lsum=lsum+le IF lsum>0 THEN lsum0=lsum: kmax&=k& NEXT k& IF ltot(1)>0 AND kmax&+1<=d& THEN dky=kmax&-t*lsum0/ltot(kmax&+1) ELSE dky=0## END SUB SUB DERIVS(x(), xnew(), d&, n&, a(), b(), f$) ' Nonlinear neural net equations: DIM df(d&) xnew(1)=b(0) FOR i&=1 TO n& arg=a(i&,0) FOR j&=1 TO d& arg=arg+a(i&,j&)*x(j&) NEXT j& xnew(1)=xnew(1)+b(i&)*phi(arg,f$) NEXT i& FOR j&=2 TO d& xnew(j&)=x(j&-1) NEXT j& ' Linearized neural net equations: FOR k&=1 TO d& df(k&)=0## FOR i&=1 TO n& arg=a(i&,0) FOR j&=1 TO d& arg=arg+a(i&,j&)*x(j&) NEXT j& df(k&)=df(k&)+b(i&)*a(i&,k&)*dphi(arg,f$) NEXT i& NEXT k& FOR k&=d&+1 TO 2*d& xnew(k&)=0## FOR j&=1 TO d& xnew(k&)=xnew(k&)+df(j&)*x(k&+d&*(j&-1)) NEXT j& NEXT k& FOR k&=2*d&+1 TO d&*(d&+1) xnew(k&)=x(k&-d&) NEXT k& END SUB