OP
Member
Joined: Sep 2017
Posts: 164
|
Proportional Rule-Switching Agents (PRSA)Imagine a living manuscript written by a hundred hands at once. Each hand writes a short line, then pauses, then writes again—never alone, always listening to the murmur of the others. The manuscript isn’t fixed; it’s an evolving chorus of lines that lean toward one another, soften their edges, and find a shared cadence. At the center is a quiet pulse. It doesn’t command; it suggests. Think of it as a tide moving through a branching shoreline. The shoreline is a tree of small decisions—depths and rates, paths and forks—whose shape guides how the tide spreads. Higher branches respond lightly, closer roots sway more; together they create a rhythmic backdrop the whole chorus can feel. Each line in the chorus follows a simple grammar: a memory of where it just was, a curiosity for two neighboring lines, a sensitivity to the tide, and an ear for the room’s overall hum. The neighbors are not chosen by proximity on a page, but by a subtle kinship: branches of similar depth, currents of similar speed. That kinship becomes a weight—stronger for close cousins on the tree, lighter for distant relatives. In this way, the manuscript prefers coherence without requiring uniformity. But there is also a conductor, and it is not a person. It’s a small, rule-making mechanism that learns how the chorus tends to sing. It listens to compact snapshots of the room—the average tone, the collective energy, the pulse—and proposes how each line should bend its next note. These proposals are not arbitrary. They are piecewise: “in rooms like this, with tones like these, bend this way.” Over time, the conductor not only adjusts the lines; it also redesigns the seating chart—who listens to whom—and even assigns proportions, a fair share of influence, so that the ensemble does not tilt toward a single loud voice. There is a discipline to this play. Every adjustment is bounded; every tendency is balanced by a counter-tendency. Momentum smooths sudden jolts. 
Proportions are normalized so that attention remains a scarce resource, not a runaway gift. The results are logged, line by line, in separate books—one book per voice—yet each book quotes the others. Open any page and you’ll find a self-contained verse that still points outward, referencing the tide it felt, the neighbors it heard, the seat it was given, and the short rule that shaped its choice. Sometimes, at appointed moments, the seating, the rules, and the proportions are reconsidered. The chorus does not dissolve; it molts. Kinships are re-weighed; alliances shift; the grammar is rewritten in the margins. This is not chaos—more like seasons. The same tree; new leaves. The same tide; a different shore. What emerges is not a single melody, but a texture: local phrases that brighten or darken together, clusters that coordinate without collapsing, solos that rise only when the room invites them. The manuscript remains legible because it keeps explaining itself—every verse carries its own recipe—yet it stays surprising because the recipes are learned, not imposed. In the end, the system is a study in measured togetherness. It suggests how separate lines can become mutually informative without losing their character; how guidance can be learned rather than declared; how proportion can prevent dominance; how memory can soften change. It does not promise an endpoint. It promises a way of moving—iterative, attentive, shaped by a shared structure yet open to revision—so that the whole becomes more than a sum, and the path, though unknowable in advance, feels quietly inevitable as it unfolds.
// ================= PARAMETERS =================
#define MAX_BRANCHES 3 // max children per non-leaf D-tree node
#define MAX_DEPTH 4 // D-tree depth at the root (leaves are depth 0)
#define NWIN 256 // minimum LookBack window in bars
#define NET_EQNS 100 // number of coupled equations in the network
#define DEGREE 4 // neighbors per equation (adjacency row width)
#define KPROJ 16 // rows of the random +/-1 projection matrix
#define REWIRE_EVERY 127 // bars between rewire epochs (prime to avoid resonance)
#define LOG_EVERY 1 // bars between CSV log rows (1 = every bar)
// DTREE-driven rewiring candidates per neighbor slot
#define CAND_NEIGH 8
// Feature sizes for DTREE calls
#define ADV_EQ_NF 10 // per-equation features
#define ADV_PAIR_NF 12 // pair features
// ================ HARMONIC D-TREE (structural context) ================
// One node of the harmonic decision tree. Children are stored behind a
// void* and cast to Node** on access (lite-C friendly self-reference).
typedef struct Node {
var v; // state, smoothed oscillator value in roughly [-1..1]
var r; // intrinsic rate, used as an angular frequency per Bar
void* c; // array of child Node* (cast on access)
int n; // number of children (0 for leaves)
int d; // depth (MAX_DEPTH at root, 0 at leaves)
} Node;
Node* Root; // root of the harmonic D-tree, created in run()/INITRUN
// D-tree index: flat pre-order array of all nodes, built by indexTreeDFS()
Node** G_TreeIdx; // [cap]
int G_TreeN; // count of indexed nodes
int G_TreeCap; // capacity of G_TreeIdx
var G_DTreeExp; // exponent for evaluateNode() depth attenuation
// --------- helpers ----------
// Zorro: random(Max) returns uniform [0..Max), abs() for absolute value, clamp() builtin.
// uniform integer in [lo..hi]
int randint(int lo, int hi)
{
	// random(M) draws uniformly from [0..M); integer truncation
	// therefore yields a uniform integer in [lo..hi]
	int span = hi - lo + 1;
	return lo + (int)random(span);
}
// uniform var in [a..b)
var randu(var a, var b)
{
	// shift-and-scale a uniform draw from [0..b-a) into [a..b)
	var width = b - a;
	return a + random(width);
}
// return ±1 with 50/50 probability (guaranteed nonzero)
var randsign()
{
	// fair coin: returns -1.0 or +1.0, never zero
	if(random(1) < 0.5)
		return -1.0;
	return 1.0;
}
// map u in [-1,1] to [lo,hi]
var mapUnit(var u, var lo, var hi)
{
	// clamp u into [-1,1], then linearly interpolate between lo and hi
	u = clamp(u,-1,1);
	var t = 0.5*(u + 1.0); // [0..1]
	return lo + t*(hi - lo);
}
void pushTreeNode(Node* u){ if(G_TreeN < G_TreeCap) G_TreeIdx[G_TreeN++] = u; }
void indexTreeDFS(Node* u){ if(!u) return; pushTreeNode(u); int i; for(i=0;i<u->n;i++) indexTreeDFS(((Node**)u->c)[i]); }
// Recursively build a random tree of the given depth. Each node gets a
// random initial state, a small positive rate that grows with depth, and
// 1..MAX_BRANCHES children until depth 0 (leaves). The random() call
// order (v, r, then children) defines the reproducible init sequence.
// NOTE(review): malloc results are not checked — assumes allocation succeeds.
Node* createNode(int depth)
{
Node* u = (Node*)malloc(sizeof(Node));
u->v = 2*random(1) - 1; // [-1..1)
u->r = 0.01 + 0.02*depth + random(1)*0.005; // small positive
u->d = depth;
if(depth > 0){
u->n = randint(1, MAX_BRANCHES);
u->c = malloc(u->n * sizeof(void*));
int i; for(i=0;i<u->n;i++) ((Node**)u->c)[i] = createNode(depth - 1);
} else { u->n = 0; u->c = 0; }
return u;
}
// Post-order evaluation of the harmonic tree: each node's phase is a
// sinusoid of its own rate times the current Bar, offset by the sum of
// its children's values; the node state is an EMA toward that phase with
// a weight that shrinks for deeper (higher-d) nodes via G_DTreeExp.
// Returns the updated node value; 0 for a null node.
var evaluateNode(Node* u)
{
if(!u) return 0;
var sum=0; int i; for(i=0;i<u->n;i++) sum += evaluateNode(((Node**)u->c)[i]);
var phase = sin(u->r * Bar + sum);
var weight = 1.0 / pow(u->d + 1, G_DTreeExp); // depth attenuation
u->v = (1 - weight)*u->v + weight*phase;
return u->v;
}
int countNodes(Node* u){ if(!u) return 0; int c=1,i; for(i=0;i<u->n;i++) c += countNodes(((Node**)u->c)[i]); return c; }
void freeTree(Node* u){ if(!u) return; int i; for(i=0;i<u->n;i++) freeTree(((Node**)u->c)[i]); if(u->c) free(u->c); free(u); }
// =========== NETWORK STATE & COEFFICIENTS ===========
int G_N = NET_EQNS; // number of equations
int G_D = DEGREE; // neighbors per equation
int G_K = KPROJ; // projection rows
// states
var* G_State; // [G_N] current state per equation
var* G_Prev; // [G_N] previous state (for momentum term)
var* G_Vel; // [G_N] last state change
// sparse adjacency
int* G_Adj; // [G_N*G_D] neighbor indices, row i = equation i's neighbors
// random projection + features
var* G_RP; // [G_K*G_N] +/-1 projection matrix
var* G_Z; // [G_K] projected squared states
// weights (will be DTREE-synthesized each epoch)
int* G_Mode; // 0..3 selects nonlinearity combo
var* G_WSelf; // self
var* G_WN1; // neighbor 1
var* G_WN2; // neighbor 2
var* G_WGlob1; // global term 1
var* G_WGlob2; // global term 2
var* G_WMom; // momentum
var* G_WTree; // DTree-coupling weight
var* G_WAdv; // built-in DTREE advice weight
// argument coefficients for the two nonlinearities (per equation)
var* A1x; var* A1lam; var* A1mean; var* A1E; var* A1P; var* A1i; var* A1c;
var* A2x; var* A2lam; var* A2mean; var* A2E; var* A2P; var* A2i; var* A2c;
// global-term coeffs
var* G1mean; var* G1E;
var* G2P; var* G2lam;
// DTree (structural) coupling diagnostics & parameters
var* G_TreeTerm; // DT(i) numeric
int* G_TopEq; // strongest partner index
var* G_TopW; // strongest partner normalized weight
int* G_EqTreeId; // eq -> tree node id
var* TAlpha; // per-eq depth penalty
var* TBeta; // per-eq rate penalty
// predictability and DTREE advice score
var* G_Pred; // [0..1]
var* G_AdvScore; // [-1..1]
// DTREE-created proportions (sum to 1 across equations)
var* G_PropRaw; // raw, positive
var* G_Prop; // normalized by normalizeProportions()
// symbolic equation string per equation (1024 bytes each)
string* G_Sym;
// epoch/context & feedback
int G_Epoch = 0; // rewire epoch counter
int G_CtxID = 0; // hash of adjacency + epoch
var G_FB_A = 0.7; // lambda feedback: weight on tree output
var G_FB_B = 0.3; // lambda feedback: weight on network projection
// ---------- predictability from D-tree (0..1) ----------
// Predictability score in [0..1] for one tree node: high when the node is
// shallow, its rate is close to the nominal rate for its depth, and its
// children's states cluster tightly around its own state.
var nodePredictability(Node* t)
{
	if(!t) return 0.5; // neutral score for a missing node
	// mean absolute deviation of child states from the parent state
	var spread = 0;
	int nc = t->n;
	int k;
	for(k=0; k<nc; k++){
		Node* child = ((Node**)t->c)[k];
		spread += abs(child->v - t->v); // abs(var)
	}
	if(nc > 0) spread /= nc;
	var depthScore = 1.0/(1 + t->d);
	var nominalRate = 0.01 + 0.02*t->d;
	var rateScore = exp(-25.0*abs(t->r - nominalRate));
	var p = 0.5*(depthScore + rateScore);
	p = 0.5*p + 0.5*(1.0/(1.0 + spread));
	return clamp(p,0,1); // built-in clamp
}
// filenames
// Compose the per-equation log path "Log\Alpha01_eq_NNN.csv" into outName
// (caller supplies a buffer of at least 64 bytes).
void buildEqFileName(int idx, char* outName /*>=64*/)
{
	// single formatting call instead of strcpy/strcat chaining
	strcpy(outName, strf("Log\\Alpha01_eq_%03i.csv", idx));
}
// --------- allocation ----------
// Allocate and initialize all network arrays. Every per-equation buffer is
// sized [G_N]; adjacency is [G_N*G_D]; projection is [G_K*G_N]. Initial
// weights/coefficients are placeholders overwritten by the first DTREE
// synthesis epoch. Also allocates the D-tree index (capacity 512) and the
// eq->tree-node mapping filled later by rewireInit().
// NOTE(review): malloc results are unchecked — assumes allocation succeeds.
void allocateNet()
{
int N=G_N, D=G_D, K=G_K;
G_State=(var*)malloc(N*sizeof(var)); G_Prev=(var*)malloc(N*sizeof(var)); G_Vel=(var*)malloc(N*sizeof(var));
G_Adj=(int*)malloc(N*D*sizeof(int));
G_RP=(var*)malloc(K*N*sizeof(var)); G_Z=(var*)malloc(K*sizeof(var));
G_Mode=(int*)malloc(N*sizeof(int));
G_WSelf=(var*)malloc(N*sizeof(var)); G_WN1=(var*)malloc(N*sizeof(var)); G_WN2=(var*)malloc(N*sizeof(var));
G_WGlob1=(var*)malloc(N*sizeof(var)); G_WGlob2=(var*)malloc(N*sizeof(var));
G_WMom=(var*)malloc(N*sizeof(var)); G_WTree=(var*)malloc(N*sizeof(var)); G_WAdv=(var*)malloc(N*sizeof(var));
A1x=(var*)malloc(N*sizeof(var)); A1lam=(var*)malloc(N*sizeof(var)); A1mean=(var*)malloc(N*sizeof(var));
A1E=(var*)malloc(N*sizeof(var)); A1P=(var*)malloc(N*sizeof(var)); A1i=(var*)malloc(N*sizeof(var)); A1c=(var*)malloc(N*sizeof(var));
A2x=(var*)malloc(N*sizeof(var)); A2lam=(var*)malloc(N*sizeof(var)); A2mean=(var*)malloc(N*sizeof(var));
A2E=(var*)malloc(N*sizeof(var)); A2P=(var*)malloc(N*sizeof(var)); A2i=(var*)malloc(N*sizeof(var)); A2c=(var*)malloc(N*sizeof(var));
G1mean=(var*)malloc(N*sizeof(var)); G1E=(var*)malloc(N*sizeof(var));
G2P=(var*)malloc(N*sizeof(var)); G2lam=(var*)malloc(N*sizeof(var));
G_TreeTerm=(var*)malloc(N*sizeof(var)); G_TopEq=(int*)malloc(N*sizeof(int)); G_TopW=(var*)malloc(N*sizeof(var));
TAlpha=(var*)malloc(N*sizeof(var)); TBeta=(var*)malloc(N*sizeof(var));
G_Pred=(var*)malloc(N*sizeof(var)); G_AdvScore=(var*)malloc(N*sizeof(var));
G_PropRaw=(var*)malloc(N*sizeof(var)); G_Prop=(var*)malloc(N*sizeof(var));
G_Sym=(string*)malloc(N*sizeof(string));
int i;
for(i=0;i<N;i++){
G_State[i]=2*random(1)-1; G_Prev[i]=G_State[i]; G_Vel[i]=0;
// initialize; will be overwritten by DTREE synthesis
G_Mode[i]=0;
G_WSelf[i]=0.5; G_WN1[i]=0.2; G_WN2[i]=0.2; G_WGlob1[i]=0.1; G_WGlob2[i]=0.1; G_WMom[i]=0.05; G_WTree[i]=0.15; G_WAdv[i]=0.15;
A1x[i]=1; A1lam[i]=0.1; A1mean[i]=0; A1E[i]=0; A1P[i]=0; A1i[i]=0; A1c[i]=0;
A2x[i]=1; A2lam[i]=0.1; A2mean[i]=0; A2E[i]=0; A2P[i]=0; A2i[i]=0; A2c[i]=0;
G1mean[i]=1.0; G1E[i]=0.001;
G2P[i]=0.6; G2lam[i]=0.3;
TAlpha[i]=0.8; TBeta[i]=25.0;
G_TreeTerm[i]=0; G_TopEq[i]=-1; G_TopW[i]=0;
G_Pred[i]=0.5; G_AdvScore[i]=0;
G_PropRaw[i]=1; G_Prop[i]=1.0/G_N;
G_Sym[i]=(char*)malloc(1024); strcpy(G_Sym[i],""); // per-eq symbolic string buffer
}
// D-tree index & mapping buffers
G_TreeCap=512; G_TreeIdx=(Node**)malloc(G_TreeCap*sizeof(Node*)); G_TreeN=0;
G_EqTreeId=(int*)malloc(N*sizeof(int));
}
// Release all network buffers allocated by allocateNet().
// Fix: every pointer is reset to 0 after freeing, so calling freeNet()
// twice (e.g. repeated cleanup()) is a safe no-op instead of a double free.
void freeNet()
{
int i;
if(G_State){free(G_State); G_State=0;} if(G_Prev){free(G_Prev); G_Prev=0;} if(G_Vel){free(G_Vel); G_Vel=0;}
if(G_Adj){free(G_Adj); G_Adj=0;} if(G_RP){free(G_RP); G_RP=0;} if(G_Z){free(G_Z); G_Z=0;}
if(G_Mode){free(G_Mode); G_Mode=0;} if(G_WSelf){free(G_WSelf); G_WSelf=0;} if(G_WN1){free(G_WN1); G_WN1=0;} if(G_WN2){free(G_WN2); G_WN2=0;}
if(G_WGlob1){free(G_WGlob1); G_WGlob1=0;} if(G_WGlob2){free(G_WGlob2); G_WGlob2=0;} if(G_WMom){free(G_WMom); G_WMom=0;}
if(G_WTree){free(G_WTree); G_WTree=0;} if(G_WAdv){free(G_WAdv); G_WAdv=0;}
if(A1x){free(A1x); A1x=0;} if(A1lam){free(A1lam); A1lam=0;} if(A1mean){free(A1mean); A1mean=0;} if(A1E){free(A1E); A1E=0;} if(A1P){free(A1P); A1P=0;} if(A1i){free(A1i); A1i=0;} if(A1c){free(A1c); A1c=0;}
if(A2x){free(A2x); A2x=0;} if(A2lam){free(A2lam); A2lam=0;} if(A2mean){free(A2mean); A2mean=0;} if(A2E){free(A2E); A2E=0;} if(A2P){free(A2P); A2P=0;} if(A2i){free(A2i); A2i=0;} if(A2c){free(A2c); A2c=0;}
if(G1mean){free(G1mean); G1mean=0;} if(G1E){free(G1E); G1E=0;} if(G2P){free(G2P); G2P=0;} if(G2lam){free(G2lam); G2lam=0;}
if(G_TreeTerm){free(G_TreeTerm); G_TreeTerm=0;} if(G_TopEq){free(G_TopEq); G_TopEq=0;} if(G_TopW){free(G_TopW); G_TopW=0;}
if(TAlpha){free(TAlpha); TAlpha=0;} if(TBeta){free(TBeta); TBeta=0;}
if(G_Pred){free(G_Pred); G_Pred=0;} if(G_AdvScore){free(G_AdvScore); G_AdvScore=0;}
if(G_PropRaw){free(G_PropRaw); G_PropRaw=0;} if(G_Prop){free(G_Prop); G_Prop=0;}
// free each per-equation symbolic string, then the pointer table itself
if(G_Sym){ for(i=0;i<G_N;i++) if(G_Sym[i]){ free(G_Sym[i]); G_Sym[i]=0; } free(G_Sym); G_Sym=0; }
if(G_TreeIdx){free(G_TreeIdx); G_TreeIdx=0;} if(G_EqTreeId){free(G_EqTreeId); G_EqTreeId=0;}
}
// --------- random projection ----------
void randomizeRP()
{
	// fill the K x N projection matrix with unbiased Rademacher entries
	int K=G_K, N=G_N;
	int k, j;
	for(k=0; k<K; k++){
		for(j=0; j<N; j++){
			G_RP[k*N+j] = randsign(); // -1 or +1 with equal probability
		}
	}
}
void computeProjection(){ int K=G_K,N=G_N,k,j; for(k=0;k<K;k++){ var acc=0; for(j=0;j<N;j++) acc+=G_RP[k*N+j]*(G_State[j]*G_State[j]); G_Z[k]=acc; }}
// --------- build features for DTREE ----------
// Fill S (length ADV_EQ_NF) with the per-equation feature vector used by
// adviseEq(): current state, bar aggregates, the driver lambda,
// predictability, the mapped tree node's depth/rate, the last structural
// coupling term, and the current nonlinearity mode.
void buildEqFeatures(int i, var lambda, var mean, var energy, var power, var* S /*ADV_EQ_NF*/)
{
Node* t=G_TreeIdx[G_EqTreeId[i]]; // tree node mapped to equation i
S[0]=G_State[i];
S[1]=mean;
S[2]=power;
S[3]=energy;
S[4]=lambda;
S[5]=G_Pred[i];
S[6]=t->d;
S[7]=t->r;
S[8]=G_TreeTerm[i];
S[9]=G_Mode[i];
}
// Fill P (length ADV_PAIR_NF) with the pair feature vector used by
// advisePair(): both states, both tree depths/rates, their absolute
// differences (structural kinship), joint predictability, and bar context.
void buildPairFeatures(int i,int j, var lambda, var mean, var energy, var power, var* P /*ADV_PAIR_NF*/)
{
Node* ti=G_TreeIdx[G_EqTreeId[i]];
Node* tj=G_TreeIdx[G_EqTreeId[j]];
P[0]=G_State[i]; P[1]=G_State[j];
P[2]=ti->d; P[3]=tj->d;
P[4]=ti->r; P[5]=tj->r;
P[6]=abs(P[2]-P[3]); P[7]=abs(P[4]-P[5]); // abs(var)
P[8]=G_Pred[i]*G_Pred[j];
P[9]=lambda; P[10]=mean; P[11]=power;
}
// --------- DTREE advice wrappers ----------
// Query the built-in DTREE advisor with equation i's feature vector and
// rescale the raw advice from ~[-100..100] to ~[-1..1].
var adviseEq(int i, var lambda, var mean, var energy, var power)
{
var S[ADV_EQ_NF]; buildEqFeatures(i,lambda,mean,energy,power,S);
var a = adviseLong(DTREE, 0, S, ADV_EQ_NF); // ~[-100,100]
return a/100.;
}
// Query the DTREE advisor with pair (i,j) features; result scaled to ~[-1..1].
var advisePair(int i,int j, var lambda, var mean, var energy, var power)
{
var P[ADV_PAIR_NF]; buildPairFeatures(i,j,lambda,mean,energy,power,P);
var a = adviseLong(DTREE, 0, P, ADV_PAIR_NF);
return a/100.;
}
// --------- DTREE-driven adjacency selection ----------
// Rebuild the adjacency table: for each of the D neighbor slots of each
// equation, sample CAND_NEIGH random candidates, score them with the DTREE
// pair advisor, and keep the best. Candidates equal to i or already chosen
// for this row are skipped.
// Fix: the fallback path (no scored candidate survived) previously only
// avoided self-loops and could write a DUPLICATE of an earlier slot in the
// same row; it now also rejects duplicates, with a bounded retry and a
// final self-loop-free resort so the function always terminates.
void rewireAdjacency_DTREE(var lambda, var mean, var energy, var power)
{
int N=G_N, D=G_D, i, d, c, best, cand;
for(i=0;i<N;i++){
for(d=0; d<D; d++){
var bestScore = -2; best = -1;
for(c=0;c<CAND_NEIGH;c++){
cand = randint(0,N-1);
if(cand==i) continue;
// avoid duplicates already chosen for this row
int clash=0, k; for(k=0;k<d;k++) if(G_Adj[i*D+k]==cand){clash=1; break;}
if(clash) continue;
var s = advisePair(i,cand,lambda,mean,energy,power); // [-1,1]
if(s > bestScore){ bestScore=s; best=cand; }
}
if(best<0){ // fallback: random pick, still avoiding self and duplicates
int tries;
for(tries=0; tries<8*CAND_NEIGH && best<0; tries++){
cand = randint(0,N-1);
if(cand==i) continue;
int clash=0, k; for(k=0;k<d;k++) if(G_Adj[i*D+k]==cand){clash=1; break;}
if(!clash) best = cand;
}
// last resort (e.g. N <= D): accept any non-self neighbor
if(best<0){ do{ best = randint(0,N-1);} while(best==i); }
}
G_Adj[i*D + d] = best;
}
}
}
// --------- DTREE-created coefficients, modes & proportions ----------
// Create equation i's entire rule set from repeated DTREE advice calls:
// nonlinearity mode, the eight mixing weights, fourteen argument
// coefficients, global-term coefficients, the structural kernel penalties
// (TAlpha/TBeta), and a raw proportion. Each advice value in [-1..1] is
// mapped into a hand-chosen stability range via mapUnit(). The NUMBER and
// ORDER of adviseEq()/randsign() calls is part of the contract — it fixes
// the advice/random stream — so do not reorder or coalesce them.
void synthesizeEquationFromDTREE(int i, var lambda, var mean, var energy, var power)
{
// multiple advice calls; each mapped to a coefficient range
var a_mode = adviseEq(i,lambda,mean,energy,power);
G_Mode[i] = (int)(abs(a_mode*1000)) & 3; // spread advice onto modes 0..3
var a_wself = adviseEq(i,lambda,mean,energy,power);
var a_wn1 = adviseEq(i,lambda,mean,energy,power);
var a_wn2 = adviseEq(i,lambda,mean,energy,power);
var a_g1 = adviseEq(i,lambda,mean,energy,power);
var a_g2 = adviseEq(i,lambda,mean,energy,power);
var a_mom = adviseEq(i,lambda,mean,energy,power);
var a_tree = adviseEq(i,lambda,mean,energy,power);
var a_adv = adviseEq(i,lambda,mean,energy,power);
G_WSelf[i] = mapUnit(a_wself, 0.15, 0.85);
G_WN1[i] = mapUnit(a_wn1, 0.05, 0.35);
G_WN2[i] = mapUnit(a_wn2, 0.05, 0.35);
G_WGlob1[i] = mapUnit(a_g1, 0.05, 0.30);
G_WGlob2[i] = mapUnit(a_g2, 0.05, 0.30);
G_WMom[i] = mapUnit(a_mom, 0.02, 0.15);
G_WTree[i] = mapUnit(a_tree, 0.05, 0.35);
G_WAdv[i] = mapUnit(a_adv, 0.05, 0.35);
// argument coefficients (range chosen to be stable)
var a1 = adviseEq(i,lambda,mean,energy,power);
var a2 = adviseEq(i,lambda,mean,energy,power);
var a3 = adviseEq(i,lambda,mean,energy,power);
var a4 = adviseEq(i,lambda,mean,energy,power);
var a5 = adviseEq(i,lambda,mean,energy,power);
var a6 = adviseEq(i,lambda,mean,energy,power);
var a7 = adviseEq(i,lambda,mean,energy,power);
A1x[i] = randsign()*mapUnit(a1, 0.6, 1.2); // random sign decorrelates equations
A1lam[i] = randsign()*mapUnit(a2, 0.05,0.35);
A1mean[i]= mapUnit(a3,-0.30,0.30);
A1E[i] = mapUnit(a4,-0.0015,0.0015);
A1P[i] = mapUnit(a5,-0.30,0.30);
A1i[i] = mapUnit(a6,-0.02,0.02);
A1c[i] = mapUnit(a7,-0.20,0.20);
// second nonlinearity args
var b1 = adviseEq(i,lambda,mean,energy,power);
var b2 = adviseEq(i,lambda,mean,energy,power);
var b3 = adviseEq(i,lambda,mean,energy,power);
var b4 = adviseEq(i,lambda,mean,energy,power);
var b5 = adviseEq(i,lambda,mean,energy,power);
var b6 = adviseEq(i,lambda,mean,energy,power);
var b7 = adviseEq(i,lambda,mean,energy,power);
A2x[i] = randsign()*mapUnit(b1, 0.6, 1.2);
A2lam[i] = randsign()*mapUnit(b2, 0.05,0.35);
A2mean[i]= mapUnit(b3,-0.30,0.30);
A2E[i] = mapUnit(b4,-0.0015,0.0015);
A2P[i] = mapUnit(b5,-0.30,0.30);
A2i[i] = mapUnit(b6,-0.02,0.02);
A2c[i] = mapUnit(b7,-0.20,0.20);
// global-term coeffs
var c1 = adviseEq(i,lambda,mean,energy,power);
var c2 = adviseEq(i,lambda,mean,energy,power);
var d1 = adviseEq(i,lambda,mean,energy,power);
var d2 = adviseEq(i,lambda,mean,energy,power);
G1mean[i] = mapUnit(c1, 0.4, 1.6);
G1E[i] = mapUnit(c2,-0.004,0.004);
G2P[i] = mapUnit(d1, 0.1, 1.2);
G2lam[i] = mapUnit(d2, 0.05,0.7);
// per-equation alpha/beta penalties (for structural DTree kernel)
var e1 = adviseEq(i,lambda,mean,energy,power);
var e2 = adviseEq(i,lambda,mean,energy,power);
TAlpha[i] = mapUnit(e1, 0.3, 1.5);
TBeta[i] = mapUnit(e2, 6.0, 50.0);
// DTREE-created raw proportion; normalized later by normalizeProportions()
var p = adviseEq(i,lambda,mean,energy,power); // [-1,1]
G_PropRaw[i] = 0.01 + 0.99 * (0.5*(p+1.0)); // in (0.01..1.0]
}
// normalize proportions so sum_i Prop[i] = 1
// Rescale the raw proportions so they sum to exactly 1 across equations;
// if the raw sum is non-positive, fall back to a uniform share.
void normalizeProportions()
{
	int N=G_N, i;
	var total = 0;
	for(i=0;i<N;i++) total += G_PropRaw[i];
	if(total <= 0){
		var u = 1.0/N;
		for(i=0;i<N;i++) G_Prop[i] = u;
		return;
	}
	for(i=0;i<N;i++) G_Prop[i] = G_PropRaw[i]/total;
}
// --------- DTree proportional coupling: DT(i) with Proportion & Predictability ----------
// Structural consensus term DT(i): a weighted average of all other
// equations' states, where each weight decays exponentially with the
// depth and rate distance between the two mapped tree nodes (kinship
// kernel, penalties TAlpha/TBeta), and is boosted by joint predictability,
// by the pair's combined proportion share, and mildly by DTREE pair
// advice. Outputs the strongest partner's index and its normalized weight
// through the optional out-pointers. Returns 0 if all weights vanish.
// NOTE(review): the advisePair() call per pair makes this O(N^2) advice
// queries per bar across all equations.
var dtreeTerm(int i, int* outTopEq, var* outTopW)
{
int N=G_N,j;
int tid_i=G_EqTreeId[i]; Node* ti=G_TreeIdx[tid_i]; int di=ti->d; var ri=ti->r;
var alpha=TAlpha[i], beta=TBeta[i];
var sumw=0, acc=0, bestW=-1; int bestJ=-1;
for(j=0;j<N;j++){
if(j==i) continue;
int tid_j=G_EqTreeId[j]; Node* tj=G_TreeIdx[tid_j]; int dj=tj->d; var rj=tj->r;
var w = exp(-alpha*abs(di-dj)) * exp(-beta*abs(ri-rj)); // kinship kernel
var predBoost = 0.5 + 0.5*(G_Pred[i]*G_Pred[j]);
var propBoost = 0.5 + 0.5*( (G_Prop[i] + G_Prop[j]) ); // favors high-proportion participants
w *= predBoost * propBoost;
// Optional: DTREE pair advice boost
var pairAdv = advisePair(i,j,0,0,0,0); // safe call; if untrained, returns ~0
w *= (0.75 + 0.25*(0.5*(pairAdv+1.0))); // 0.75..1.0 range
sumw += w; acc += w*G_State[j];
if(w>bestW){bestW=w; bestJ=j;}
}
if(outTopEq) *outTopEq = bestJ;
if(outTopW) *outTopW = ifelse(sumw>0, bestW/sumw, 0);
if(sumw>0) return acc/sumw; return 0;
}
// --------- symbolic expression builder (now includes Prop[i]) ----------
// Render equation i's current update rule into its 1024-byte symbolic
// string G_Sym[i] (human-readable, logged in META rows). n1/n2 are the
// current neighbor indices shown inside the nonlinearity arguments.
// The "Prop[i]=...;" fragment is an annotation embedded mid-expression,
// not an algebraic term of the sum.
void buildSymbolicExpr(int i, int n1, int n2)
{
string s = G_Sym[i]; strcpy(s,"");
string a1 = strf("(%.3f*x[%i] + %.3f*lam + %.3f*mean + %.5f*E + %.3f*P + %.3f*i + %.3f)",
A1x[i], n1, A1lam[i], A1mean[i], A1E[i], A1P[i], A1i[i], A1c[i]);
string a2 = strf("(%.3f*x[%i] + %.3f*lam + %.3f*mean + %.5f*E + %.3f*P + %.3f*i + %.3f)",
A2x[i], n2, A2lam[i], A2mean[i], A2E[i], A2P[i], A2i[i], A2c[i]);
strcat(s, "x[i]_next = ");
strcat(s, strf("%.3f*x[i] + ", G_WSelf[i]));
// the nonlinearity pair mirrors the mode switch in updateNet()
if(G_Mode[i]==0){ strcat(s, strf("%.3f*sin%s + ", G_WN1[i], a1)); strcat(s, strf("%.3f*cos%s + ", G_WN2[i], a2)); }
else if(G_Mode[i]==1){ strcat(s, strf("%.3f*tanh%s + ", G_WN1[i], a1)); strcat(s, strf("%.3f*sin%s + ", G_WN2[i], a2)); }
else if(G_Mode[i]==2){ strcat(s, strf("%.3f*cos%s + ", G_WN1[i], a1)); strcat(s, strf("%.3f*tanh%s + ", G_WN2[i], a2)); }
else { strcat(s, strf("%.3f*sin%s + ", G_WN1[i], a1)); strcat(s, strf("%.3f*cos%s + ", G_WN2[i], a2)); }
strcat(s, strf("%.3f*tanh(%.3f*mean + %.5f*E) + ", G_WGlob1[i], G1mean[i], G1E[i]));
strcat(s, strf("%.3f*sin(%.3f*P + %.3f*lam) + ", G_WGlob2[i], G2P[i], G2lam[i]));
strcat(s, strf("%.3f*(x[i]-x_prev[i]) + ", G_WMom[i]));
strcat(s, strf("Prop[i]=%.4f; ", G_Prop[i]));
strcat(s, strf("%.3f*DT(i) + ", G_WTree[i]));
strcat(s, strf("%.3f*DTREE(i)", G_WAdv[i] ));
}
// --------- one-time rewire init (build mapping) ----------
// One-time initialization: refresh the random projection, flatten the
// D-tree into G_TreeIdx, and map each equation onto a tree node
// round-robin. Fix: guard against an empty tree index (G_TreeN == 0),
// which would previously cause a modulo-by-zero in the mapping loop.
void rewireInit()
{
randomizeRP(); computeProjection();
// Build D-tree index and eq->tree mapping
G_TreeN=0; indexTreeDFS(Root);
int i;
if(G_TreeN <= 0){ // defensive: no indexed nodes (null Root)
for(i=0;i<G_N;i++) G_EqTreeId[i] = 0;
return;
}
for(i=0;i<G_N;i++) G_EqTreeId[i] = i % G_TreeN;
}
// --------- full "rewire epoch": adjacency by DTREE + coefficients by DTREE + proportions ----------
// Full rewire epoch, run every REWIRE_EVERY bars: refresh predictability,
// let DTREE choose a new topology and synthesize every equation's
// coefficients/modes/proportions, normalize the proportions, derive a
// context hash from the new adjacency, and rebuild the symbolic strings.
void rewireEpoch(var lambda, var mean, var energy, var power)
{
// 1) refresh predictability before synthesis
int i;
for(i=0;i<G_N;i++){ Node* t=G_TreeIdx[G_EqTreeId[i]]; G_Pred[i]=nodePredictability(t); }
// 2) topology chosen by DTREE
rewireAdjacency_DTREE(lambda,mean,energy,power);
// 3) coefficients/modes/penalties/proportions created by DTREE
for(i=0;i<G_N;i++) synthesizeEquationFromDTREE(i,lambda,mean,energy,power);
// 4) normalize proportions across equations
normalizeProportions();
// 5) update context id (from adjacency, hashed with the epoch number)
int D=G_D; int h=0; for(i=0;i<G_N*D;i++) h = (h*1315423911) ^ G_Adj[i];
G_CtxID = (h ^ (G_Epoch<<8)) & 0x7FFFFFFF;
// 6) rebuild symbolic strings with current neighbors
for(i=0;i<G_N;i++){ int n1=G_Adj[i*G_D+0], n2=G_Adj[i*G_D+1]; buildSymbolicExpr(i,n1,n2); }
}
// --------- update step (per bar) ----------
// Collapse the whole network into one scalar in roughly [-1..1], mixing a
// squashed mean/energy term with the lag-1 cross-product of the states.
var projectNet()
{
	int N=G_N, i;
	var total=0, totalSq=0, lagProd=0;
	for(i=0;i<N;i++){
		var x = G_State[i];
		total += x;
		totalSq += x*x;
		if(i+1 < N) lagProd += x*G_State[i+1];
	}
	var avg = total/N;
	var corr = lagProd/(N-1);
	return 0.6*tanh(avg + 0.001*totalSq) + 0.4*sin(corr);
}
// Advance every equation one bar. driver is the blended lambda signal.
// Per equation: compute the structural consensus DT(i), query DTREE
// advice, evaluate the two mode-selected nonlinearities on the neighbor
// states, and blend all terms with the synthesized weights. Aggregates
// (mean/energy/power) are computed from the PRE-update states and
// returned through the out-pointers. When writeMeta is nonzero (rewire
// bars) a META row is appended to each equation's CSV file.
void updateNet(var driver, var* outMean, var* outEnergy, var* outPower, int writeMeta)
{
int N=G_N, D=G_D, i;
// aggregates for this bar (before update)
var sum=0,sumsq=0; for(i=0;i<N;i++){ sum+=G_State[i]; sumsq+=G_State[i]*G_State[i]; }
var mean=sum/N, energy=sumsq, power=sumsq/N;
// refresh predictability & (optional) cached DT advice per equation
for(i=0;i<N;i++){ Node* t=G_TreeIdx[G_EqTreeId[i]]; G_Pred[i]=nodePredictability(t); }
// update each equation
for(i=0;i<N;i++){
int n1=G_Adj[i*D+0], n2=G_Adj[i*D+1];
var xi=G_State[i], xn1=G_State[n1], xn2=G_State[n2], mom=xi-G_Prev[i];
// structural consensus first (uses proportions & predictability internally)
int topEq=-1; var topW=0;
var dt = dtreeTerm(i,&topEq,&topW);
G_TreeTerm[i]=dt; G_TopEq[i]=topEq; G_TopW[i]=topW;
// built-in DTREE advice from current features
var adv = adviseEq(i, driver, mean, energy, power);
G_AdvScore[i] = adv;
// nonlinear arguments (from DTREE-generated coeffs)
var arg1=A1x[i]*xn1 + A1lam[i]*driver + A1mean[i]*mean + A1E[i]*energy + A1P[i]*power + A1i[i]*i + A1c[i];
var arg2=A2x[i]*xn2 + A2lam[i]*driver + A2mean[i]*mean + A2E[i]*energy + A2P[i]*power + A2i[i]*i + A2c[i];
var nl1,nl2;
if(G_Mode[i]==0){ nl1=sin(arg1); nl2=cos(arg2); }
else if(G_Mode[i]==1){ nl1=tanh(arg1); nl2=sin(arg2); }
else if(G_Mode[i]==2){ nl1=cos(arg1); nl2=tanh(arg2); }
else { nl1=sin(arg1); nl2=cos(arg2); }
var glob1=tanh(G1mean[i]*mean + G1E[i]*energy);
var glob2=sin (G2P[i]*power + G2lam[i]*driver);
// blend all terms with the synthesized mixing weights
var xNew =
G_WSelf[i]*xi +
G_WN1[i]*nl1 +
G_WN2[i]*nl2 +
G_WGlob1[i]*glob1 +
G_WGlob2[i]*glob2 +
G_WMom[i]*mom +
G_WTree[i]*dt +
G_WAdv[i] *adv;
G_Prev[i]=xi; G_Vel[i]=xNew-xi; G_State[i]=xNew;
// META on rewire bars
if(writeMeta){
char fname[64]; buildEqFileName(i,fname);
int tid=G_EqTreeId[i]; Node* t=G_TreeIdx[tid];
int nn1=G_Adj[i*D+0], nn2=G_Adj[i*D+1];
file_append(fname,
strf("META,%i,%i,%i,%i,%i,%i,%i,%i,%.6f,Pred=%.4f,Adv=%.4f,Prop=%.6f,Mode=%i,WAdv=%.3f,WTree=%.3f,\"%s\"\n",
G_Epoch, G_CtxID, NET_EQNS, i, nn1, nn2, tid, t->d, t->r,
G_Pred[i], G_AdvScore[i], G_Prop[i], G_Mode[i], G_WAdv[i], G_WTree[i], G_Sym[i]));
}
}
if(outMean) *outMean=mean; if(outEnergy) *outEnergy=energy; if(outPower) *outPower=power;
}
// ----------------- MAIN -----------------
// Zorro entry point, called once per bar. On the first INITRUN: build the
// tree and network, randomize parameters, run epoch-0 synthesis, and
// write CSV headers plus an initial META dump. Every bar afterwards:
// evaluate the tree into lambda, rewire every REWIRE_EVERY bars, update
// the network, feed the network projection back into lambda, plot, log,
// and enter long when lambda exceeds 0.9.
function run()
{
static int initialized=0;
static var lambda;
if(is(INITRUN) && !initialized){
if(LookBack < NWIN) LookBack = NWIN;
Root=createNode(MAX_DEPTH);
allocateNet();
G_DTreeExp = randu(1.10,1.60);
G_FB_A = randu(0.60,0.85);
G_FB_B = 1.0 - G_FB_A; // feedback weights sum to 1
randomizeRP(); computeProjection();
// Build tree index + mapping once
rewireInit();
// First epoch synthesis (uses current states as context)
G_Epoch = 0;
rewireEpoch(0,0,0,0);
// Prepare files: header per equation
char fname[64]; int i;
for(i=0;i<NET_EQNS;i++){
buildEqFileName(i,fname);
file_append(fname,
"Bar,lambda,gamma,i,State,n1,n2,mean,energy,power,Vel,Mode,WAdv,WSelf,WN1,WN2,WGlob1,WGlob2,WMom,WTree,Pred,Adv,Prop,TreeTerm,TopEq,TopW,TreeId,Depth,Rate\n");
}
// Initial META dump (epoch 0)
for(i=0;i<G_N;i++){
int n1=G_Adj[i*G_D+0], n2=G_Adj[i*G_D+1]; int tid=G_EqTreeId[i]; Node* t=G_TreeIdx[tid];
char fname2[64]; buildEqFileName(i,fname2);
file_append(fname2,
strf("META,%i,%i,%i,%i,%i,%i,%i,%i,%.6f,Pred=%.4f,Adv=%.4f,Prop=%.6f,Mode=%i,WAdv=%.3f,WTree=%.3f,\"%s\"\n",
G_Epoch, G_CtxID, NET_EQNS, i, n1, n2, tid, t->d, t->r,
G_Pred[i], G_AdvScore[i], G_Prop[i], G_Mode[i], G_WAdv[i], G_WTree[i], G_Sym[i]));
}
initialized=1;
printf("\nRoot nodes: %i | Net equations: %i (deg=%i, kproj=%i)", countNodes(Root), G_N, G_D, G_K);
}
// 1) Tree -> lambda (evaluate the harmonic tree for this bar)
lambda = evaluateNode(Root);
// 2) Rewire epoch? (also true at Bar 0)
int doRewire = ((Bar % REWIRE_EVERY) == 0);
if(doRewire){
G_Epoch++;
// Use current aggregates as context for synthesis
// quick pre-aggregates for better guidance
int i; var sum=0; for(i=0;i<G_N;i++) sum += G_State[i];
var mean = sum/G_N;
var energy=0; for(i=0;i<G_N;i++) energy += G_State[i]*G_State[i];
var power = energy/G_N;
rewireEpoch(lambda,mean,energy,power);
}
// 3) Update net this bar (write META only if rewired)
var meanB, energyB, powerB;
updateNet(lambda, &meanB, &energyB, &powerB, doRewire);
// 4) Feedback blend: mix tree output with the network projection
var gamma = projectNet();
lambda = G_FB_A*lambda + G_FB_B*gamma;
// 5) Plots
plot("lambda", lambda, LINE, 0);
plot("gamma", gamma, LINE, 0);
plot("P_win", powerB, LINE, 0);
// 6) Numeric logging
if(Bar % LOG_EVERY == 0){
char fname[64]; int i;
for(i=0;i<NET_EQNS;i++){
int n1=G_Adj[i*G_D+0], n2=G_Adj[i*G_D+1]; int tid=G_EqTreeId[i]; Node* t=G_TreeIdx[tid];
buildEqFileName(i,fname);
file_append(fname,
strf("%i,%.9f,%.9f,%i,%.9f,%i,%i,%.9f,%.9f,%.9f,%.9f,%i,%.6f,%.6f,%.6f,%.6f,%.6f,%.6f,%.6f,%.6f,%.4f,%.4f,%.6f,%.9f,%i,%.6f,%i,%i,%.6f\n",
Bar, lambda, gamma, i, G_State[i], n1, n2,
meanB, energyB, powerB, G_Vel[i], G_Mode[i],
G_WAdv[i], G_WSelf[i], G_WN1[i], G_WN2[i], G_WGlob1[i], G_WGlob2[i], G_WMom[i], G_WTree[i],
G_Pred[i], G_AdvScore[i], G_Prop[i], G_TreeTerm[i], G_TopEq[i], G_TopW[i], tid, t->d, t->r));
}
}
// simple trade rule on the blended signal
if(lambda > 0.9) enterLong();
}
// Clean up memory
// Zorro cleanup hook: release the tree and all network buffers.
// Fix: Root is nulled after freeTree() so a repeated cleanup() call
// cannot free the same tree twice.
function cleanup()
{
if(Root){ freeTree(Root); Root = 0; }
freeNet();
}
Last edited by TipmyPip; 09/04/25 16:58.
|