NexusWeave Compact Dominant (NCD-v2) is a multi-asset FX strategy that ranks currency pairs by how “structurally coherent” their recent behavior looks, using a two-layer graph architecture. Instead of relying on a single indicator or a fixed regime filter, it treats each market as a small network of interacting features, then evaluates how tightly that network holds together over time. The strategy runs on an hourly bar schedule and updates its rankings periodically (every few bars) to stay responsive without becoming overly noisy.

At the first layer, each currency pair builds an aspect graph from nine rolling features (e.g., short and medium log returns, moving-average slope, volatility, momentum, RSI-like proxy, Bollinger-position pressure, volatility-of-volatility, and a simple Markov-style up/down persistence proxy). Over a fixed window, the strategy measures correlations between these features and converts correlation strength into a distance metric. When feature relationships remain stable and mutually reinforcing, the graph’s shortest-path structure becomes “compact.” That compactness is summarized into a score (derived from the graph’s overall distance structure), alongside supporting diagnostics such as average edge weight, edge variance (used as an entropy-like stability proxy), a volatility centrality measure, a bandwidth ratio, and a lightweight regime probability based on short-horizon entropy.

The second layer is a universe graph connecting all traded pairs. Here, each edge blends two ideas: (1) similarity of recent pair returns (correlation distance) and (2) a currency-exposure distance that recognizes when two pairs share base/quote currency structure. This meta-graph produces portfolio-level signals: a coupling penalty (how tightly everything moves together), a crude cluster score (how fragmented the market is), and a USD exposure gauge weighted by pair compactness.

Finally, NCD-v2 forms a dominance score per asset: it rewards pair compactness, lightly incorporates a simple regime compactness term, and penalizes high market coupling (to avoid crowded, highly synchronized conditions). Scores are passed through a sigmoid to normalize them, then the strategy selects the top-ranked pairs (Top-K) as its active focus set. The result is a self-contained selection engine designed to prefer pairs whose internal feature structure is orderly while the broader market remains sufficiently diversified.

Code
// TGr06A_CompactDominant_v2.cpp - Zorro64 Strategy DLL (C++)
// Strategy A v2: Compactness-Dominant with Enhanced Graph Architecture

#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>

#define INF 1e30   // "missing edge" sentinel for graph distances; finite so INF+INF in Floyd-Warshall stays representable
#define EPS 1e-12  // guard against division by (near) zero in correlation and ratio code

// Tradable universe: 27 FX pairs spanning the 8 currencies in CURRENCY_BASE.
const char* ASSET_NAMES[] = {
  "EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
  "CADJPY","CADCHF","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD","GBPAUD",
  "GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD","USDCAD"
};
// BUG FIX: the list above contains 27 symbols; the previous value of 28 made
// every loop over the universe read one element past the end of ASSET_NAMES
// (undefined behavior, and a bogus 28th "pair" in every graph).
#define N_ASSET_NAMES 27

static const int FEAT_N = 9;        // number of rolling features per pair (see updateFeaturesForAsset)
static const int FEAT_WINDOW = 200; // bars in the per-pair feature correlation window
static const int META_WINDOW = 200; // bars in the universe-graph return-correlation window
static const int UPDATE_EVERY = 5;  // rebuild graphs only every N-th bar
static const int TOP_K = 5;         // number of top-ranked pairs reported as the focus set

static const double alpha = 0.1;    // dominance-score weight: regime compactness term
static const double beta  = 0.2;    // dominance-score weight: market-coupling penalty
// NOTE(review): 'gamma' can collide with the legacy gamma() declaration in
// <math.h> on some POSIX toolchains — confirm it compiles clean on the target.
static const double gamma = 3.0;    // dominance-score weight: per-pair compactness

static const double LAMBDA_META = 0.7; // meta-edge blend: correlation distance vs currency-exposure distance

// Clamp x into the closed interval [0,1].
static double clamp01(double x) {
  if(x < 0.0) return 0.0;
  return (x > 1.0) ? 1.0 : x;
}
// Logistic function 1/(1+e^-x), with saturation guards so exp() never overflows.
static double sigmoid(double x) {
  if(x < -30.0) return 0.0;
  if(x >  30.0) return 1.0;
  return 1.0/(1.0 + exp(-x));
}

// The 8 currency codes occurring in ASSET_NAMES; index 2 ("USD") is the one
// referenced by computeUSDExposure(). All codes are exactly 3 characters.
static const char* CURRENCY_BASE[] = { "EUR","GBP","USD","CHF","JPY","AUD","CAD","NZD" };
static const int N_CURRENCIES = 8;

// Exposure of pair 'pairIdx' to currency 'ccy': +1 if it is the base
// currency, -1 if it is the quote currency, 0 if the pair doesn't touch it.
// Pair symbols are exactly 6 characters, laid out as BBBQQQ.
static int getCurrencyExposure(int pairIdx, int ccy) {
  const char* sym  = ASSET_NAMES[pairIdx];
  const char* code = CURRENCY_BASE[ccy];
  if(strncmp(sym, code, 3) == 0) return 1;       // base currency match
  if(strncmp(sym + 3, code, 3) == 0) return -1;  // quote currency match
  return 0;
}

// Structural distance between two pairs based on shared currency exposure,
// normalized by the currency count: 0 when exposures agree everywhere,
// +1 per currency where they conflict, +0.5 where only one pair is exposed.
static double exposureDist(int i, int j) {
  double total = 0.0;
  for(int c = 0; c < N_CURRENCIES; c++) {
    int a = getCurrencyExposure(i, c);
    int b = getCurrencyExposure(j, c);
    if(a == 0 && b == 0) continue;                    // neither pair touches this currency
    if(a == 0 || b == 0) { total += 0.5; continue; }  // only one pair exposed
    if(a != b) total += 1.0;                          // opposite-side exposure
  }
  return total / (double)N_CURRENCIES;
}

// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed value, get(1) the one before, etc.
class RollingBuffer {
public:
  int cap = 0;    // allocated capacity
  int n = 0;      // number of valid samples (saturates at cap)
  int head = 0;   // next write slot
  double* x = 0;  // heap-backed storage

  RollingBuffer() {}
  ~RollingBuffer() { shutdown(); }

  // Allocate room for L samples; aborts via quit() if allocation fails.
  void init(int L) {
    shutdown();
    cap = L;
    x = (double*)malloc((size_t)cap*sizeof(double));
    if(!x) quit("OOM");
    n = 0;
    head = 0;
  }

  // Release storage and reset all bookkeeping.
  void shutdown() {
    if(x) free(x);
    x = 0;
    cap = 0;
    n = 0;
    head = 0;
  }

  // Append one sample, overwriting the oldest once full. No-op if uninitialized.
  void push(double v) {
    if(!x || cap <= 0) return;
    x[head] = v;
    head = (head + 1) % cap;
    if(n < cap) n++;
  }

  // Read the i-th most recent sample (i = 0 is the newest).
  double get(int i) const {
    int idx = head - 1 - i;
    while(idx < 0) idx += cap;
    return x[idx % cap];
  }
};

// Pearson correlation of the latest L samples of two rolling buffers.
// Returns 0 when either buffer holds fewer than L samples, when L < 5,
// or when either series is numerically constant over the window.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L) {
  if(a.n < L || b.n < L || L < 5) return 0.0;

  // First pass: window means.
  double meanA = 0.0, meanB = 0.0;
  for(int k = 0; k < L; k++) {
    meanA += a.get(k);
    meanB += b.get(k);
  }
  meanA /= (double)L;
  meanB /= (double)L;

  // Second pass: centered sums of squares and cross products.
  double ssA = 0.0, ssB = 0.0, ssAB = 0.0;
  for(int k = 0; k < L; k++) {
    double da = a.get(k) - meanA;
    double db = b.get(k) - meanB;
    ssA  += da*da;
    ssB  += db*db;
    ssAB += da*db;
  }

  double denom = sqrt(ssA*ssB);
  return (denom <= EPS) ? 0.0 : ssAB/denom;
}

// Dense weighted digraph stored as a flat n*n distance matrix.
// Missing edges are INF and the diagonal is 0. After allPairsShortest()
// the matrix holds shortest-path distances rather than raw edge weights,
// so the "edge" statistics below then describe path distances.
class WeightedGraph {
public:
  int n = 0;
  double* d = 0;

  WeightedGraph() {}
  ~WeightedGraph() { shutdown(); }

  // Allocate an n*n matrix and set it to the empty-graph state.
  void init(int N) {
    shutdown();
    n = N;
    d = (double*)malloc((size_t)n*n*sizeof(double));
    if(!d) quit("OOM");
    reset();
  }

  void shutdown() {
    if(d) free(d);
    d = 0;
    n = 0;
  }

  // Flat index of matrix cell (r,c).
  inline int pos(int r, int c) const { return r*n + c; }

  // Restore "no edges": 0 on the diagonal, INF elsewhere.
  void reset() {
    for(int r = 0; r < n; r++)
      for(int c = 0; c < n; c++)
        d[pos(r,c)] = (r == c) ? 0.0 : INF;
  }

  // Floyd-Warshall in place: d becomes the all-pairs shortest-path matrix.
  void allPairsShortest() {
    for(int k = 0; k < n; k++)
      for(int i = 0; i < n; i++)
        for(int j = 0; j < n; j++) {
          double viaK = d[pos(i,k)] + d[pos(k,j)];
          if(viaK < d[pos(i,j)]) d[pos(i,j)] = viaK;
        }
  }

  // Wiener-like index: sum over unordered node pairs of the average of the
  // two directed distances (equals the plain sum for a symmetric matrix).
  double wienerUndirectedLike() const {
    double W = 0.0;
    for(int i = 0; i < n; i++)
      for(int j = i+1; j < n; j++)
        W += 0.5*(d[pos(i,j)] + d[pos(j,i)]);
    return W;
  }

  // Mean of the finite upper-triangle entries (0 if none).
  double meanEdgeWeight() const {
    double total = 0.0;
    int cnt = 0;
    for(int i = 0; i < n; i++)
      for(int j = i+1; j < n; j++)
        if(d[pos(i,j)] < INF) { total += d[pos(i,j)]; cnt++; }
    return cnt > 0 ? total/cnt : 0.0;
  }

  // Population variance of the finite upper-triangle entries (0 if none).
  double edgeVariance() const {
    double mean = meanEdgeWeight();
    double var = 0.0;
    int cnt = 0;
    for(int i = 0; i < n; i++)
      for(int j = i+1; j < n; j++)
        if(d[pos(i,j)] < INF) {
          double diff = d[pos(i,j)] - mean;
          var += diff*diff;
          cnt++;
        }
    return cnt > 0 ? var/cnt : 0.0;
  }

  // Closeness-style centrality: sum of finite distances out of 'node'.
  double centrality(int node) const {
    double total = 0.0;
    for(int j = 0; j < n; j++)
      if(j != node && d[pos(node,j)] < INF) total += d[pos(node,j)];
    return total;
  }
};

// Per-pair diagnostics produced by PairAspectGraph::rebuildIfReady().
struct PairFeatures {
  double compactness;   // 1/(1+W) of the aspect graph's Wiener-like index; 0 until the window is full
  double meanEdge;      // mean matrix entry — measured AFTER Floyd-Warshall, so a path-distance mean
  double entropy;       // variance of the distance matrix, used as a stability ("entropy-like") proxy
  double volCentrality; // distance-sum centrality of feature node 3 (volatility), scaled by 1/(FEAT_N-1)
  double regimeProb;    // 1.0 when the short-horizon entropyStat < 2.0, else 0.0
  double pBullNext;     // fraction of up moves of feature 0 over the recent ~20-sample window
  double entropyStat;   // histogram entropy of the last 20 feature-0 values
  double bandwidth;     // ratio of current volatility feature to its value 10 samples back
};

// Per-pair "aspect graph": a FEAT_N-node graph whose nodes are rolling
// features of one currency pair and whose edges are correlation distances.
// rebuildIfReady() refreshes the graph and the PairFeatures diagnostics.
class PairAspectGraph {
public:
  WeightedGraph G;                 // FEAT_N x FEAT_N feature-to-feature distance graph
  RollingBuffer feat[FEAT_N];      // rolling windows of the nine features
  RollingBuffer featPrev[FEAT_N];  // lagged buffers; only index 0 is ever pushed (see updateFeaturesForAsset)
  PairFeatures pf;                 // latest diagnostics
  int prevRegime;                  // reserved; initialized but never written afterwards

  PairAspectGraph() : prevRegime(0) { pf.compactness=0; pf.meanEdge=0; pf.entropy=0; pf.volCentrality=0; pf.regimeProb=0; pf.pBullNext=0; pf.entropyStat=0; pf.bandwidth=0; }

  void init(){
    G.init(FEAT_N);
    for(int k=0;k<FEAT_N;k++){ feat[k].init(FEAT_WINDOW); featPrev[k].init(FEAT_WINDOW); }
  }
  void shutdown(){
    for(int k=0;k<FEAT_N;k++){ feat[k].shutdown(); featPrev[k].shutdown(); }
    G.shutdown();
  }

  // Map |corr| in [0,1] to a distance in [0,1]: strong correlation -> short edge.
  static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
  // Signed variant: corr=+1 -> 0, corr=-1 -> 1. (Currently unused by this class.)
  static double signedCorrToDist(double corr){ return 0.5 - 0.5*corr; }

  void pushFeature(int k, double v){ feat[k].push(v); }
  void pushFeaturePrev(int k, double v){ featPrev[k].push(v); }

  // Histogram entropy of the last L values of feature 0 over 10 fixed bins.
  // FIX: the histogram now lives on the stack — the old calloc() result was
  // dereferenced without a null check and re-allocated on every call.
  // NOTE(review): the binning (int)(v*BINS) assumes v in [0,1); feature 0 is a
  // 1-bar log return, so most samples (and all negatives) collapse into bin 0
  // and the entropy is usually near 0 — confirm this is intended before tuning
  // the 2.0 regime threshold used in rebuildIfReady().
  double computeEntropy(int L) {
    if(feat[0].n < L) return 0.0;
    enum { BINS = 10 };
    int hist[BINS] = {0};
    for(int i=0;i<L && i<feat[0].n;i++) {
      double v = feat[0].get(i);
      int bin = (int)(v * BINS);
      if(bin < 0) bin = 0;
      if(bin >= BINS) bin = BINS-1;
      hist[bin]++;
    }
    double H = 0.0;
    for(int b=0;b<BINS;b++) {
      if(hist[b] > 0) {
        double p = (double)hist[b] / (double)L;
        H -= p * log(p + EPS);
      }
    }
    return H;
  }

  // Rebuild the aspect graph and all diagnostics once the window is full;
  // until then only compactness is maintained (held at 0).
  void rebuildIfReady(){
    if(feat[0].n < FEAT_WINDOW){ pf.compactness = 0; return; }
    G.reset();

    // Symmetric edges: correlation distance between each pair of features.
    for(int i=0;i<FEAT_N;i++){
      for(int j=i+1;j<FEAT_N;j++){
        double c = corrOf(feat[i], feat[j], FEAT_WINDOW);
        double w = corrToDist(c);
        // With FEAT_N == 9 and j > i, this branch fires only for (i,j)=(7,8).
        // NOTE(review): featPrev is only ever pushed at index 0, so featPrev[7]
        // is empty and corrOf() returns 0 here, pulling w toward 0.5 —
        // confirm which lagged feature was actually intended.
        if(i >= 7 && j >= 8) {
          double cp = corrOf(featPrev[i], feat[j], min(FEAT_WINDOW, featPrev[i].n));
          w = 0.5 * w + 0.5 * corrToDist(cp);
        }
        G.d[G.pos(i,j)] = w;
        G.d[G.pos(j,i)] = w;
      }
    }

    G.allPairsShortest();
    double W = G.wienerUndirectedLike();
    pf.compactness = 1.0/(1.0 + W);       // compact structure -> small W -> score near 1
    pf.meanEdge = G.meanEdgeWeight();     // measured after Floyd-Warshall (path distances)
    pf.entropy = G.edgeVariance();
    pf.volCentrality = G.centrality(3) / (FEAT_N - 1); // node 3 = volatility feature

    // Short-horizon regime diagnostics over the last ~20 samples.
    // NOTE(review): the guard checks feat[8] while the loop reads feat[0];
    // both buffers fill in lockstep so this works, but it looks accidental.
    if(feat[8].n >= 20) {
      int upNext = 0; // (a parallel 'upCurrent' counter was dead code; removed)
      for(int i=0;i<19;i++){ if(feat[0].get(i+1) > 0) upNext++; }
      pf.pBullNext = (double)upNext / 19.0;
      pf.entropyStat = computeEntropy(20);
      pf.regimeProb = (pf.entropyStat < 2.0) ? 1.0 : 0.0;
      pf.bandwidth = feat[3].get(0) / (feat[3].get(10) + EPS); // vol now vs 10 samples ago
    }
  }
};

// Universe-level meta-graph: one node per traded pair, edges blending
// return-correlation distance with currency-exposure distance. Produces
// the three portfolio signals used by the dominance score.
class PairUniverseGraph {
public:
  WeightedGraph G;        // N_ASSET_NAMES x N_ASSET_NAMES meta-graph
  double couplingPenalty; // 1/(1+W): approaches 1 when the whole universe moves together
  double clusterScore;    // fraction of pairs with no close neighbor (fragmentation gauge)
  double usdExposure;     // compactness-weighted |USD exposure|, clamped to [0,1]

  PairUniverseGraph() : couplingPenalty(0), clusterScore(0), usdExposure(0) {}

  void init(){ G.init(N_ASSET_NAMES); }
  void shutdown(){ G.shutdown(); }

  // Map |corr| in [0,1] to a distance in [0,1].
  static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }

  // Rebuild all edges from (a) correlation of 1-bar returns over META_WINDOW
  // and (b) shared currency structure, then refresh the portfolio signals.
  void rebuild(PairAspectGraph* pairs){
    G.reset();
    for(int i=0;i<N_ASSET_NAMES;i++){
      for(int j=i+1;j<N_ASSET_NAMES;j++){
        double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
        double corrDist = corrToDist(c);
        double expDist = exposureDist(i, j);
        double w = LAMBDA_META * corrDist + (1.0 - LAMBDA_META) * expDist;
        G.d[G.pos(i,j)] = w;
        G.d[G.pos(j,i)] = w;
      }
    }
    G.allPairsShortest();
    double W = G.wienerUndirectedLike();
    couplingPenalty = 1.0/(1.0 + W);
    clusterScore = computeClusterScore();
    usdExposure = computeUSDExposure(pairs);
  }

  // Fraction of nodes that have no neighbor closer than 0.3.
  // NOTE(review): called after allPairsShortest(), so the 0.3 threshold is
  // applied to shortest-path distances, not raw edge weights — confirm intent.
  double computeClusterScore() {
    int isolated = 0;
    for(int i=0;i<N_ASSET_NAMES;i++) {
      bool hasConnection = false;
      for(int j=0;j<N_ASSET_NAMES;j++) {
        if(i != j && G.d[G.pos(i,j)] < 0.3) { hasConnection = true; break; }
      }
      if(!hasConnection) isolated++;
    }
    return (double)isolated / N_ASSET_NAMES;
  }

  // Sum of |USD exposure| weighted by each pair's compactness, scaled by 1/5
  // and clamped to [0,1]. Currency index 2 is "USD" in CURRENCY_BASE.
  double computeUSDExposure(PairAspectGraph* pairs) {
    double totalExp = 0.0;
    for(int i=0;i<N_ASSET_NAMES;i++) {
      // FIX: renamed local from 'exp' — it shadowed exp() from <math.h>.
      double expo = fabs((double)getCurrencyExposure(i, 2));
      totalExp += expo * pairs[i].pf.compactness;
    }
    return clamp01(totalExp / 5.0);
  }
};

// Placeholder regime-compactness signal driven by bar parity:
// returns 0.5 on even bars and 1/3.5 on odd bars. Reads the Zorro global 'Bar'.
static double computeRegimeCompactness() {
  const double W = ((Bar % 2) == 0) ? 1.0 : 2.5;
  return 1.0/(1.0 + W);
}

// Top-level strategy object: maintains one aspect graph per pair plus the
// universe meta-graph, and every UPDATE_EVERY bars recomputes a dominance
// score per pair and ranks the Top-K.
class CompactDominantStrategyV2 {
public:
  PairAspectGraph pairG[N_ASSET_NAMES];       // one aspect graph per traded pair
  PairUniverseGraph metaG;                    // universe-level meta-graph
  int lastUpdateBar;                          // bar index of the last full rebuild
  double stateVector[N_ASSET_NAMES * 8 + 5];  // flattened diagnostics snapshot (8 per pair + 5 globals)

  CompactDominantStrategyV2() : lastUpdateBar(-999999) {}

  void init(){
    for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
    metaG.init();
  }
  void shutdown(){
    metaG.shutdown();
    for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
  }

  // Compute the nine rolling features for asset 'a' from its close series
  // and push them into the pair's buffers. Reads up to C[20], so it returns
  // early until enough bars exist.
  void updateFeaturesForAsset(int a) {
    asset((char*)ASSET_NAMES[a]);
    // series() is called before the bar guard so the series is registered
    // consistently on every bar (Zorro requirement).
    vars C = series(priceClose(0));
    if(Bar < 20) return;

    double c0 = C[0], c1 = C[1], c12 = C[12];
    double ret1 = log(c0/c1);   // 1-bar log return
    double retN = log(c0/c12);  // 12-bar log return

    // MA slope: short (3-bar) mean minus medium (12-bar) mean.
    double ma3 = (C[0]+C[1]+C[2])/3.0;
    double ma12 = 0; for(int i=0;i<12;i++) ma12 += C[i]; ma12 /= 12.0;
    double slope = ma3 - ma12;

    // Realized volatility: stdev of 1-bar log returns over 20 bars.
    double m=0, s=0;
    for(int i=0;i<20;i++){ double ri = log(C[i]/C[i+1]); m += ri; }
    m /= 20.0;
    for(int i=0;i<20;i++){ double ri = log(C[i]/C[i+1]); double d = ri - m; s += d*d; }
    double vol = sqrt(s/20.0);

    double mom = retN;                 // momentum = medium-horizon return
    double rsiProxy = tanh(10.0*retN); // bounded RSI-like proxy

    // Bollinger position: z-score of the close against its 20-bar band,
    // squashed through tanh as a range-pressure signal.
    double m20=0, s20=0;
    for(int i=0;i<20;i++) m20 += C[i];
    m20 /= 20.0;
    for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
    s20 = sqrt(s20/20.0) + 1e-12;
    double bollPos = (c0 - m20)/s20;
    double rangePress = tanh(0.5*bollPos);

    // Vol-of-vol proxy: stdev of 1-bar price differences over 20 bars.
    double m20b=0, s20b=0;
    for(int i=0;i<20;i++) m20b += (C[i]-C[i+1]);
    m20b /= 20.0;
    for(int i=0;i<20;i++){ double d=(C[i]-C[i+1])-m20b; s20b += d*d; }
    double volOfVol = sqrt(s20b/20.0);

    // (A |ret1| "flow proxy" used to be computed here but was never pushed
    // into any feature buffer — dead code, removed.)

    // Markov-style persistence: share of up bars over 20, recentered to [-1,1].
    int up=0;
    for(int i=0;i<20;i++) if(C[i] > C[i+1]) up++;
    double pBull = (double)up/20.0;
    double markovProxy = 2.0*(pBull - 0.5);

    pairG[a].pushFeature(0, ret1);
    pairG[a].pushFeature(1, retN);
    pairG[a].pushFeature(2, slope);
    pairG[a].pushFeature(3, vol);
    pairG[a].pushFeature(4, mom);
    pairG[a].pushFeature(5, rsiProxy);
    pairG[a].pushFeature(6, rangePress);
    pairG[a].pushFeature(7, volOfVol);
    pairG[a].pushFeature(8, markovProxy);

    pairG[a].pushFeaturePrev(0, ret1);
  }

  // Flatten per-pair diagnostics (8 values each) plus the 5 global signals
  // into stateVector, in the fixed order below.
  void buildStateVector() {
    int idx = 0;
    for(int a=0;a<N_ASSET_NAMES;a++) {
      stateVector[idx++] = pairG[a].pf.compactness;
      stateVector[idx++] = pairG[a].pf.meanEdge;
      stateVector[idx++] = pairG[a].pf.entropy;
      stateVector[idx++] = pairG[a].pf.volCentrality;
      stateVector[idx++] = pairG[a].pf.regimeProb;
      stateVector[idx++] = pairG[a].pf.pBullNext;
      stateVector[idx++] = pairG[a].pf.entropyStat;
      stateVector[idx++] = pairG[a].pf.bandwidth;
    }
    stateVector[idx++] = metaG.couplingPenalty;
    stateVector[idx++] = metaG.clusterScore;
    stateVector[idx++] = metaG.usdExposure;
    stateVector[idx++] = computeRegimeCompactness();
    stateVector[idx++] = Bar;
  }

  // Per-bar driver: always update features, but rebuild graphs, rescore,
  // and rerank only every UPDATE_EVERY bars.
  void onBar(){
    for(int a=0;a<N_ASSET_NAMES;a++) updateFeaturesForAsset(a);
    if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
    if(lastUpdateBar == Bar) return; // guard against double execution on one bar
    lastUpdateBar = Bar;

    for(int a=0;a<N_ASSET_NAMES;a++) pairG[a].rebuildIfReady();
    metaG.rebuild(pairG);

    double Creg = computeRegimeCompactness();
    double Pcouple = metaG.couplingPenalty;

    // Dominance score: reward pair compactness (gamma) and regime
    // compactness (alpha), penalize market coupling (beta); sigmoid-normalize.
    double score[N_ASSET_NAMES];
    int idx[N_ASSET_NAMES];
    for(int a=0;a<N_ASSET_NAMES;a++){
      double CA = pairG[a].pf.compactness;
      double x = alpha*Creg + gamma*CA - beta*Pcouple;
      score[a] = sigmoid(x);
      idx[a] = a;
    }

    // Partial selection sort: only the first TOP_K slots of idx need ordering.
    for(int i=0;i<TOP_K;i++){
      for(int j=i+1;j<N_ASSET_NAMES;j++){
        if(score[idx[j]] > score[idx[i]]){
          int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
        }
      }
    }

    buildStateVector();

    // Periodic diagnostic dump of the global signals and the Top-K ranking.
    if((Bar % 50) == 0){
      printf("\n[CompactDominant_v2] Bar=%d Creg=%.4f Pcouple=%.4f Cluster=%.4f USDExp=%.4f",
        Bar, Creg, Pcouple, metaG.clusterScore, metaG.usdExposure);
      for(int k=0;k<TOP_K;k++){
        int a = idx[k];
        printf("\n  #%d %s CA=%.4f Ent=%.4f BW=%.4f Score=%.4f",
          k+1, ASSET_NAMES[a], pairG[a].pf.compactness, pairG[a].pf.entropyStat, pairG[a].pf.bandwidth, score[a]);
      }
    }
  }
};

// Singleton strategy instance: created on INITRUN, destroyed on EXITRUN.
static CompactDominantStrategyV2* S = 0;

// Zorro entry point, called once per bar (and once each for INITRUN/EXITRUN).
DLLFUNC void run()
{
  if(is(INITRUN)){
    BarPeriod = 60;                              // hourly bars
    LookBack = max(LookBack, FEAT_WINDOW + 50);  // guarantee enough history for the feature window
    asset((char*)ASSET_NAMES[0]);                // select a default asset for the session
    if(!S){ S = new CompactDominantStrategyV2(); S->init(); }  // guard keeps repeated INITRUNs from leaking
  }
  if(is(EXITRUN)){
    if(S){ S->shutdown(); delete S; S = 0; }
    return;
  }
  if(!S || Bar < LookBack) return;  // skip bars until the strategy is warmed up
  S->onBar();
}

Last edited by TipmyPip; 02/23/26 19:02.