|
3 registered members (TipmyPip, Grant, 1 invisible),
5,556
guests, and 2
spiders. |
|
Key:
Admin,
Global Mod,
Mod
|
|
|
Decision Making Graph Geometry
[Re: TipmyPip]
#489205
02/21/26 19:15
02/21/26 19:15
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
This strategy is an abstract example of decision-making from coupled graph geometry. It models the market using two different weighted graphs that represent two distinct kinds of structure: 1) A regime dynamics graph (Graph A). Graph A represents the market as a finite set of latent regimes (states). A directed edge from state i to state j encodes the likelihood that the market transitions from i to j. Probabilities p_ij are converted into edge “lengths” using an information-geometric map: w_ij = -log(p_ij). This turns the transition system into a metric-like space where likely transitions are short and unlikely transitions are long. Once edge lengths are defined, Floyd–Warshall computes shortest path distances between all state pairs, allowing indirect multi-step transitions to determine effective connectivity. The strategy then summarizes the global geometry of the regime space using a Wiener-like index—the sum of distances across all unordered state pairs. Conceptually, this measures whether the regime system is compact (states are mutually reachable through short, high-probability pathways) or diffuse (states are separated by long or improbable paths). A compact regime graph corresponds to more orderly, predictable evolution; a diffuse one corresponds to fragmented, noisy evolution. 2) An asset coupling graph (Graph B) Graph B represents relationships among assets as an undirected weighted graph. Here, edge weights are derived from correlation: w_ij = 1 - |corr_ij|. Strongly correlated assets are “close” (small distance), while weakly related assets are “far.” Again, shortest paths produce an effective distance structure, and a Wiener-like sum over all asset pairs becomes a scalar measure of system-wide coupling. Low Wiener (tight graph) means assets behave as a single cluster—diversification is weak, and portfolio risk concentrates. Higher Wiener implies more separation and potentially better diversification.
3) Coupling the graphs into a single control variable The strategy’s mathematical core is a coupling rule that treats: Regime compactness as a proxy for signal quality / predictability, and Asset coupling as a proxy for systemic risk / lack of diversification. It combines them through a logistic squashing function: Score = sigmoid(alpha * Compactness(StateGraph) - beta * Coupling(AssetGraph)). The sigmoid maps the result into [0,1], producing a risk throttle. Abstractly, the throttle increases when the regime space is geometrically “tight” (predictable transitions) and decreases when the asset space is geometrically “tight” (high correlation concentration). The outcome is not a specific entry/exit rule, but a mathematically grounded allocator: a mechanism that scales risk based on two interacting notions of structure—temporal structure in regime evolution and cross-sectional structure in asset dependence. In short, the code demonstrates how global graph metrics (via shortest-path geometry and Wiener-like sums) can be used as compact, interpretable features to modulate trading aggressiveness in a principled, system-level way.
// TGr05.cpp - Zorro64 Strategy DLL (C++) - Two coupled graphs for algo trading
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define INF 1e30
// =============================== Graph (OO) ===============================
// Dense weighted graph stored as a flat n*n distance matrix d (row-major).
// After allPairsShortest() the matrix holds all-pairs shortest distances.
// The buffer is malloc-owned and released in the destructor; copying is
// therefore deleted (Rule of Three) — a compiler-generated copy would
// duplicate the raw 'd' pointer and double-free on destruction.
class WeightedGraph {
public:
    int n = 0;     // node count
    double* d = 0; // adjacency/shortest-path matrix (n*n); INF = no edge
    WeightedGraph(int N) { init(N); }
    ~WeightedGraph() { shutdown(); }
    WeightedGraph(const WeightedGraph&) = delete;            // owns raw memory
    WeightedGraph& operator=(const WeightedGraph&) = delete;
    // (Re)allocate an N*N matrix and reset it to "no edges".
    void init(int N) {
        shutdown();
        n = N;
        d = (double*)malloc((size_t)n*(size_t)n*sizeof(double));
        if(!d) quit("OOM: WeightedGraph matrix");
        reset();
    }
    void shutdown() {
        if(d) free(d);
        d = 0;
        n = 0;
    }
    inline int pos(int r,int c) const { return r*n + c; }
    // Diagonal 0, everything else INF (no direct edge).
    void reset() {
        for(int r=0;r<n;r++)
            for(int c=0;c<n;c++)
                d[pos(r,c)] = (r==c) ? 0.0 : INF;
    }
    // Floyd-Warshall all-pairs shortest paths, relaxed in place.
    void allPairsShortest() {
        for(int k=0;k<n;k++)
            for(int i=0;i<n;i++)
                for(int j=0;j<n;j++) {
                    double cand = d[pos(i,k)] + d[pos(k,j)];
                    if(cand < d[pos(i,j)]) d[pos(i,j)] = cand;
                }
    }
    // Wiener-like sum over unordered pairs; averages d(i,j) and d(j,i)
    // so directed (asymmetric) graphs are symmetrized.
    double wienerUndirectedLike() const {
        double W = 0.0;
        for(int i=0;i<n;i++)
            for(int j=i+1;j<n;j++) {
                double dij = d[pos(i,j)];
                double dji = d[pos(j,i)];
                W += 0.5*(dij + dji);
            }
        return W;
    }
    // Debug print of the full matrix; unreachable entries shown as INF.
    void dump(const char* title) const {
        printf("\n%s", title);
        for(int r=0;r<n;r++) {
            printf("\n");
            for(int c=0;c<n;c++) {
                double v = d[pos(r,c)];
                if(v > 1e20) printf(" INF ");
                else printf("%5.2f ", v);
            }
        }
    }
};
// ========================== Graph A: Markov State Graph ===================
// Edge weight = -log(p_ij) (probability -> distance)
// Graph A: latent market regimes as a Markov chain. Transition
// probabilities are mapped to edge lengths via w = -log(p), so likely
// transitions become short edges and unlikely ones long edges.
class RegimeMarkovGraph {
public:
    WeightedGraph G;
    RegimeMarkovGraph(int states) : G(states) {}
    // p in (0,1] -> distance in [0,inf); p <= 0 means "no edge" (INF).
    static double probToDist(double p) {
        if(p <= 0.0) return INF;
        if(p > 1.0) p = 1.0;
        return -log(p);
    }
    void setTransitionProb(int i,int j,double p) {
        G.d[G.pos(i,j)] = probToDist(p);
    }
    // Load a full 4x4 transition-probability table into the graph.
    void fillFrom4(const double P[4][4]) {
        G.reset();
        for(int r=0;r<4;r++)
            for(int c=0;c<4;c++)
                setTransitionProb(r,c,P[r][c]);
    }
    // Toy preset: persistent, trend-like regimes (heavy diagonal).
    void buildDirectional4() {
        static const double P[4][4] = {
            {0.65,0.30,0.04,0.01},
            {0.25,0.60,0.12,0.03},
            {0.03,0.12,0.60,0.25},
            {0.01,0.04,0.30,0.65}
        };
        fillFrom4(P);
    }
    // Toy preset: near-uniform transitions (choppy, unpredictable).
    void buildChoppy4() {
        static const double P[4][4] = {
            {0.28,0.24,0.25,0.23},
            {0.23,0.27,0.26,0.24},
            {0.24,0.26,0.27,0.23},
            {0.25,0.23,0.24,0.28}
        };
        fillFrom4(P);
    }
    // Compactness: lower Wiener => more compact; squashed into (0,1].
    double compactnessScore() {
        G.allPairsShortest();
        return 1.0/(1.0 + G.wienerUndirectedLike());
    }
};
// ========================== Graph B: Asset Relationship Graph =============
// Edge weight = 1 - |corr| (strong corr => short distance)
// Graph B: undirected asset-relationship graph. Edge weight 1 - |corr|
// makes strongly correlated assets "close" and unrelated ones "far".
class AssetCorrelationGraph {
public:
    WeightedGraph G;
    AssetCorrelationGraph(int assets) : G(assets) {}
    // |corr| clamped to [0,1], then mapped to a distance in [0,1].
    static double corrToDist(double corr) {
        double mag = fabs(corr);
        return (mag > 1.0) ? 0.0 : 1.0 - mag;
    }
    // Symmetric edge update for an undirected correlation link.
    void setCorr(int i,int j,double corr) {
        double dist = corrToDist(corr);
        G.d[G.pos(i,j)] = dist;
        G.d[G.pos(j,i)] = dist;
    }
    // Toy 3-asset FX structure: 0=EUR/USD, 1=GBP/USD, 2=USD/JPY.
    void buildToyFX() {
        G.reset(); // diagonal already 0 after reset
        setCorr(0,1, 0.85); // EURUSD~GBPUSD: high positive corr -> short distance
        setCorr(0,2,-0.30); // EURUSD~USDJPY: mild negative -> larger distance
        setCorr(1,2,-0.25); // GBPUSD~USDJPY: mild negative
    }
    // Coupling: lower Wiener => tighter coupling; squashed into (0,1]
    // so tightly coupled universes yield a higher risk penalty.
    double couplingPenalty() {
        G.allPairsShortest();
        return 1.0/(1.0 + G.wienerUndirectedLike());
    }
};
// ========================== Strategy: Coupled Two-Graph Logic =============
// Couples the two graphs into one risk throttle:
//   throttle = sigmoid(alpha*compactness(RG) - beta*coupling(AG))
// Higher regime compactness raises the throttle; tighter asset coupling
// lowers it. demo() contrasts a directional and a choppy regime preset.
class TwoGraphStrategy {
public:
// Hyperparameters
double alpha = 2.0; // weight for regime compactness
double beta = 1.5; // weight for asset coupling penalty
double baseRisk = 1.0; // base lot/risk scale used in explain()
// Graphs
RegimeMarkovGraph RG;
AssetCorrelationGraph AG;
// 4 latent regimes, 3 assets (toy sizes matching the presets below).
TwoGraphStrategy() : RG(4), AG(3) {}
static double sigmoid(double x) {
// stable-ish sigmoid
if(x > 30) return 1.0;
if(x < -30) return 0.0;
return 1.0/(1.0 + exp(-x));
}
// In a real system:
// - RG transitions come from online Markov counts over price-action states
// - AG correlations come from rolling returns correlations of assets
// Here: toy graphs to demonstrate the coupling logic.
void buildToyInputs(int useDirectional) {
if(useDirectional) RG.buildDirectional4();
else RG.buildChoppy4();
AG.buildToyFX();
}
// Coupling between two graphs in "mathematical context":
// we treat RG.compactness as signal quality and AG.coupling as diversification risk.
double riskThrottle() {
double comp = RG.compactnessScore(); // higher = better / more predictable
double coup = AG.couplingPenalty(); // higher = more coupled / more dangerous
// combined score -> throttle in [0,1]
double x = alpha*comp - beta*coup;
return sigmoid(x);
}
// Prints the component scores and a suggested lot scale.
// Note: recomputes both scores; this re-runs Floyd-Warshall, which is
// idempotent on an already-relaxed matrix, so values match riskThrottle().
void explain(double thr) {
printf("\n\n[TwoGraph] Regime compactness score = %.4f", RG.compactnessScore());
printf("\n[TwoGraph] Asset coupling penalty = %.4f", AG.couplingPenalty());
printf("\n[TwoGraph] Risk throttle (0..1) = %.4f", thr);
// Lot scale is floored at 25% of baseRisk, scaled up to 100% by thr.
printf("\n[TwoGraph] Suggested Lots scale = %.3f", baseRisk * (0.25 + 0.75*thr));
}
// Demo run: compare two regimes (directional vs choppy) under same asset coupling
void demo() {
// Case A: directional
buildToyInputs(1);
double thrA = riskThrottle();
printf("\n=== CASE A: Directional regime ===");
RG.G.dump("\nRegime graph shortest distances:");
AG.G.dump("\nAsset graph shortest distances:");
explain(thrA);
// Case B: choppy
buildToyInputs(0);
double thrB = riskThrottle();
printf("\n\n=== CASE B: Choppy regime ===");
RG.G.dump("\nRegime graph shortest distances:");
AG.G.dump("\nAsset graph shortest distances:");
explain(thrB);
printf("\n\nInterpretation:");
printf("\n- If regime transitions are compact (predictable), throttle increases.");
printf("\n- If assets are tightly coupled (low diversification), throttle decreases.");
printf("\n- The trading engine can use throttle to scale order size / aggression.");
}
};
// =============================== Entry ===================================
static TwoGraphStrategy* S = 0;
// Zorro entry point. One-shot demo: on the init run, build the strategy,
// print the two-case comparison, release it, and end the session.
// Fix: S was previously never deleted (leaked past quit()); the other
// strategies in this family tear down their state, so do the same here.
DLLFUNC void run()
{
    if(is(INITRUN))
    {
        if(!S) S = new TwoGraphStrategy();
        S->demo();
        delete S; S = 0; // release before ending the session
        quit("Done.");
    }
}
Last edited by TipmyPip; 02/21/26 20:08.
|
|
|
KnotScope FX
[Re: TipmyPip]
#489210
02/23/26 18:47
02/23/26 18:47
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
KnotScope FX is a multi-asset ranking strategy designed to find currency pairs whose internal behavior is unusually “compact” — meaning their own feature set moves in a tightly coordinated way. Instead of predicting direction directly, it scores each pair by how structurally consistent its recent dynamics are, then reports the top candidates. On every bar (set to 60 minutes), the strategy cycles through a fixed universe of 28 major FX pairs. For each pair it computes nine lightweight features from recent closes, including short and medium log-returns, a moving-average slope proxy, rolling volatility, momentum, an RSI-like proxy, a Bollinger-style position/range pressure measure, a simple flow proxy (absolute return), and a Markov-style persistence proxy (ratio of up closes). Each feature is pushed into a rolling window (200 samples), forming a small “behavioral fingerprint” for that pair. Once enough history is accumulated, the strategy builds a feature graph per pair: every feature is a node, and edges are weighted by distance derived from correlation magnitude (high absolute correlation -> small distance). It then runs an all-pairs shortest path routine and computes a Wiener-like sum of path lengths. The result is converted into a compactness score: lower overall graph distance implies higher compactness. Intuitively, this favors pairs whose internal indicators agree with each other instead of drifting independently. A second, much lighter layer computes a meta coupling penalty across pairs, based only on correlations of the first feature stream (the 1-bar return). This creates a “universe graph” that estimates how tightly the market is moving together. However, the strategy is intentionally compactness-dominant: the meta term is down-weighted so it only gently discourages selection when everything is moving as one. Finally, a simple regime factor is blended in, and each pair gets a score passed through a sigmoid to keep values bounded and comparable. 
The strategy then ranks all pairs and prints the top 5 periodically, showing their compactness and final score. In short: KnotScope FX hunts for pairs with the cleanest, most internally coherent structure, while keeping cross-market influence minimal. // TGr06A_CompactDominant.cpp - Zorro64 Strategy DLL (C++)
// Strategy A: Compactness-Dominant
// Focuses purely on per-pair compactness with minimal meta-graph influence.
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define INF 1e30
#define EPS 1e-12
// Trading universe: all 28 pairs of the 8 major currencies.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD","GBPAUD",
"GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD","USDCAD",
"CHFJPY" // was missing: table held 27 names while N_ASSET_NAMES is 28,
         // so every loop bounded by N_ASSET_NAMES read past the array end
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;        // feature streams per pair
static const int FEAT_WINDOW = 200; // rolling window per feature
static const int META_WINDOW = 200; // window for cross-pair coupling correlation
static const int UPDATE_EVERY = 5;  // rebuild graphs every N bars
static const int TOP_K = 5;         // pairs reported in the ranking
// Compactness-dominant weights: minimal coupling penalty
static const double alpha = 0.1; // weight of global regime term
static const double beta = 0.2;  // weight of universe coupling penalty
static const double gamma = 3.0; // weight of per-pair compactness
// Clamp x into the unit interval [0,1].
static double clamp01(double x) { return (x < 0) ? 0 : ((x > 1) ? 1 : x); }
// Logistic function with hard clamps at |x| > 30 so exp() never
// overflows/underflows into meaningless results.
static double sigmoid(double x) {
    double y;
    if(x > 30)
        y = 1.0;
    else if(x < -30)
        y = 0.0;
    else
        y = 1.0/(1.0 + exp(-x));
    return y;
}
// Fixed-capacity ring buffer of doubles; get(0) is the newest sample.
// Owns its storage via malloc/free. Copying is deleted (Rule of Three):
// the compiler-generated copy would duplicate the raw 'x' pointer and
// cause a double free when both copies are destroyed.
class RollingBuffer {
public:
    int cap = 0;   // capacity (window length)
    int n = 0;     // valid sample count, saturates at cap
    int head = 0;  // next write position
    double* x = 0; // backing array of cap doubles
    RollingBuffer() {}
    ~RollingBuffer(){ shutdown(); }
    RollingBuffer(const RollingBuffer&) = delete;            // owns raw memory
    RollingBuffer& operator=(const RollingBuffer&) = delete;
    // Allocate a window of L samples; discards any previous content.
    void init(int L){
        shutdown();
        cap = L;
        x = (double*)malloc((size_t)cap*sizeof(double));
        if(!x) quit("OOM: RollingBuffer");
        n = 0; head = 0;
    }
    void shutdown(){
        if(x) free(x);
        x = 0; cap = 0; n = 0; head = 0;
    }
    // Append v, overwriting the oldest sample once full; no-op if uninitialized.
    void push(double v){
        if(!x || cap<=0) return;
        x[head] = v;
        head = (head + 1) % cap;
        if(n < cap) n++;
    }
    // Value i steps back from the newest (i = 0 -> newest); caller keeps i < n.
    double get(int i) const {
        int idx = head - 1 - i;
        while(idx < 0) idx += cap;
        return x[idx % cap];
    }
};
// Pearson correlation of the L most recent samples of two rolling buffers.
// Returns 0 when either buffer holds fewer than L samples, when L is too
// small to be meaningful, or when either series is numerically constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L)
{
    if(a.n < L || b.n < L || L < 5) return 0.0;
    double meanA = 0.0, meanB = 0.0;
    for(int k=0;k<L;k++){
        meanA += a.get(k);
        meanB += b.get(k);
    }
    meanA /= (double)L;
    meanB /= (double)L;
    double varA = 0.0, varB = 0.0, cov = 0.0;
    for(int k=0;k<L;k++){
        double da = a.get(k) - meanA;
        double db = b.get(k) - meanB;
        varA += da*da;
        varB += db*db;
        cov  += da*db;
    }
    double norm = sqrt(varA*varB);
    if(norm <= EPS) return 0.0;
    return cov/norm;
}
// Dense weighted graph as a flat n*n distance matrix (row-major).
// After allPairsShortest() the matrix holds all-pairs shortest distances.
// malloc-owned buffer freed in the destructor; copying is deleted
// (Rule of Three) because a shallow copy of 'd' would double-free.
class WeightedGraph {
public:
    int n = 0;     // node count
    double* d = 0; // n*n distance matrix; INF = no edge
    WeightedGraph() {}
    ~WeightedGraph(){ shutdown(); }
    WeightedGraph(const WeightedGraph&) = delete;            // owns raw memory
    WeightedGraph& operator=(const WeightedGraph&) = delete;
    // (Re)allocate an N*N matrix and reset it to "no edges".
    void init(int N){
        shutdown();
        n = N;
        d = (double*)malloc((size_t)n*(size_t)n*sizeof(double));
        if(!d) quit("OOM: WeightedGraph");
        reset();
    }
    void shutdown(){
        if(d) free(d);
        d = 0; n = 0;
    }
    inline int pos(int r,int c) const { return r*n + c; }
    // Diagonal 0, everything else INF (no direct edge).
    void reset(){
        for(int r=0;r<n;r++)
            for(int c=0;c<n;c++)
                d[pos(r,c)] = (r==c) ? 0.0 : INF;
    }
    // Floyd-Warshall all-pairs shortest paths, relaxed in place.
    void allPairsShortest(){
        for(int k=0;k<n;k++)
            for(int i=0;i<n;i++)
                for(int j=0;j<n;j++){
                    double cand = d[pos(i,k)] + d[pos(k,j)];
                    if(cand < d[pos(i,j)]) d[pos(i,j)] = cand;
                }
    }
    // Wiener-like sum over unordered pairs, symmetrized for directed graphs.
    double wienerUndirectedLike() const {
        double W=0;
        for(int i=0;i<n;i++)
            for(int j=i+1;j<n;j++){
                double dij = d[pos(i,j)];
                double dji = d[pos(j,i)];
                W += 0.5*(dij + dji);
            }
        return W;
    }
};
// Per-pair "aspect graph": nodes are the FEAT_N feature streams, edges
// carry correlation-derived distances (1 - |corr|). The Wiener-like sum
// of all shortest paths is squashed into a compactness score in (0,1]:
// the more coherently the pair's features co-move, the higher the score.
class PairAspectGraph {
public:
    WeightedGraph G;
    RollingBuffer feat[FEAT_N];
    double compactness = 0.0; // last computed score; 0 until windows are full
    PairAspectGraph() {}
    void init(){
        G.init(FEAT_N);
        for(int f=0;f<FEAT_N;f++) feat[f].init(FEAT_WINDOW);
    }
    void shutdown(){
        for(int f=0;f<FEAT_N;f++) feat[f].shutdown();
        G.shutdown();
    }
    // |corr| clamped to [0,1] and mapped to a distance in [0,1].
    static double corrToDist(double corr){
        double mag = fabs(corr);
        return (mag > 1.0) ? 0.0 : 1.0 - mag;
    }
    void pushFeature(int k, double v){ feat[k].push(v); }
    // Recompute compactness once the feature windows are full; else 0.
    void rebuildIfReady(){
        if(feat[0].n < FEAT_WINDOW) { compactness = 0.0; return; }
        G.reset();
        for(int u=0;u<FEAT_N;u++)
            for(int v=u+1;v<FEAT_N;v++){
                double dist = corrToDist(corrOf(feat[u], feat[v], FEAT_WINDOW));
                G.d[G.pos(u,v)] = dist;
                G.d[G.pos(v,u)] = dist;
            }
        G.allPairsShortest();
        compactness = 1.0/(1.0 + G.wienerUndirectedLike());
    }
};
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty = 0;
PairUniverseGraph(){}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
static double corrToDist(double corr){
double a = fabs(corr);
if(a > 1.0) a = 1.0;
return 1.0 - a;
}
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
double w = corrToDist(c);
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
}
};
// Placeholder regime term: alternates by bar parity between
// 1/(1+1.0) = 0.5 (even bars) and 1/(1+2.5) (odd bars).
// Stands in for a real regime-graph compactness measure.
static double computeRegimeCompactness()
{
double W = ((Bar % 2) == 0) ? 1.0 : 2.5;
return 1.0/(1.0 + W);
}
// Ranks the FX universe every UPDATE_EVERY bars:
//   score(a) = sigmoid(alpha*Creg + gamma*compactness(a) - beta*Pcouple)
// where Creg is the global regime term, compactness(a) comes from the
// pair's feature graph, and Pcouple measures universe synchronization.
class CompactDominantStrategy {
public:
PairAspectGraph pairG[N_ASSET_NAMES]; // per-pair feature graphs
PairUniverseGraph metaG; // cross-pair coupling graph
int lastUpdateBar = -999999; // last bar on which graphs were rebuilt
void init(){
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
metaG.init();
}
void shutdown(){
metaG.shutdown();
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
}
// Compute 9 features from asset a's close series and push them into its
// rolling buffers. Reads up to C[20], so 21 closes are needed; run()
// only calls onBar() after LookBack bars. series() is still registered
// every bar via the call below, as Zorro requires.
void updateFeaturesForAsset(int a)
{
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return; // not enough bars for the 20-bar windows yet
double c0 = C[0];
double c1 = C[1];
double c12 = C[12];
double ret1 = log(c0/c1); // feature 0: 1-bar log return
double retN = log(c0/c12); // 12-bar log return (features 1 and 4)
double ma3 = (C[0]+C[1]+C[2]) / 3.0;
double ma12 = 0;
for(int i=0;i<12;i++) ma12 += C[i];
ma12 /= 12.0;
double slope = ma3 - ma12; // feature 2: fast-vs-slow MA spread
double m=0, s=0;
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
m += ri;
}
m /= 20.0;
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
double d = ri - m;
s += d*d;
}
double vol = sqrt(s/20.0); // feature 3: stdev of 1-bar log returns
double mom = retN; // feature 4: NOTE(review) identical stream to feature 1
double rsiProxy = tanh(10.0*retN); // feature 5: squashed momentum, RSI-like
double m20=0, s20=0;
for(int i=0;i<20;i++) m20 += C[i];
m20 /= 20.0;
for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
s20 = sqrt(s20/20.0) + 1e-12; // +eps guards the division below
double bollPos = (c0 - m20)/s20; // z-score of close vs 20-bar mean
double rangePress = tanh(0.5*bollPos); // feature 6: bounded band position
double flowProxy = fabs(ret1); // feature 7: absolute 1-bar move
int up=0;
for(int i=0;i<20;i++){
if(C[i] > C[i+1]) up++;
}
double pBull = (double)up/20.0;
double markovProxy = 2.0*(pBull - 0.5); // feature 8: up-close bias in [-1,1]
pairG[a].pushFeature(0, ret1);
pairG[a].pushFeature(1, retN);
pairG[a].pushFeature(2, slope);
pairG[a].pushFeature(3, vol);
pairG[a].pushFeature(4, mom);
pairG[a].pushFeature(5, rsiProxy);
pairG[a].pushFeature(6, rangePress);
pairG[a].pushFeature(7, flowProxy);
pairG[a].pushFeature(8, markovProxy);
}
// Per-bar driver: always refresh features; every UPDATE_EVERY bars
// rebuild all graphs, recompute scores, and log the TOP_K ranking.
void onBar()
{
for(int a=0;a<N_ASSET_NAMES;a++)
updateFeaturesForAsset(a);
if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
if(lastUpdateBar == Bar) return; // already rebuilt on this bar
lastUpdateBar = Bar;
for(int a=0;a<N_ASSET_NAMES;a++)
pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg = computeRegimeCompactness(); // global regime term (placeholder)
double Pcouple = metaG.couplingPenalty; // universe synchronization penalty
double score[N_ASSET_NAMES];
int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA = pairG[a].compactness;
double x = alpha*Creg + gamma*CA - beta*Pcouple;
score[a] = sigmoid(x);
idx[a] = a;
}
// Partial selection sort: only the first TOP_K slots need to be ordered.
for(int i=0;i<TOP_K;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
if(score[idx[j]] > score[idx[i]]){
int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
}
}
}
// Log the ranking every 50 bars.
if((Bar % 50) == 0){
printf("\n[CompactDominant] Bar=%d Creg=%.4f Pcouple=%.4f", Bar, Creg, Pcouple);
for(int k=0;k<TOP_K;k++){
int a = idx[k];
printf("\n #%d %s CA=%.4f Score=%.4f",
k+1, ASSET_NAMES[a], pairG[a].compactness, score[a]);
}
}
}
};
static CompactDominantStrategy* S = 0;
// Zorro entry point. INITRUN: configure bar period and lookback, select a
// default asset, construct and init the strategy. EXITRUN: tear it down.
// Normal bars: delegate to onBar() once the lookback phase has passed.
DLLFUNC void run()
{
if(is(INITRUN))
{
BarPeriod = 60; // hourly bars
LookBack = max(LookBack, FEAT_WINDOW + 50); // ensure series cover the windows
asset((char*)ASSET_NAMES[0]); // ensure at least one asset is selected
if(!S) {
S = new CompactDominantStrategy();
S->init();
}
}
if(is(EXITRUN))
{
if(S){
S->shutdown();
delete S;
S = 0;
}
return;
}
if(!S) return;
if(Bar < LookBack) return; // wait until enough history exists
S->onBar();
}
|
|
|
CrowdAverse
[Re: TipmyPip]
#489211
02/23/26 18:50
02/23/26 18:50
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
CrowdAverse is a multi-asset selection strategy designed to avoid crowded, highly synchronized markets and instead focus attention on currency pairs whose recent behavior looks more distinctive and internally consistent. It runs as a 64-bit Zorro Strategy DLL and evaluates a fixed universe of 28 major FX pairs on an hourly bar schedule (BarPeriod = 60). Rather than generating direct trade entries in this snippet, it produces a ranked short list (TOP_K = 5) of the most “attractive” pairs according to a crowd-avoidance score. The strategy builds two layers of graph-based structure. First, each individual pair maintains a rolling feature history (window = 200) across nine engineered signals: short and medium log returns, moving-average slope, realized volatility, momentum, an RSI-like proxy (tanh-scaled return), Bollinger-style position and range pressure, a simple “flow” proxy (absolute 1-bar return), and a Markov-style bias derived from the fraction of up-closes. These features are continuously pushed into rolling buffers, then periodically (every 5 bars) used to compute a pair-specific “aspect graph.” In that graph, edges connect features based on correlation-derived distance (1 - |corr|). After running an all-pairs shortest-path routine, the graph’s total distance (Wiener-like sum) is inverted into a compactness score: the tighter and more coherent the feature relationships, the higher the compactness. Second, the strategy builds a meta “pair universe” graph across all pairs, again using correlation-derived distances (here using one key feature series as the coupling proxy). This produces a global coupling penalty that rises when the overall universe becomes tightly coupled (i.e., many pairs move together). That’s the core “crowd-averse” lever: the strategy heavily penalizes selection during periods of widespread synchronization. 
Finally, a regime term (a simple placeholder in this version) is combined with pair compactness and the coupling penalty through weighted coefficients (alpha = 1.0, gamma = 1.5, beta = 4.0). The result is passed through a sigmoid to map it into a stable 0–1 score, and the top-ranked pairs are logged periodically. In short: prefer coherent individual opportunities, but step away when the whole market starts marching in lockstep. // TGr06B_CrowdAverse.cpp - Zorro64 Strategy DLL (C++)
// Strategy B: Crowd-Averse
// Heavily penalizes crowded pairs via high coupling weight.
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define INF 1e30
#define EPS 1e-12
// Trading universe: all 28 pairs of the 8 major currencies.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD","GBPAUD",
"GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD","USDCAD",
"CHFJPY" // was missing: table held 27 names while N_ASSET_NAMES is 28,
         // so every loop bounded by N_ASSET_NAMES read past the array end
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;        // feature streams per pair
static const int FEAT_WINDOW = 200; // rolling window per feature
static const int META_WINDOW = 200; // window for cross-pair coupling correlation
static const int UPDATE_EVERY = 5;  // rebuild graphs every N bars
static const int TOP_K = 5;         // pairs reported in the ranking
// Crowd-averse weights: high coupling penalty
static const double alpha = 1.0; // weight of global regime term
static const double beta = 4.0;  // weight of universe coupling penalty
static const double gamma = 1.5; // weight of per-pair compactness
// Clamp x into the unit interval [0,1].
static double clamp01(double x) { return (x < 0) ? 0 : ((x > 1) ? 1 : x); }
// Logistic function with hard clamps at |x| > 30 so exp() never
// overflows/underflows into meaningless results.
static double sigmoid(double x) {
    double y;
    if(x > 30)
        y = 1.0;
    else if(x < -30)
        y = 0.0;
    else
        y = 1.0/(1.0 + exp(-x));
    return y;
}
// Fixed-capacity ring buffer of doubles; get(0) is the newest sample.
// Owns its storage via malloc/free. Copying is deleted (Rule of Three):
// the compiler-generated copy would duplicate the raw 'x' pointer and
// cause a double free when both copies are destroyed.
class RollingBuffer {
public:
    int cap = 0;   // capacity (window length)
    int n = 0;     // valid sample count, saturates at cap
    int head = 0;  // next write position
    double* x = 0; // backing array of cap doubles
    RollingBuffer() {}
    ~RollingBuffer(){ shutdown(); }
    RollingBuffer(const RollingBuffer&) = delete;            // owns raw memory
    RollingBuffer& operator=(const RollingBuffer&) = delete;
    // Allocate a window of L samples; discards any previous content.
    void init(int L){
        shutdown();
        cap = L;
        x = (double*)malloc((size_t)cap*sizeof(double));
        if(!x) quit("OOM: RollingBuffer");
        n = 0; head = 0;
    }
    void shutdown(){
        if(x) free(x);
        x = 0; cap = 0; n = 0; head = 0;
    }
    // Append v, overwriting the oldest sample once full; no-op if uninitialized.
    void push(double v){
        if(!x || cap<=0) return;
        x[head] = v;
        head = (head + 1) % cap;
        if(n < cap) n++;
    }
    // Value i steps back from the newest (i = 0 -> newest); caller keeps i < n.
    double get(int i) const {
        int idx = head - 1 - i;
        while(idx < 0) idx += cap;
        return x[idx % cap];
    }
};
// Pearson correlation of the L most recent samples of two rolling buffers.
// Returns 0 when either buffer holds fewer than L samples, when L is too
// small to be meaningful, or when either series is numerically constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L)
{
    if(a.n < L || b.n < L || L < 5) return 0.0;
    double meanA = 0.0, meanB = 0.0;
    for(int k=0;k<L;k++){
        meanA += a.get(k);
        meanB += b.get(k);
    }
    meanA /= (double)L;
    meanB /= (double)L;
    double varA = 0.0, varB = 0.0, cov = 0.0;
    for(int k=0;k<L;k++){
        double da = a.get(k) - meanA;
        double db = b.get(k) - meanB;
        varA += da*da;
        varB += db*db;
        cov  += da*db;
    }
    double norm = sqrt(varA*varB);
    if(norm <= EPS) return 0.0;
    return cov/norm;
}
// Dense weighted graph as a flat n*n distance matrix (row-major).
// After allPairsShortest() the matrix holds all-pairs shortest distances.
// malloc-owned buffer freed in the destructor; copying is deleted
// (Rule of Three) because a shallow copy of 'd' would double-free.
class WeightedGraph {
public:
    int n = 0;     // node count
    double* d = 0; // n*n distance matrix; INF = no edge
    WeightedGraph() {}
    ~WeightedGraph(){ shutdown(); }
    WeightedGraph(const WeightedGraph&) = delete;            // owns raw memory
    WeightedGraph& operator=(const WeightedGraph&) = delete;
    // (Re)allocate an N*N matrix and reset it to "no edges".
    void init(int N){
        shutdown();
        n = N;
        d = (double*)malloc((size_t)n*(size_t)n*sizeof(double));
        if(!d) quit("OOM: WeightedGraph");
        reset();
    }
    void shutdown(){
        if(d) free(d);
        d = 0; n = 0;
    }
    inline int pos(int r,int c) const { return r*n + c; }
    // Diagonal 0, everything else INF (no direct edge).
    void reset(){
        for(int r=0;r<n;r++)
            for(int c=0;c<n;c++)
                d[pos(r,c)] = (r==c) ? 0.0 : INF;
    }
    // Floyd-Warshall all-pairs shortest paths, relaxed in place.
    void allPairsShortest(){
        for(int k=0;k<n;k++)
            for(int i=0;i<n;i++)
                for(int j=0;j<n;j++){
                    double cand = d[pos(i,k)] + d[pos(k,j)];
                    if(cand < d[pos(i,j)]) d[pos(i,j)] = cand;
                }
    }
    // Wiener-like sum over unordered pairs, symmetrized for directed graphs.
    double wienerUndirectedLike() const {
        double W=0;
        for(int i=0;i<n;i++)
            for(int j=i+1;j<n;j++){
                double dij = d[pos(i,j)];
                double dji = d[pos(j,i)];
                W += 0.5*(dij + dji);
            }
        return W;
    }
};
// Per-pair "aspect graph": nodes are the FEAT_N feature streams, edges
// carry correlation-derived distances (1 - |corr|). The Wiener-like sum
// of all shortest paths is squashed into a compactness score in (0,1]:
// the more coherently the pair's features co-move, the higher the score.
class PairAspectGraph {
public:
    WeightedGraph G;
    RollingBuffer feat[FEAT_N];
    double compactness = 0.0; // last computed score; 0 until windows are full
    PairAspectGraph() {}
    void init(){
        G.init(FEAT_N);
        for(int f=0;f<FEAT_N;f++) feat[f].init(FEAT_WINDOW);
    }
    void shutdown(){
        for(int f=0;f<FEAT_N;f++) feat[f].shutdown();
        G.shutdown();
    }
    // |corr| clamped to [0,1] and mapped to a distance in [0,1].
    static double corrToDist(double corr){
        double mag = fabs(corr);
        return (mag > 1.0) ? 0.0 : 1.0 - mag;
    }
    void pushFeature(int k, double v){ feat[k].push(v); }
    // Recompute compactness once the feature windows are full; else 0.
    void rebuildIfReady(){
        if(feat[0].n < FEAT_WINDOW) { compactness = 0.0; return; }
        G.reset();
        for(int u=0;u<FEAT_N;u++)
            for(int v=u+1;v<FEAT_N;v++){
                double dist = corrToDist(corrOf(feat[u], feat[v], FEAT_WINDOW));
                G.d[G.pos(u,v)] = dist;
                G.d[G.pos(v,u)] = dist;
            }
        G.allPairsShortest();
        compactness = 1.0/(1.0 + G.wienerUndirectedLike());
    }
};
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty = 0;
PairUniverseGraph(){}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
static double corrToDist(double corr){
double a = fabs(corr);
if(a>1.0) a = 1.0;
return 1.0 - a;
}
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
double w = corrToDist(c);
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
}
};
// Placeholder regime term: alternates by bar parity between
// 1/(1+1.0) = 0.5 (even bars) and 1/(1+2.5) (odd bars).
// Stands in for a real regime-graph compactness measure.
static double computeRegimeCompactness()
{
double W = ((Bar % 2) == 0) ? 1.0 : 2.5;
return 1.0/(1.0 + W);
}
// Ranks the FX universe every UPDATE_EVERY bars:
//   score(a) = sigmoid(alpha*Creg + gamma*compactness(a) - beta*Pcouple)
// With this file's weights (beta = 4.0) the coupling penalty dominates,
// de-selecting pairs when the whole universe moves in lockstep.
class CrowdAverseStrategy {
public:
PairAspectGraph pairG[N_ASSET_NAMES]; // per-pair feature graphs
PairUniverseGraph metaG; // cross-pair coupling graph
int lastUpdateBar = -999999; // last bar on which graphs were rebuilt
void init(){
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
metaG.init();
}
void shutdown(){
metaG.shutdown();
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
}
// Compute 9 features from asset a's close series and push them into its
// rolling buffers. Reads up to C[20], so 21 closes are needed; run()
// only calls onBar() after LookBack bars. series() is still registered
// every bar via the call below, as Zorro requires.
void updateFeaturesForAsset(int a)
{
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return; // not enough bars for the 20-bar windows yet
double c0 = C[0];
double c1 = C[1];
double c12 = C[12];
double ret1 = log(c0/c1); // feature 0: 1-bar log return
double retN = log(c0/c12); // 12-bar log return (features 1 and 4)
double ma3 = (C[0]+C[1]+C[2]) / 3.0;
double ma12 = 0;
for(int i=0;i<12;i++) ma12 += C[i];
ma12 /= 12.0;
double slope = ma3 - ma12; // feature 2: fast-vs-slow MA spread
double m=0, s=0;
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
m += ri;
}
m /= 20.0;
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
double d = ri - m;
s += d*d;
}
double vol = sqrt(s/20.0); // feature 3: stdev of 1-bar log returns
double mom = retN; // feature 4: NOTE(review) identical stream to feature 1
double rsiProxy = tanh(10.0*retN); // feature 5: squashed momentum, RSI-like
double m20=0, s20=0;
for(int i=0;i<20;i++) m20 += C[i];
m20 /= 20.0;
for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
s20 = sqrt(s20/20.0) + 1e-12; // +eps guards the division below
double bollPos = (c0 - m20)/s20; // z-score of close vs 20-bar mean
double rangePress = tanh(0.5*bollPos); // feature 6: bounded band position
double flowProxy = fabs(ret1); // feature 7: absolute 1-bar move
int up=0;
for(int i=0;i<20;i++){
if(C[i] > C[i+1]) up++;
}
double pBull = (double)up/20.0;
double markovProxy = 2.0*(pBull - 0.5); // feature 8: up-close bias in [-1,1]
pairG[a].pushFeature(0, ret1);
pairG[a].pushFeature(1, retN);
pairG[a].pushFeature(2, slope);
pairG[a].pushFeature(3, vol);
pairG[a].pushFeature(4, mom);
pairG[a].pushFeature(5, rsiProxy);
pairG[a].pushFeature(6, rangePress);
pairG[a].pushFeature(7, flowProxy);
pairG[a].pushFeature(8, markovProxy);
}
// Per-bar driver: always refresh features; every UPDATE_EVERY bars
// rebuild all graphs, recompute scores, and log the TOP_K ranking.
void onBar()
{
for(int a=0;a<N_ASSET_NAMES;a++)
updateFeaturesForAsset(a);
if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
if(lastUpdateBar == Bar) return; // already rebuilt on this bar
lastUpdateBar = Bar;
for(int a=0;a<N_ASSET_NAMES;a++)
pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg = computeRegimeCompactness(); // global regime term (placeholder)
double Pcouple = metaG.couplingPenalty; // universe synchronization penalty
double score[N_ASSET_NAMES];
int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA = pairG[a].compactness;
double x = alpha*Creg + gamma*CA - beta*Pcouple;
score[a] = sigmoid(x);
idx[a] = a;
}
// Partial selection sort: only the first TOP_K slots need to be ordered.
for(int i=0;i<TOP_K;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
if(score[idx[j]] > score[idx[i]]){
int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
}
}
}
// Log the ranking every 50 bars.
if((Bar % 50) == 0){
printf("\n[CrowdAverse] Bar=%d Creg=%.4f Pcouple=%.4f", Bar, Creg, Pcouple);
for(int k=0;k<TOP_K;k++){
int a = idx[k];
printf("\n #%d %s CA=%.4f Score=%.4f",
k+1, ASSET_NAMES[a], pairG[a].compactness, score[a]);
}
}
}
};
static CrowdAverseStrategy* S = 0;
// Zorro entry point. INITRUN: configure bar period and lookback, select a
// default asset, construct and init the strategy. EXITRUN: tear it down.
// Normal bars: delegate to onBar() once the lookback phase has passed.
DLLFUNC void run()
{
if(is(INITRUN))
{
BarPeriod = 60; // hourly bars
LookBack = max(LookBack, FEAT_WINDOW + 50); // ensure series cover the windows
asset((char*)ASSET_NAMES[0]); // ensure at least one asset is selected
if(!S) {
S = new CrowdAverseStrategy();
S->init();
}
}
if(is(EXITRUN))
{
if(S){
S->shutdown();
delete S;
S = 0;
}
return;
}
if(!S) return;
if(Bar < LookBack) return; // wait until enough history exists
S->onBar();
}
|
|
|
RegimeAtlas
[Re: TipmyPip]
#489212
02/23/26 18:53
02/23/26 18:53
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
RegimeAtlas is a regime-switching portfolio selector designed for multi-asset FX trading, built as a Zorro64 strategy DLL in C++. Instead of relying on a single indicator or a fixed set of market rules, it continuously “reads the room” by measuring how internally consistent each asset’s behavior is, and how tightly the overall market universe is moving together. The core idea is simple: when markets shift between trending, choppy, volatile, and mean-reverting conditions, a strategy should adapt by favoring instruments whose recent structure best matches the current regime. Every bar (configured here as 60 minutes), the strategy loops through a broad basket of 28 currency pairs and extracts a compact feature set that captures multiple market dimensions. These features include short- and medium-horizon log returns, a moving-average slope proxy, realized volatility, momentum, an RSI-like compression, Bollinger-style position/range pressure, a flow proxy based on absolute short return, and a simple Markov-style “bullish persistence” measure based on up/down counts. Each feature is stored in a rolling window so that the strategy can compare how the features co-move through time. The distinguishing mechanism is graph-based. For each asset, the strategy builds a “feature graph” where edges represent similarity (via correlation) between feature streams, converted into distances. By running an all-pairs shortest-path calculation, it derives a compactness score: assets whose features form a more cohesive, mutually consistent structure receive higher compactness. In parallel, a “universe graph” evaluates cross-asset coupling using correlations (a proxy for crowding or systemic linkage). High coupling becomes a penalty, discouraging selection when the whole market is moving as one. Finally, a regime score blends three forces with heavy emphasis on the global regime: global compactness (regime), asset compactness, and universe coupling penalty. 
A sigmoid squashes the result into a clean 0–1 ranking, and the strategy prints the top candidates periodically. In short, RegimeAtlas is an adaptive selector: it aims to surface the instruments most structurally aligned with the current market regime while de-emphasizing crowded, highly coupled conditions. // TGr06C_RegimeSwitcher.cpp - Zorro64 Strategy DLL (C++)
// Strategy C: Regime-Switching
// Heavily weights global regime for adaptive market behavior.
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define INF 1e30
#define EPS 1e-12
// Trading universe: the 28 standard FX majors and crosses.
// BUG FIX: the original list held only 27 symbols while N_ASSET_NAMES is 28,
// so every loop over N_ASSET_NAMES read one element past the end of the
// array. The missing 28th pair (CHFJPY) has been added.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","CHFJPY","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD",
"GBPAUD","GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD",
"USDCAD"
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;
static const int FEAT_WINDOW = 200;
static const int META_WINDOW = 200;
static const int UPDATE_EVERY = 5;
static const int TOP_K = 5;
// Regime-switching weights: high alpha
static const double alpha = 4.0;
static const double beta = 1.5;
static const double gamma = 1.5;
static double clamp01(double x) { if(x<0) return 0; if(x>1) return 1; return x; }
// Numerically-safe logistic function: saturates to 0/1 for |x| > 30
// so exp() never overflows or underflows meaningfully.
static double sigmoid(double v) {
    if(v >= -30.0 && v <= 30.0)
        return 1.0/(1.0 + exp(-v));
    return (v > 0.0) ? 1.0 : 0.0;
}
// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed sample, get(1) the one before, etc.
class RollingBuffer {
public:
int cap = 0;    // allocated capacity
int n = 0;      // number of valid samples (<= cap)
int head = 0;   // next write position
double* x = 0;  // malloc'd backing storage
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// Allocate storage for L samples, discarding any previous contents.
void init(int L){
shutdown();
cap = L;
x = (double*)malloc(sizeof(double)*(size_t)cap);
if(!x) quit("OOM: RollingBuffer");
head = 0;
n = 0;
}
// Release storage and return to the empty state.
void shutdown(){
if(x) free(x);
x = 0;
head = 0;
n = 0;
cap = 0;
}
// Append v, overwriting the oldest sample once the buffer is full.
void push(double v){
if(cap <= 0 || !x) return;
x[head] = v;
if(++head == cap) head = 0;
if(n != cap) ++n;
}
// Read the i-th most recent sample (i = 0 is newest).
double get(int i) const {
int k = head - 1 - i;
while(k < 0) k += cap;
return x[k % cap];
}
};
// Pearson correlation of the L most recent samples of two buffers.
// Returns 0 when either buffer has fewer than L samples, when L is too
// short to be meaningful, or when either series is (near-)constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L)
{
if(L < 5) return 0.0;
if(a.n < L || b.n < L) return 0.0;
double sumA = 0.0, sumB = 0.0;
for(int k=0;k<L;k++){
sumA += a.get(k);
sumB += b.get(k);
}
const double meanA = sumA/(double)L;
const double meanB = sumB/(double)L;
double vaa = 0.0, vbb = 0.0, vab = 0.0;
for(int k=0;k<L;k++){
const double da = a.get(k) - meanA;
const double db = b.get(k) - meanB;
vaa += da*da;
vbb += db*db;
vab += da*db;
}
const double norm = sqrt(vaa*vbb);
return (norm > EPS) ? vab/norm : 0.0;
}
// Dense distance matrix over n nodes, with Floyd–Warshall all-pairs
// shortest paths and a Wiener-index style distance summary.
class WeightedGraph {
public:
int n = 0;      // node count
double* d = 0;  // n*n row-major distance matrix
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// Allocate an N x N matrix and reset it (0 on the diagonal, INF elsewhere).
void init(int N){
shutdown();
n = N;
d = (double*)malloc(sizeof(double)*(size_t)n*(size_t)n);
if(!d) quit("OOM: WeightedGraph");
reset();
}
void shutdown(){
if(d) free(d);
d = 0;
n = 0;
}
// Flat index of matrix cell (r,c).
inline int pos(int r,int c) const { return r*n + c; }
// Self-distances 0, all other pairs unreachable until edges are set.
void reset(){
for(int r=0;r<n;r++){
for(int c=0;c<n;c++)
d[pos(r,c)] = (r==c) ? 0.0 : INF;
}
}
// Floyd–Warshall: relax every pair through every intermediate node.
void allPairsShortest(){
for(int mid=0; mid<n; mid++){
for(int r=0; r<n; r++){
const double toMid = d[pos(r,mid)];
for(int c=0; c<n; c++){
const double viaMid = toMid + d[pos(mid,c)];
if(viaMid < d[pos(r,c)]) d[pos(r,c)] = viaMid;
}
}
}
}
// Sum of symmetrized distances over all unordered node pairs.
double wienerUndirectedLike() const {
double total = 0.0;
for(int r=0;r<n;r++){
for(int c=r+1;c<n;c++)
total += 0.5*(d[pos(r,c)] + d[pos(c,r)]);
}
return total;
}
};
// Per-asset "aspect graph": the FEAT_N feature streams act as nodes and
// edge distances are derived from pairwise feature correlation. The
// Wiener-style total distance is mapped to a 0..1 compactness score.
class PairAspectGraph {
public:
WeightedGraph G;
RollingBuffer feat[FEAT_N];
double compactness = 0.0;  // 1/(1+W); higher = more coherent features
PairAspectGraph() {}
void init(){
G.init(FEAT_N);
for(int f=0; f<FEAT_N; f++) feat[f].init(FEAT_WINDOW);
}
void shutdown(){
for(int f=0; f<FEAT_N; f++) feat[f].shutdown();
G.shutdown();
}
// Map |corr| in [0,1] to a distance in [0,1]: similar features => close.
static double corrToDist(double corr){
double m = fabs(corr);
return (m > 1.0) ? 0.0 : 1.0 - m;
}
void pushFeature(int k, double v){
feat[k].push(v);
}
// Once a full window of data is available, rebuild the feature graph,
// run all-pairs shortest paths, and refresh the compactness score.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){
compactness = 0.0;
return;
}
G.reset();
for(int p=0; p<FEAT_N; p++){
for(int q=p+1; q<FEAT_N; q++){
const double dist = corrToDist(corrOf(feat[p], feat[q], FEAT_WINDOW));
G.d[G.pos(p,q)] = dist;
G.d[G.pos(q,p)] = dist;
}
}
G.allPairsShortest();
compactness = 1.0/(1.0 + G.wienerUndirectedLike());
}
};
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty = 0;
PairUniverseGraph(){}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
static double corrToDist(double corr){
double a = fabs(corr);
if(a>1.0) a = 1.0;
return 1.0 - a;
}
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
double w = corrToDist(c);
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
}
};
// Regime compactness term, mapped as 1/(1+W).
// NOTE(review): W alternates between two constants by bar parity — this
// looks like a placeholder for a real regime-graph computation; confirm.
static double computeRegimeCompactness()
{
double W = 2.5;
if((Bar % 2) == 0) W = 1.0;
return 1.0/(1.0 + W);
}
// Top-level strategy object: one feature graph per asset plus a
// cross-asset universe graph. Each bar it refreshes features, then
// (every UPDATE_EVERY bars) rebuilds all graphs and ranks the universe
// by a sigmoid-compressed regime/compactness/coupling score.
class RegimeSwitcherStrategy {
public:
PairAspectGraph pairG[N_ASSET_NAMES];   // per-asset feature graphs
PairUniverseGraph metaG;                // cross-asset coupling graph
int lastUpdateBar = -999999;            // guard: rebuild at most once per bar
void init(){
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
metaG.init();
}
void shutdown(){
metaG.shutdown();
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
}
// Compute the nine per-bar features for asset a and push them into its
// rolling buffers. Switches Zorro's asset context as a side effect.
// NOTE: asset()/series() are called before the Bar<20 guard so the
// series is registered on every bar, as Zorro requires.
void updateFeaturesForAsset(int a)
{
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return;  // need 20 bars of history for the windowed features
double c0 = C[0];
double c1 = C[1];
double c12 = C[12];
double ret1 = log(c0/c1);   // feature 0: 1-bar log return
double retN = log(c0/c12);  // feature 1: 12-bar log return
double ma3 = (C[0]+C[1]+C[2]) / 3.0;
double ma12 = 0;
for(int i=0;i<12;i++) ma12 += C[i];
ma12 /= 12.0;
double slope = ma3 - ma12;  // feature 2: fast-vs-slow MA spread (trend proxy)
double m=0, s=0;
// mean of the last 20 one-bar log returns
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
m += ri;
}
m /= 20.0;
// ... and their variance (population, /20)
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
double d = ri - m;
s += d*d;
}
double vol = sqrt(s/20.0);         // feature 3: realized volatility
double mom = retN;                 // feature 4: momentum (same as retN)
double rsiProxy = tanh(10.0*retN); // feature 5: RSI-like compression of retN
double m20=0, s20=0;
for(int i=0;i<20;i++) m20 += C[i];
m20 /= 20.0;
for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
s20 = sqrt(s20/20.0) + 1e-12;      // epsilon avoids division by zero
double bollPos = (c0 - m20)/s20;   // z-score vs 20-bar mean (Bollinger position)
double rangePress = tanh(0.5*bollPos); // feature 6: squashed band pressure
double flowProxy = fabs(ret1);     // feature 7: absolute short move ("flow")
int up=0;
for(int i=0;i<20;i++){
if(C[i] > C[i+1]) up++;
}
double pBull = (double)up/20.0;
double markovProxy = 2.0*(pBull - 0.5); // feature 8: up/down persistence in [-1,1]
pairG[a].pushFeature(0, ret1);
pairG[a].pushFeature(1, retN);
pairG[a].pushFeature(2, slope);
pairG[a].pushFeature(3, vol);
pairG[a].pushFeature(4, mom);
pairG[a].pushFeature(5, rsiProxy);
pairG[a].pushFeature(6, rangePress);
pairG[a].pushFeature(7, flowProxy);
pairG[a].pushFeature(8, markovProxy);
}
// Per-bar driver: always update features; rebuild graphs and re-rank
// only every UPDATE_EVERY bars.
void onBar()
{
for(int a=0;a<N_ASSET_NAMES;a++)
updateFeaturesForAsset(a);
if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
if(lastUpdateBar == Bar) return;  // avoid double work if called twice per bar
lastUpdateBar = Bar;
for(int a=0;a<N_ASSET_NAMES;a++)
pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg = computeRegimeCompactness();    // global regime term
double Pcouple = metaG.couplingPenalty;      // universe crowding penalty
double score[N_ASSET_NAMES];
int idx[N_ASSET_NAMES];
// Regime-heavy blend: alpha (regime) dominates, asset compactness adds,
// coupling subtracts; sigmoid maps to a 0..1 ranking value.
for(int a=0;a<N_ASSET_NAMES;a++){
double CA = pairG[a].compactness;
double x = alpha*Creg + gamma*CA - beta*Pcouple;
score[a] = sigmoid(x);
idx[a] = a;
}
// Partial selection sort: only the first TOP_K slots are ordered.
for(int i=0;i<TOP_K;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
if(score[idx[j]] > score[idx[i]]){
int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
}
}
}
// Report the current shortlist every 50 bars.
if((Bar % 50) == 0){
printf("\n[RegimeSwitcher] Bar=%d Creg=%.4f Pcouple=%.4f", Bar, Creg, Pcouple);
for(int k=0;k<TOP_K;k++){
int a = idx[k];
printf("\n #%d %s CA=%.4f Score=%.4f",
k+1, ASSET_NAMES[a], pairG[a].compactness, score[a]);
}
}
}
};
static RegimeSwitcherStrategy* S = 0;  // singleton strategy instance
// Zorro entry point, called once per bar plus special INIT/EXIT runs.
// INITRUN allocates the strategy; EXITRUN tears it down; normal bars
// delegate to onBar() once the lookback window is filled.
DLLFUNC void run()
{
if(is(INITRUN))
{
BarPeriod = 60;  // hourly bars
LookBack = max(LookBack, FEAT_WINDOW + 50);  // cover the 200-bar feature window
asset((char*)ASSET_NAMES[0]);  // select a default asset during init
if(!S) {
S = new RegimeSwitcherStrategy();
S->init();
}
}
if(is(EXITRUN))
{
if(S){
S->shutdown();
delete S;
S = 0;
}
return;
}
if(!S) return;
if(Bar < LookBack) return;  // wait for enough history before scoring
S->onBar();
}
|
|
|
VolSieve
[Re: TipmyPip]
#489213
02/23/26 18:56
02/23/26 18:56
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
VolSieve is a volatility-adjusted, multi-asset ranking engine designed to continuously scan a broad FX universe and surface the most “structurally attractive” pairs while automatically de-emphasizing those experiencing excessive turbulence. Rather than relying on a single indicator, the strategy builds a compact, information-rich fingerprint for every currency pair using a small set of price-derived features calculated on each bar. These features capture short- and medium-horizon returns, trend structure, volatility, momentum, mean-reversion pressure, and a simple regime persistence proxy. Each feature is stored in a rolling buffer so the strategy maintains a recent history of how the pair has behaved. The core idea is graph-based: for every pair, VolSieve treats the nine feature streams as nodes in a weighted graph. It measures how strongly features co-move by computing correlations across a fixed window and converts correlation strength into “distance.” Highly related features become close neighbors; unrelated features are farther apart. The strategy then runs an all-pairs shortest-path calculation across this feature graph and summarizes its overall connectivity via a Wiener-style distance total. A more tightly connected graph (lower total distance) is interpreted as higher internal coherence—i.e., the pair’s recent behavior is more consistent across different “views” of the market (returns, trend, pressure, and so on). This becomes the pair’s compactness score. On top of individual pair structure, the strategy also builds a meta-graph across the entire universe. Here, the nodes are the currency pairs themselves, and the edges are determined by correlations of a chosen feature stream across assets. This produces a coupling penalty that represents how entangled the market currently is: when many pairs behave similarly, diversification and signal uniqueness tend to degrade, so the strategy reduces its enthusiasm. 
Finally, VolSieve applies a direct volatility adjustment. Each pair’s score is multiplied by a volatility factor that shrinks toward zero as volatility rises, preventing high-variance pairs from dominating purely because their signals look “strong.” The final score blends three components—market regime compactness, pair compactness, and market coupling—then compresses it through a sigmoid into a stable 0–1 ranking metric. At regular update intervals, the strategy sorts the universe and reports the top candidates, producing a clean, risk-aware shortlist suitable for downstream execution or position sizing logic. // TGr06D_VolAdjuster.cpp - Zorro64 Strategy DLL (C++)
// Strategy D: Volatility-Adjusted
// Adjusts position sizing based on per-pair volatility.
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define INF 1e30
#define EPS 1e-12
// Trading universe: the 28 standard FX majors and crosses.
// BUG FIX: the original list held only 27 symbols while N_ASSET_NAMES is 28,
// so every loop over N_ASSET_NAMES read one element past the end of the
// array. The missing 28th pair (CHFJPY) has been added.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","CHFJPY","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD",
"GBPAUD","GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD",
"USDCAD"
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;
static const int FEAT_WINDOW = 200;
static const int META_WINDOW = 200;
static const int UPDATE_EVERY = 5;
static const int TOP_K = 5;
// Standard weights, volatility adjustment done in scoring
static const double alpha = 2.0;
static const double beta = 1.5;
static const double gamma = 2.0;
static const double VOL_SCALE = 0.5;
static double clamp01(double x) { if(x<0) return 0; if(x>1) return 1; return x; }
// Numerically-safe logistic function: saturates to 0/1 for |x| > 30
// so exp() never overflows or underflows meaningfully.
static double sigmoid(double v) {
    if(v >= -30.0 && v <= 30.0)
        return 1.0/(1.0 + exp(-v));
    return (v > 0.0) ? 1.0 : 0.0;
}
// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed sample, get(1) the one before, etc.
class RollingBuffer {
public:
int cap = 0;    // allocated capacity
int n = 0;      // number of valid samples (<= cap)
int head = 0;   // next write position
double* x = 0;  // malloc'd backing storage
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// Allocate storage for L samples, discarding any previous contents.
void init(int L){
shutdown();
cap = L;
x = (double*)malloc(sizeof(double)*(size_t)cap);
if(!x) quit("OOM: RollingBuffer");
head = 0;
n = 0;
}
// Release storage and return to the empty state.
void shutdown(){
if(x) free(x);
x = 0;
head = 0;
n = 0;
cap = 0;
}
// Append v, overwriting the oldest sample once the buffer is full.
void push(double v){
if(cap <= 0 || !x) return;
x[head] = v;
if(++head == cap) head = 0;
if(n != cap) ++n;
}
// Read the i-th most recent sample (i = 0 is newest).
double get(int i) const {
int k = head - 1 - i;
while(k < 0) k += cap;
return x[k % cap];
}
};
// Pearson correlation of the L most recent samples of two buffers.
// Returns 0 when either buffer has fewer than L samples, when L is too
// short to be meaningful, or when either series is (near-)constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L)
{
if(L < 5) return 0.0;
if(a.n < L || b.n < L) return 0.0;
double sumA = 0.0, sumB = 0.0;
for(int k=0;k<L;k++){
sumA += a.get(k);
sumB += b.get(k);
}
const double meanA = sumA/(double)L;
const double meanB = sumB/(double)L;
double vaa = 0.0, vbb = 0.0, vab = 0.0;
for(int k=0;k<L;k++){
const double da = a.get(k) - meanA;
const double db = b.get(k) - meanB;
vaa += da*da;
vbb += db*db;
vab += da*db;
}
const double norm = sqrt(vaa*vbb);
return (norm > EPS) ? vab/norm : 0.0;
}
// Dense distance matrix over n nodes, with Floyd–Warshall all-pairs
// shortest paths and a Wiener-index style distance summary.
class WeightedGraph {
public:
int n = 0;      // node count
double* d = 0;  // n*n row-major distance matrix
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// Allocate an N x N matrix and reset it (0 on the diagonal, INF elsewhere).
void init(int N){
shutdown();
n = N;
d = (double*)malloc(sizeof(double)*(size_t)n*(size_t)n);
if(!d) quit("OOM: WeightedGraph");
reset();
}
void shutdown(){
if(d) free(d);
d = 0;
n = 0;
}
// Flat index of matrix cell (r,c).
inline int pos(int r,int c) const { return r*n + c; }
// Self-distances 0, all other pairs unreachable until edges are set.
void reset(){
for(int r=0;r<n;r++){
for(int c=0;c<n;c++)
d[pos(r,c)] = (r==c) ? 0.0 : INF;
}
}
// Floyd–Warshall: relax every pair through every intermediate node.
void allPairsShortest(){
for(int mid=0; mid<n; mid++){
for(int r=0; r<n; r++){
const double toMid = d[pos(r,mid)];
for(int c=0; c<n; c++){
const double viaMid = toMid + d[pos(mid,c)];
if(viaMid < d[pos(r,c)]) d[pos(r,c)] = viaMid;
}
}
}
}
// Sum of symmetrized distances over all unordered node pairs.
double wienerUndirectedLike() const {
double total = 0.0;
for(int r=0;r<n;r++){
for(int c=r+1;c<n;c++)
total += 0.5*(d[pos(r,c)] + d[pos(c,r)]);
}
return total;
}
};
// Per-asset "aspect graph": FEAT_N feature streams as nodes with edge
// distances from pairwise feature correlation. Also caches the latest
// realized-volatility sample for the volatility-adjusted scoring step.
class PairAspectGraph {
public:
WeightedGraph G;
RollingBuffer feat[FEAT_N];
double compactness = 0.0;  // 1/(1+W); higher = more coherent features
double volatility = 0.0;   // latest sample of feature 3 (realized vol)
PairAspectGraph() {}
void init(){
G.init(FEAT_N);
for(int f=0; f<FEAT_N; f++) feat[f].init(FEAT_WINDOW);
}
void shutdown(){
for(int f=0; f<FEAT_N; f++) feat[f].shutdown();
G.shutdown();
}
// Map |corr| in [0,1] to a distance in [0,1]: similar features => close.
static double corrToDist(double corr){
double m = fabs(corr);
return (m > 1.0) ? 0.0 : 1.0 - m;
}
void pushFeature(int k, double v){
feat[k].push(v);
}
// Once a full window of data is available, rebuild the feature graph,
// run all-pairs shortest paths, and refresh compactness + volatility.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){
compactness = 0.0;
return;
}
G.reset();
for(int p=0; p<FEAT_N; p++){
for(int q=p+1; q<FEAT_N; q++){
const double dist = corrToDist(corrOf(feat[p], feat[q], FEAT_WINDOW));
G.d[G.pos(p,q)] = dist;
G.d[G.pos(q,p)] = dist;
}
}
G.allPairsShortest();
compactness = 1.0/(1.0 + G.wienerUndirectedLike());
volatility = feat[3].get(0);
}
};
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty = 0;
PairUniverseGraph(){}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
static double corrToDist(double corr){
double a = fabs(corr);
if(a>1.0) a = 1.0;
return 1.0 - a;
}
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
double w = corrToDist(c);
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
}
};
// Regime compactness term, mapped as 1/(1+W).
// NOTE(review): W alternates between two constants by bar parity — this
// looks like a placeholder for a real regime-graph computation; confirm.
static double computeRegimeCompactness()
{
double W = 2.5;
if((Bar % 2) == 0) W = 1.0;
return 1.0/(1.0 + W);
}
// Top-level strategy object: one feature graph per asset plus a
// cross-asset universe graph. Like the regime switcher, but each asset's
// blended score is scaled down by its recent realized volatility before
// the sigmoid, so turbulent pairs rank lower.
class VolAdjusterStrategy {
public:
PairAspectGraph pairG[N_ASSET_NAMES];   // per-asset feature graphs
PairUniverseGraph metaG;                // cross-asset coupling graph
int lastUpdateBar = -999999;            // guard: rebuild at most once per bar
void init(){
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
metaG.init();
}
void shutdown(){
metaG.shutdown();
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
}
// Compute the nine per-bar features for asset a and push them into its
// rolling buffers. Switches Zorro's asset context as a side effect.
// NOTE: asset()/series() are called before the Bar<20 guard so the
// series is registered on every bar, as Zorro requires.
void updateFeaturesForAsset(int a)
{
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return;  // need 20 bars of history for the windowed features
double c0 = C[0];
double c1 = C[1];
double c12 = C[12];
double ret1 = log(c0/c1);   // feature 0: 1-bar log return
double retN = log(c0/c12);  // feature 1: 12-bar log return
double ma3 = (C[0]+C[1]+C[2]) / 3.0;
double ma12 = 0;
for(int i=0;i<12;i++) ma12 += C[i];
ma12 /= 12.0;
double slope = ma3 - ma12;  // feature 2: fast-vs-slow MA spread (trend proxy)
double m=0, s=0;
// mean and variance of the last 20 one-bar log returns
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
m += ri;
}
m /= 20.0;
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
double d = ri - m;
s += d*d;
}
double vol = sqrt(s/20.0);         // feature 3: realized volatility
double mom = retN;                 // feature 4: momentum (same as retN)
double rsiProxy = tanh(10.0*retN); // feature 5: RSI-like compression of retN
double m20=0, s20=0;
for(int i=0;i<20;i++) m20 += C[i];
m20 /= 20.0;
for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
s20 = sqrt(s20/20.0) + 1e-12;      // epsilon avoids division by zero
double bollPos = (c0 - m20)/s20;   // z-score vs 20-bar mean (Bollinger position)
double rangePress = tanh(0.5*bollPos); // feature 6: squashed band pressure
double flowProxy = fabs(ret1);     // feature 7: absolute short move ("flow")
int up=0;
for(int i=0;i<20;i++){
if(C[i] > C[i+1]) up++;
}
double pBull = (double)up/20.0;
double markovProxy = 2.0*(pBull - 0.5); // feature 8: up/down persistence in [-1,1]
pairG[a].pushFeature(0, ret1);
pairG[a].pushFeature(1, retN);
pairG[a].pushFeature(2, slope);
pairG[a].pushFeature(3, vol);
pairG[a].pushFeature(4, mom);
pairG[a].pushFeature(5, rsiProxy);
pairG[a].pushFeature(6, rangePress);
pairG[a].pushFeature(7, flowProxy);
pairG[a].pushFeature(8, markovProxy);
}
// Per-bar driver: always update features; rebuild graphs and re-rank
// only every UPDATE_EVERY bars.
void onBar()
{
for(int a=0;a<N_ASSET_NAMES;a++)
updateFeaturesForAsset(a);
if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
if(lastUpdateBar == Bar) return;  // avoid double work if called twice per bar
lastUpdateBar = Bar;
for(int a=0;a<N_ASSET_NAMES;a++)
pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg = computeRegimeCompactness();    // global regime term
double Pcouple = metaG.couplingPenalty;      // universe crowding penalty
double score[N_ASSET_NAMES];
int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA = pairG[a].compactness;
double vol = pairG[a].volatility;
// Volatility shrink: factor falls linearly with vol, floored at 0,
// so high-variance pairs cannot dominate the ranking.
double volFactor = clamp01(1.0 - vol * VOL_SCALE);
double x = (alpha*Creg + gamma*CA - beta*Pcouple) * volFactor;
score[a] = sigmoid(x);
idx[a] = a;
}
// Partial selection sort: only the first TOP_K slots are ordered.
for(int i=0;i<TOP_K;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
if(score[idx[j]] > score[idx[i]]){
int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
}
}
}
// Report the current shortlist every 50 bars.
if((Bar % 50) == 0){
printf("\n[VolAdjuster] Bar=%d Creg=%.4f Pcouple=%.4f", Bar, Creg, Pcouple);
for(int k=0;k<TOP_K;k++){
int a = idx[k];
printf("\n #%d %s CA=%.4f Vol=%.4f Score=%.4f",
k+1, ASSET_NAMES[a], pairG[a].compactness, pairG[a].volatility, score[a]);
}
}
}
};
static VolAdjusterStrategy* S = 0;  // singleton strategy instance
// Zorro entry point, called once per bar plus special INIT/EXIT runs.
// INITRUN allocates the strategy; EXITRUN tears it down; normal bars
// delegate to onBar() once the lookback window is filled.
DLLFUNC void run()
{
if(is(INITRUN))
{
BarPeriod = 60;  // hourly bars
LookBack = max(LookBack, FEAT_WINDOW + 50);  // cover the 200-bar feature window
asset((char*)ASSET_NAMES[0]);  // select a default asset during init
if(!S) {
S = new VolAdjusterStrategy();
S->init();
}
}
if(is(EXITRUN))
{
if(S){
S->shutdown();
delete S;
S = 0;
}
return;
}
if(!S) return;
if(Bar < LookBack) return;  // wait for enough history before scoring
S->onBar();
}
|
|
|
MomentumBias Nexus
[Re: TipmyPip]
#489214
02/23/26 18:59
02/23/26 18:59
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
MomentumBias Nexus is a multi-asset ranking and selection strategy designed for a broad FX universe (28 major and cross pairs). Instead of placing trades directly, it behaves like a “market scanner” that continuously evaluates each pair’s current quality and then surfaces the most attractive candidates. The core idea is to combine structure (how coherent the internal features of a pair look) with environment (how tightly the whole universe is moving together), and then apply a strict momentum gate so the system only considers pairs that are already moving in the preferred direction. On every bar (set to hourly bars via BarPeriod = 60), the strategy updates a compact set of nine features per asset. These features capture short-term return, multi-bar return, moving-average slope, volatility, a momentum proxy, an RSI-like nonlinear transform, a Bollinger-style position measure, a simple “flow” proxy (absolute 1-bar move), and a Markov-style persistence proxy (how often the last 20 closes were higher than the previous close). Each feature is stored in rolling buffers, allowing the strategy to measure how these signals behave together over time. The unique part is the use of graph-based “compactness.” For each asset, the strategy builds a feature graph where edges represent how strongly two features are correlated over a window (200 bars). High correlation reduces edge distance, low correlation increases it. By running an all-pairs shortest-path calculation, the strategy produces a single compactness value: a higher compactness implies the features are behaving in a more internally consistent, mutually reinforcing way, which is treated as a favorable condition. In parallel, a meta-graph is constructed across all assets using correlations of the primary return feature between pairs. This yields a “coupling penalty” that rises when the universe becomes tightly synchronized (a regime where diversification and pair selection advantages may shrink). 
The final score blends three forces: (1) a regime compactness term, (2) the asset’s own compactness, and (3) the universe coupling penalty (subtracted). Finally, a momentum filter requires the most recent 1-bar log return to be positive; otherwise the asset receives a zero score regardless of other conditions. Every few bars, the strategy ranks all pairs by a sigmoid-compressed score and prints the top candidates, producing a disciplined, momentum-aligned shortlist for execution or downstream portfolio logic. // TGr06E_MomentumBias.cpp - Zorro64 Strategy DLL (C++)
// Strategy E: Momentum-Biased
// Adds momentum filter: only trades pairs with positive Ret1.
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define INF 1e30
#define EPS 1e-12
// Trading universe: the 28 standard FX majors and crosses.
// BUG FIX: the original list held only 27 symbols while N_ASSET_NAMES is 28,
// so every loop over N_ASSET_NAMES read one element past the end of the
// array. The missing 28th pair (CHFJPY) has been added.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","CHFJPY","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD",
"GBPAUD","GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD",
"USDCAD"
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;
static const int FEAT_WINDOW = 200;
static const int META_WINDOW = 200;
static const int UPDATE_EVERY = 5;
static const int TOP_K = 5;
// Standard weights, momentum filter applied in scoring
static const double alpha = 2.0;
static const double beta = 1.5;
static const double gamma = 2.0;
static const double MOMENTUM_THRESHOLD = 0.0;
static double clamp01(double x) { if(x<0) return 0; if(x>1) return 1; return x; }
// Numerically-safe logistic function: saturates to 0/1 for |x| > 30
// so exp() never overflows or underflows meaningfully.
static double sigmoid(double v) {
    if(v >= -30.0 && v <= 30.0)
        return 1.0/(1.0 + exp(-v));
    return (v > 0.0) ? 1.0 : 0.0;
}
// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed sample, get(1) the one before, etc.
class RollingBuffer {
public:
int cap = 0;    // allocated capacity
int n = 0;      // number of valid samples (<= cap)
int head = 0;   // next write position
double* x = 0;  // malloc'd backing storage
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// Allocate storage for L samples, discarding any previous contents.
void init(int L){
shutdown();
cap = L;
x = (double*)malloc(sizeof(double)*(size_t)cap);
if(!x) quit("OOM: RollingBuffer");
head = 0;
n = 0;
}
// Release storage and return to the empty state.
void shutdown(){
if(x) free(x);
x = 0;
head = 0;
n = 0;
cap = 0;
}
// Append v, overwriting the oldest sample once the buffer is full.
void push(double v){
if(cap <= 0 || !x) return;
x[head] = v;
if(++head == cap) head = 0;
if(n != cap) ++n;
}
// Read the i-th most recent sample (i = 0 is newest).
double get(int i) const {
int k = head - 1 - i;
while(k < 0) k += cap;
return x[k % cap];
}
};
// Pearson correlation of the L most recent samples of two buffers.
// Returns 0 when either buffer has fewer than L samples, when L is too
// short to be meaningful, or when either series is (near-)constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L)
{
if(L < 5) return 0.0;
if(a.n < L || b.n < L) return 0.0;
double sumA = 0.0, sumB = 0.0;
for(int k=0;k<L;k++){
sumA += a.get(k);
sumB += b.get(k);
}
const double meanA = sumA/(double)L;
const double meanB = sumB/(double)L;
double vaa = 0.0, vbb = 0.0, vab = 0.0;
for(int k=0;k<L;k++){
const double da = a.get(k) - meanA;
const double db = b.get(k) - meanB;
vaa += da*da;
vbb += db*db;
vab += da*db;
}
const double norm = sqrt(vaa*vbb);
return (norm > EPS) ? vab/norm : 0.0;
}
// Dense distance matrix over n nodes, with Floyd–Warshall all-pairs
// shortest paths and a Wiener-index style distance summary.
class WeightedGraph {
public:
int n = 0;      // node count
double* d = 0;  // n*n row-major distance matrix
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// Allocate an N x N matrix and reset it (0 on the diagonal, INF elsewhere).
void init(int N){
shutdown();
n = N;
d = (double*)malloc(sizeof(double)*(size_t)n*(size_t)n);
if(!d) quit("OOM: WeightedGraph");
reset();
}
void shutdown(){
if(d) free(d);
d = 0;
n = 0;
}
// Flat index of matrix cell (r,c).
inline int pos(int r,int c) const { return r*n + c; }
// Self-distances 0, all other pairs unreachable until edges are set.
void reset(){
for(int r=0;r<n;r++){
for(int c=0;c<n;c++)
d[pos(r,c)] = (r==c) ? 0.0 : INF;
}
}
// Floyd–Warshall: relax every pair through every intermediate node.
void allPairsShortest(){
for(int mid=0; mid<n; mid++){
for(int r=0; r<n; r++){
const double toMid = d[pos(r,mid)];
for(int c=0; c<n; c++){
const double viaMid = toMid + d[pos(mid,c)];
if(viaMid < d[pos(r,c)]) d[pos(r,c)] = viaMid;
}
}
}
}
// Sum of symmetrized distances over all unordered node pairs.
double wienerUndirectedLike() const {
double total = 0.0;
for(int r=0;r<n;r++){
for(int c=r+1;c<n;c++)
total += 0.5*(d[pos(r,c)] + d[pos(c,r)]);
}
return total;
}
};
// Per-asset "aspect graph": FEAT_N feature streams as nodes with edge
// distances from pairwise feature correlation. Also caches the latest
// 1-bar return sample, used downstream as the momentum gate.
class PairAspectGraph {
public:
WeightedGraph G;
RollingBuffer feat[FEAT_N];
double compactness = 0.0;  // 1/(1+W); higher = more coherent features
double momentum = 0.0;     // latest sample of feature 0 (1-bar log return)
PairAspectGraph() {}
void init(){
G.init(FEAT_N);
for(int f=0; f<FEAT_N; f++) feat[f].init(FEAT_WINDOW);
}
void shutdown(){
for(int f=0; f<FEAT_N; f++) feat[f].shutdown();
G.shutdown();
}
// Map |corr| in [0,1] to a distance in [0,1]: similar features => close.
static double corrToDist(double corr){
double m = fabs(corr);
return (m > 1.0) ? 0.0 : 1.0 - m;
}
void pushFeature(int k, double v){
feat[k].push(v);
}
// Once a full window of data is available, rebuild the feature graph,
// run all-pairs shortest paths, and refresh compactness + momentum.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){
compactness = 0.0;
return;
}
G.reset();
for(int p=0; p<FEAT_N; p++){
for(int q=p+1; q<FEAT_N; q++){
const double dist = corrToDist(corrOf(feat[p], feat[q], FEAT_WINDOW));
G.d[G.pos(p,q)] = dist;
G.d[G.pos(q,p)] = dist;
}
}
G.allPairsShortest();
compactness = 1.0/(1.0 + G.wienerUndirectedLike());
momentum = feat[0].get(0);
}
};
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty = 0;
PairUniverseGraph(){}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
static double corrToDist(double corr){
double a = fabs(corr);
if(a>1.0) a = 1.0;
return 1.0 - a;
}
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
double w = corrToDist(c);
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
}
};
// Regime compactness term, mapped as 1/(1+W).
// NOTE(review): W alternates between two constants by bar parity — this
// looks like a placeholder for a real regime-graph computation; confirm.
static double computeRegimeCompactness()
{
double W = 2.5;
if((Bar % 2) == 0) W = 1.0;
return 1.0/(1.0 + W);
}
// Top-level strategy object: one feature graph per asset plus a
// cross-asset universe graph. Like the base selector, but a hard
// momentum gate zeroes the blended score of any pair whose latest
// 1-bar return is not above MOMENTUM_THRESHOLD.
class MomentumBiasStrategy {
public:
PairAspectGraph pairG[N_ASSET_NAMES];   // per-asset feature graphs
PairUniverseGraph metaG;                // cross-asset coupling graph
int lastUpdateBar = -999999;            // guard: rebuild at most once per bar
void init(){
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
metaG.init();
}
void shutdown(){
metaG.shutdown();
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
}
// Compute the nine per-bar features for asset a and push them into its
// rolling buffers. Switches Zorro's asset context as a side effect.
// NOTE: asset()/series() are called before the Bar<20 guard so the
// series is registered on every bar, as Zorro requires.
void updateFeaturesForAsset(int a)
{
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return;  // need 20 bars of history for the windowed features
double c0 = C[0];
double c1 = C[1];
double c12 = C[12];
double ret1 = log(c0/c1);   // feature 0: 1-bar log return
double retN = log(c0/c12);  // feature 1: 12-bar log return
double ma3 = (C[0]+C[1]+C[2]) / 3.0;
double ma12 = 0;
for(int i=0;i<12;i++) ma12 += C[i];
ma12 /= 12.0;
double slope = ma3 - ma12;  // feature 2: fast-vs-slow MA spread (trend proxy)
double m=0, s=0;
// mean and variance of the last 20 one-bar log returns
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
m += ri;
}
m /= 20.0;
for(int i=0;i<20;i++){
double ri = log(C[i]/C[i+1]);
double d = ri - m;
s += d*d;
}
double vol = sqrt(s/20.0);         // feature 3: realized volatility
double mom = retN;                 // feature 4: momentum (same as retN)
double rsiProxy = tanh(10.0*retN); // feature 5: RSI-like compression of retN
double m20=0, s20=0;
for(int i=0;i<20;i++) m20 += C[i];
m20 /= 20.0;
for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
s20 = sqrt(s20/20.0) + 1e-12;      // epsilon avoids division by zero
double bollPos = (c0 - m20)/s20;   // z-score vs 20-bar mean (Bollinger position)
double rangePress = tanh(0.5*bollPos); // feature 6: squashed band pressure
double flowProxy = fabs(ret1);     // feature 7: absolute short move ("flow")
int up=0;
for(int i=0;i<20;i++){
if(C[i] > C[i+1]) up++;
}
double pBull = (double)up/20.0;
double markovProxy = 2.0*(pBull - 0.5); // feature 8: up/down persistence in [-1,1]
pairG[a].pushFeature(0, ret1);
pairG[a].pushFeature(1, retN);
pairG[a].pushFeature(2, slope);
pairG[a].pushFeature(3, vol);
pairG[a].pushFeature(4, mom);
pairG[a].pushFeature(5, rsiProxy);
pairG[a].pushFeature(6, rangePress);
pairG[a].pushFeature(7, flowProxy);
pairG[a].pushFeature(8, markovProxy);
}
// Per-bar driver: always update features; rebuild graphs and re-rank
// only every UPDATE_EVERY bars.
void onBar()
{
for(int a=0;a<N_ASSET_NAMES;a++)
updateFeaturesForAsset(a);
if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
if(lastUpdateBar == Bar) return;  // avoid double work if called twice per bar
lastUpdateBar = Bar;
for(int a=0;a<N_ASSET_NAMES;a++)
pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg = computeRegimeCompactness();    // global regime term
double Pcouple = metaG.couplingPenalty;      // universe crowding penalty
double score[N_ASSET_NAMES];
int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA = pairG[a].compactness;
double mom = pairG[a].momentum;
// Hard gate: zero out the blend unless latest 1-bar return is positive.
// Note sigmoid(0) = 0.5, so gated-out assets score exactly 0.5.
double momFactor = (mom > MOMENTUM_THRESHOLD) ? 1.0 : 0.0;
double x = (alpha*Creg + gamma*CA - beta*Pcouple) * momFactor;
score[a] = sigmoid(x);
idx[a] = a;
}
// Partial selection sort: only the first TOP_K slots are ordered.
for(int i=0;i<TOP_K;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
if(score[idx[j]] > score[idx[i]]){
int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
}
}
}
// Report the current shortlist every 50 bars.
if((Bar % 50) == 0){
printf("\n[MomentumBias] Bar=%d Creg=%.4f Pcouple=%.4f", Bar, Creg, Pcouple);
for(int k=0;k<TOP_K;k++){
int a = idx[k];
printf("\n #%d %s CA=%.4f Mom=%.4f Score=%.4f",
k+1, ASSET_NAMES[a], pairG[a].compactness, pairG[a].momentum, score[a]);
}
}
}
};
static MomentumBiasStrategy* S = 0;  // singleton strategy instance
// Zorro entry point, called once per bar plus special INIT/EXIT runs.
// INITRUN allocates the strategy; EXITRUN tears it down; normal bars
// delegate to onBar() once the lookback window is filled.
DLLFUNC void run()
{
if(is(INITRUN))
{
BarPeriod = 60;  // hourly bars
LookBack = max(LookBack, FEAT_WINDOW + 50);  // cover the 200-bar feature window
asset((char*)ASSET_NAMES[0]);  // select a default asset during init
if(!S) {
S = new MomentumBiasStrategy();
S->init();
}
}
if(is(EXITRUN))
{
if(S){
S->shutdown();
delete S;
S = 0;
}
return;
}
if(!S) return;
if(Bar < LookBack) return;  // wait for enough history before scoring
S->onBar();
}
|
|
|
NexusWeave Compact Dominant
[Re: TipmyPip]
#489215
02/23/26 19:02
02/23/26 19:02
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
NexusWeave Compact Dominant (NCD-v2) is a multi-asset FX strategy that ranks currency pairs by how “structurally coherent” their recent behavior looks, using a two-layer graph architecture. Instead of relying on a single indicator or a fixed regime filter, it treats each market as a small network of interacting features, then evaluates how tightly that network holds together over time. The strategy runs on an hourly bar schedule and updates its rankings periodically (every few bars) to stay responsive without becoming overly noisy. At the first layer, each currency pair builds an aspect graph from nine rolling features (e.g., short and medium log returns, moving-average slope, volatility, momentum, RSI-like proxy, Bollinger-position pressure, volatility-of-volatility, and a simple Markov-style up/down persistence proxy). Over a fixed window, the strategy measures correlations between these features and converts correlation strength into a distance metric. When feature relationships remain stable and mutually reinforcing, the graph’s shortest-path structure becomes “compact.” That compactness is summarized into a score (derived from the graph’s overall distance structure), alongside supporting diagnostics such as average edge weight, edge variance (used as an entropy-like stability proxy), a volatility centrality measure, a bandwidth ratio, and a lightweight regime probability based on short-horizon entropy. The second layer is a universe graph connecting all traded pairs. Here, each edge blends two ideas: (1) similarity of recent pair returns (correlation distance) and (2) a currency-exposure distance that recognizes when two pairs share base/quote currency structure. This meta-graph produces portfolio-level signals: a coupling penalty (how tightly everything moves together), a crude cluster score (how fragmented the market is), and a USD exposure gauge weighted by pair compactness. 
Finally, NCD-v2 forms a dominance score per asset: it rewards pair compactness, lightly incorporates a simple regime compactness term, and penalizes high market coupling (to avoid crowded, highly synchronized conditions). Scores are passed through a sigmoid to normalize them, then the strategy selects the top-ranked pairs (Top-K) as its active focus set. The result is a self-contained selection engine designed to prefer pairs whose internal feature structure is orderly while the broader market remains sufficiently diversified. // TGr06A_CompactDominant_v2.cpp - Zorro64 Strategy DLL (C++)
// Strategy A v2: Compactness-Dominant with Enhanced Graph Architecture
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#define INF 1e30
#define EPS 1e-12
// Trading universe: the 28 standard FX majors and crosses.
// BUG FIX: the original list held only 27 symbols while N_ASSET_NAMES is 28,
// so every loop over N_ASSET_NAMES read one element past the end of the
// array. The missing 28th pair (CHFJPY) has been added.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","CHFJPY","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD",
"GBPAUD","GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD",
"USDCAD"
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;
static const int FEAT_WINDOW = 200;
static const int META_WINDOW = 200;
static const int UPDATE_EVERY = 5;
static const int TOP_K = 5;
static const double alpha = 0.1;
static const double beta = 0.2;
static const double gamma = 3.0;
static const double LAMBDA_META = 0.7;
static double clamp01(double x) { if(x<0) return 0; if(x>1) return 1; return x; }
static double sigmoid(double x) { if(x > 30) return 1.0; if(x < -30) return 0.0; return 1.0/(1.0 + exp(-x)); }
static const char* CURRENCY_BASE[] = { "EUR","GBP","USD","CHF","JPY","AUD","CAD","NZD" };
static const int N_CURRENCIES = 8;
// Exposure of pair pairIdx to currency ccy: +1 if ccy is the base leg
// (first three letters of the symbol), -1 if it is the quote leg (last
// three letters), 0 if the pair does not involve ccy at all.
static int getCurrencyExposure(int pairIdx, int ccy) {
const char* sym = ASSET_NAMES[pairIdx];
char baseCcy[4] = {sym[0], sym[1], sym[2], 0};
char quoteCcy[4] = {sym[3], sym[4], sym[5], 0};
if(!strcmp(baseCcy, CURRENCY_BASE[ccy])) return 1;
if(!strcmp(quoteCcy, CURRENCY_BASE[ccy])) return -1;
return 0;
}
// Structural distance between two pairs based on shared currency legs,
// averaged over all tracked currencies. Per currency:
//   both pairs exposed with the same sign  -> 0
//   both exposed with opposite signs       -> 1
//   exactly one pair exposed               -> 0.5
static double exposureDist(int i, int j) {
double total = 0.0;
for(int c=0; c<N_CURRENCIES; c++) {
const int ei = getCurrencyExposure(i, c);
const int ej = getCurrencyExposure(j, c);
if(ei != 0 && ej != 0) {
if(ei != ej) total += 1.0;
} else if(ei != 0 || ej != 0) {
total += 0.5;
}
}
return total / (double)N_CURRENCIES;
}
// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed sample, get(1) the one before, etc.
class RollingBuffer {
public:
int cap = 0, n = 0, head = 0; // capacity, fill count, next write slot
double* x = 0;                // owned heap storage (cap doubles)
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// FIX: the buffer owns raw memory; the implicit copy operations would make two
// objects free the same block. Forbid copying (none of the call sites copy).
RollingBuffer(const RollingBuffer&) = delete;
RollingBuffer& operator=(const RollingBuffer&) = delete;
// Allocate storage for L samples; any previous contents are discarded.
void init(int L){ shutdown(); cap = L; x = (double*)malloc((size_t)cap*sizeof(double)); if(!x) quit("OOM"); n = 0; head = 0; }
// Release storage; safe to call repeatedly.
void shutdown(){ if(x) free(x); x = 0; cap = 0; n = 0; head = 0; }
// Append a sample, overwriting the oldest one once the buffer is full.
void push(double v){ if(!x || cap<=0) return; x[head] = v; head = (head + 1) % cap; if(n < cap) n++; }
// i-th most recent sample (0 = newest). Caller must ensure i < n;
// reading an index >= n returns unwritten memory, as in the original.
double get(int i) const { int idx = head - 1 - i; while(idx < 0) idx += cap; return x[idx % cap]; }
};
// Pearson correlation of the most recent L samples of two buffers.
// Returns 0 when either buffer holds fewer than L samples, when L < 5,
// or when either series is (numerically) constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L) {
    if(a.n < L || b.n < L || L < 5) return 0.0;
    double meanA = 0, meanB = 0;
    for(int i = 0; i < L; i++){ meanA += a.get(i); meanB += b.get(i); }
    meanA /= (double)L;
    meanB /= (double)L;
    double varA = 0, varB = 0, cov = 0;
    for(int i = 0; i < L; i++){
        double da = a.get(i) - meanA;
        double db = b.get(i) - meanB;
        varA += da*da; varB += db*db; cov += da*db;
    }
    double denom = sqrt(varA*varB);
    if(denom <= EPS) return 0.0;
    return cov/denom;
}
// Dense weighted graph stored as a row-major n*n distance matrix d.
// Off-diagonal entries start at INF (no edge); after allPairsShortest()
// the matrix holds all-pairs shortest-path distances (Floyd-Warshall).
class WeightedGraph {
public:
int n = 0;
double* d = 0; // owned n*n matrix; d[pos(i,j)] = distance i->j, INF = unreachable
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// FIX: d is raw owned memory; implicit copies would double-free it. Forbid copying.
WeightedGraph(const WeightedGraph&) = delete;
WeightedGraph& operator=(const WeightedGraph&) = delete;
void init(int N){ shutdown(); n = N; d = (double*)malloc((size_t)n*n*sizeof(double)); if(!d) quit("OOM"); reset(); }
void shutdown(){ if(d) free(d); d = 0; n = 0; }
inline int pos(int r,int c) const { return r*n + c; }
// Clear to the "no edges" state: 0 on the diagonal, INF elsewhere.
void reset(){ for(int r=0;r<n;r++) for(int c=0;c<n;c++) d[pos(r,c)] = (r==c) ? 0.0 : INF; }
// Floyd-Warshall, O(n^3); overwrites d with shortest-path distances.
void allPairsShortest(){ for(int k=0;k<n;k++) for(int i=0;i<n;i++) for(int j=0;j<n;j++){ double cand = d[pos(i,k)] + d[pos(k,j)]; if(cand < d[pos(i,j)]) d[pos(i,j)] = cand; } }
// Wiener-like index: sum over unordered pairs of the average of both directions.
double wienerUndirectedLike() const { double W=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ double dij = d[pos(i,j)], dji = d[pos(j,i)]; W += 0.5*(dij + dji); } return W; }
// Mean of finite upper-triangle entries (raw weights or distances, whatever d holds).
double meanEdgeWeight() const { double sum=0; int cnt=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ if(d[pos(i,j)] < INF){ sum += d[pos(i,j)]; cnt++; }} return cnt>0 ? sum/cnt : 0.0; }
// Population variance of finite upper-triangle entries.
double edgeVariance() const { double mean = meanEdgeWeight(); double var=0; int cnt=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ if(d[pos(i,j)] < INF){ double diff = d[pos(i,j)] - mean; var += diff*diff; cnt++; }} return cnt>0 ? var/cnt : 0.0; }
// Closeness-style centrality: sum of finite distances from node to all others.
double centrality(int node) const { double sum=0; for(int j=0;j<n;j++) if(j!=node && d[pos(node,j)] < INF) sum += d[pos(node,j)]; return sum; }
};
// Scalar summary of one pair's feature graph, refreshed by PairAspectGraph::rebuildIfReady().
struct PairFeatures {
double compactness; // 1/(1+Wiener) of the feature graph; higher = tighter structure
double meanEdge; // mean shortest-path edge weight
double entropy; // variance of edge weights (dispersion proxy, despite the name)
double volCentrality; // centrality of the volatility node (feature index 3), normalized
double regimeProb; // crude regime flag: 1 if entropyStat < 2.0, else 0
double pBullNext; // fraction of positive next-bar returns over the recent window
double entropyStat; // histogram entropy of the return feature (last 20 samples)
double bandwidth; // ratio of current to 10-bars-ago volatility feature
};
// Per-pair "aspect graph": nodes are the FEAT_N rolling features, edge weights
// are correlation-derived distances, and structural metrics are summarized in pf.
class PairAspectGraph {
public:
WeightedGraph G; // FEAT_N x FEAT_N feature graph
RollingBuffer feat[FEAT_N]; // current feature streams
RollingBuffer featPrev[FEAT_N]; // lagged stream (only index 0 is ever pushed by the strategy)
PairFeatures pf; // latest structural summary
int prevRegime; // NOTE(review): written only in the ctor, never read -- appears vestigial
PairAspectGraph() : prevRegime(0) { pf.compactness=0; pf.meanEdge=0; pf.entropy=0; pf.volCentrality=0; pf.regimeProb=0; pf.pBullNext=0; pf.entropyStat=0; pf.bandwidth=0; }
// Allocate the graph and all rolling buffers.
void init(){
G.init(FEAT_N);
for(int k=0;k<FEAT_N;k++){ feat[k].init(FEAT_WINDOW); featPrev[k].init(FEAT_WINDOW); }
}
// Release all owned memory (reverse order of init).
void shutdown(){
for(int k=0;k<FEAT_N;k++){ feat[k].shutdown(); featPrev[k].shutdown(); }
G.shutdown();
}
// Map correlation to a distance in [0,1]: |corr|=1 -> 0 (close), corr=0 -> 1 (far).
static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
// Signed variant: corr=+1 -> 0, corr=-1 -> 1. (Defined but unused in this file.)
static double signedCorrToDist(double corr){ return 0.5 - 0.5*corr; }
void pushFeature(int k, double v){ feat[k].push(v); }
void pushFeaturePrev(int k, double v){ featPrev[k].push(v); }
// Shannon entropy of a 10-bin histogram over the last L values of feature 0.
// NOTE(review): values are binned as (int)(v*10) with clamping, so for small
// log-returns nearly everything lands in bin 0 or 9 -- confirm the intended scaling.
double computeEntropy(int L) {
if(feat[0].n < L) return 0.0;
int bins = 10;
int* hist = (int*)calloc(bins, sizeof(int));
for(int i=0;i<L && i<feat[0].n;i++) {
double v = feat[0].get(i);
int bin = (int)(v * bins);
if(bin < 0) bin = 0; if(bin >= bins) bin = bins-1;
hist[bin]++;
}
double H = 0.0;
for(int b=0;b<bins;b++) {
if(hist[b] > 0) {
double p = (double)hist[b] / (double)L;
H -= p * log(p + EPS);
}
}
free(hist);
return H;
}
// Rebuild the feature graph and refresh pf, once FEAT_WINDOW samples exist.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){ pf.compactness = 0; return; }
G.reset();
for(int i=0;i<FEAT_N;i++){
for(int j=i+1;j<FEAT_N;j++){
double c = corrOf(feat[i], feat[j], FEAT_WINDOW);
double w = corrToDist(c);
// With j > i, this condition only fires for the single pair (7,8);
// it blends in a lagged-stream correlation for that edge.
if(i >= 7 && j >= 8) {
double cp = corrOf(featPrev[i], feat[j], min(FEAT_WINDOW, featPrev[i].n));
w = 0.5 * w + 0.5 * corrToDist(cp);
}
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
pf.compactness = 1.0/(1.0 + W);
pf.meanEdge = G.meanEdgeWeight();
pf.entropy = G.edgeVariance();
pf.volCentrality = G.centrality(3) / (FEAT_N - 1); // node 3 = realized volatility feature
if(feat[8].n >= 20) {
// NOTE(review): upCurrent is counted but never used afterwards.
int upCurrent = 0, upNext = 0;
for(int i=0;i<19;i++){ if(feat[0].get(i) > 0) upCurrent++; if(feat[0].get(i+1) > 0) upNext++; }
pf.pBullNext = (double)upNext / 19.0;
pf.entropyStat = computeEntropy(20);
pf.regimeProb = (pf.entropyStat < 2.0) ? 1.0 : 0.0;
pf.bandwidth = feat[3].get(0) / (feat[3].get(10) + EPS); // current vol / vol 10 samples ago
}
}
};
// Universe-level "meta graph": nodes are the FX pairs, edge weights blend
// return-correlation distance with currency-exposure distance. Summarizes
// how crowded/coupled the whole universe currently is.
class PairUniverseGraph {
public:
WeightedGraph G; // N_ASSET_NAMES x N_ASSET_NAMES pair graph
double couplingPenalty; // 1/(1+Wiener): rises as the universe becomes more tightly coupled
double clusterScore; // fraction of pairs with no close neighbor (distance < 0.3)
double usdExposure; // compactness-weighted USD exposure, clamped to [0,1]
PairUniverseGraph() : couplingPenalty(0), clusterScore(0), usdExposure(0) {}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
// Correlation -> distance in [0,1] (same mapping as PairAspectGraph).
static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
// Rebuild the meta graph from the pairs' primary return feature (feat[0]).
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
double corrDist = corrToDist(c);
double expDist = exposureDist(i, j);
// LAMBDA_META blends behavioral similarity with structural (currency) overlap.
double w = LAMBDA_META * corrDist + (1.0 - LAMBDA_META) * expDist;
G.d[G.pos(i,j)] = w;
G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
clusterScore = computeClusterScore();
usdExposure = computeUSDExposure(pairs);
}
// Counts "isolated" pairs: those with no other pair closer than 0.3.
double computeClusterScore() {
int clusters = 0;
for(int i=0;i<N_ASSET_NAMES;i++) {
bool hasConnection = false;
for(int j=0;j<N_ASSET_NAMES;j++) {
if(i != j && G.d[G.pos(i,j)] < 0.3) { hasConnection = true; break; }
}
if(!hasConnection) clusters++;
}
return (double)clusters / N_ASSET_NAMES;
}
// Sums |USD exposure| weighted by each pair's compactness (ccy index 2 = USD).
double computeUSDExposure(PairAspectGraph* pairs) {
double totalExp = 0.0;
for(int i=0;i<N_ASSET_NAMES;i++) {
double exp = fabs((double)getCurrencyExposure(i, 2)); // local shadows ::exp
totalExp += exp * pairs[i].pf.compactness;
}
return clamp01(totalExp / 5.0);
}
};
// Placeholder regime-compactness proxy: alternates between two fixed
// Wiener-like values by bar parity (even bar -> "compact", odd -> "diffuse").
static double computeRegimeCompactness() {
    double W;
    if((Bar % 2) == 0) W = 1.0;
    else W = 2.5;
    return 1.0 / (1.0 + W);
}
// Top-level engine: maintains one aspect graph per pair plus the universe
// meta graph, scores all pairs each update cycle, and reports the Top-K.
class CompactDominantStrategyV2 {
public:
PairAspectGraph pairG[N_ASSET_NAMES]; // one feature graph per pair
PairUniverseGraph metaG;              // cross-pair coupling graph
int lastUpdateBar;                    // guards against double updates on the same bar
double stateVector[N_ASSET_NAMES * 8 + 5]; // flattened diagnostics (8 per pair + 5 globals)
CompactDominantStrategyV2() : lastUpdateBar(-999999) {}
void init(){
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init();
metaG.init();
}
void shutdown(){
metaG.shutdown();
for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown();
}
// Compute the 9 per-bar features for asset a from its close series and push
// them into the rolling buffers. Reads C[0..20], so the series must span at
// least 21 bars -- the Bar<20 guard plus LookBack in run() are assumed to cover this.
void updateFeaturesForAsset(int a) {
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return;
double c0 = C[0], c1 = C[1], c12 = C[12];
double ret1 = log(c0/c1);   // 1-bar log return
double retN = log(c0/c12);  // 12-bar log return
double ma3 = (C[0]+C[1]+C[2])/3.0;
double ma12 = 0; for(int i=0;i<12;i++) ma12 += C[i]; ma12 /= 12.0;
double slope = ma3 - ma12;  // fast-vs-slow MA trend proxy
double m=0, s=0;
for(int i=0;i<20;i++){ double ri = log(C[i]/C[i+1]); m += ri; }
m /= 20.0;
for(int i=0;i<20;i++){ double ri = log(C[i]/C[i+1]); double d = ri - m; s += d*d; }
double vol = sqrt(s/20.0);  // 20-bar realized volatility of log returns
double mom = retN;
double rsiProxy = tanh(10.0*retN); // compressed momentum, RSI-like
double m20=0, s20=0;
for(int i=0;i<20;i++) m20 += C[i];
m20 /= 20.0;
for(int i=0;i<20;i++){ double d=C[i]-m20; s20 += d*d; }
s20 = sqrt(s20/20.0) + 1e-12;
double bollPos = (c0 - m20)/s20;       // standardized distance from 20-bar mean
double rangePress = tanh(0.5*bollPos);
double m20b=0, s20b=0;
for(int i=0;i<20;i++) m20b += (C[i]-C[i+1]);
m20b /= 20.0;
for(int i=0;i<20;i++){ double d=(C[i]-C[i+1])-m20b; s20b += d*d; }
double volOfVol = sqrt(s20b/20.0);     // dispersion of 1-bar price changes
// NOTE(review): flowProxy is computed but never pushed as a feature.
double flowProxy = fabs(ret1);
int up=0;
for(int i=0;i<20;i++) if(C[i] > C[i+1]) up++;
double pBull = (double)up/20.0;
double markovProxy = 2.0*(pBull - 0.5); // up/down persistence in [-1,1]
pairG[a].pushFeature(0, ret1);
pairG[a].pushFeature(1, retN);
pairG[a].pushFeature(2, slope);
pairG[a].pushFeature(3, vol);
pairG[a].pushFeature(4, mom);
pairG[a].pushFeature(5, rsiProxy);
pairG[a].pushFeature(6, rangePress);
pairG[a].pushFeature(7, volOfVol);
pairG[a].pushFeature(8, markovProxy);
pairG[a].pushFeaturePrev(0, ret1);
}
// Flatten all per-pair summaries plus global diagnostics into stateVector.
// Layout: 8 doubles per pair, then couplingPenalty, clusterScore,
// usdExposure, regime compactness, and the current Bar.
void buildStateVector() {
int idx = 0;
for(int a=0;a<N_ASSET_NAMES;a++) {
stateVector[idx++] = pairG[a].pf.compactness;
stateVector[idx++] = pairG[a].pf.meanEdge;
stateVector[idx++] = pairG[a].pf.entropy;
stateVector[idx++] = pairG[a].pf.volCentrality;
stateVector[idx++] = pairG[a].pf.regimeProb;
stateVector[idx++] = pairG[a].pf.pBullNext;
stateVector[idx++] = pairG[a].pf.entropyStat;
stateVector[idx++] = pairG[a].pf.bandwidth;
}
stateVector[idx++] = metaG.couplingPenalty;
stateVector[idx++] = metaG.clusterScore;
stateVector[idx++] = metaG.usdExposure;
stateVector[idx++] = computeRegimeCompactness();
stateVector[idx++] = Bar;
}
// Per-bar driver: push features every bar; every UPDATE_EVERY bars rebuild
// all graphs, score each pair via sigmoid(alpha*Creg + gamma*CA - beta*Pcouple),
// partially sort the Top-K, and periodically print diagnostics.
void onBar(){
for(int a=0;a<N_ASSET_NAMES;a++) updateFeaturesForAsset(a);
if(UPDATE_EVERY > 1 && (Bar % UPDATE_EVERY) != 0) return;
if(lastUpdateBar == Bar) return;
lastUpdateBar = Bar;
for(int a=0;a<N_ASSET_NAMES;a++) pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg = computeRegimeCompactness();
double Pcouple = metaG.couplingPenalty;
double score[N_ASSET_NAMES];
int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA = pairG[a].pf.compactness;
double x = alpha*Creg + gamma*CA - beta*Pcouple;
score[a] = sigmoid(x);
idx[a] = a;
}
// Partial selection sort: only the first TOP_K slots end up ordered.
for(int i=0;i<TOP_K;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
if(score[idx[j]] > score[idx[i]]){
int t = idx[i]; idx[i] = idx[j]; idx[j] = t;
}
}
}
buildStateVector();
if((Bar % 50) == 0){
printf("\n[CompactDominant_v2] Bar=%d Creg=%.4f Pcouple=%.4f Cluster=%.4f USDExp=%.4f",
Bar, Creg, Pcouple, metaG.clusterScore, metaG.usdExposure);
for(int k=0;k<TOP_K;k++){
int a = idx[k];
printf("\n #%d %s CA=%.4f Ent=%.4f BW=%.4f Score=%.4f",
k+1, ASSET_NAMES[a], pairG[a].pf.compactness, pairG[a].pf.entropyStat, pairG[a].pf.bandwidth, score[a]);
}
}
}
};
static CompactDominantStrategyV2* S = 0; // singleton engine, owned by run()
// Zorro entry point: called for INITRUN/EXITRUN and once per bar in between.
DLLFUNC void run()
{
// First call: configure bar period and lookback, select a default asset,
// and construct the engine exactly once.
if(is(INITRUN)){
BarPeriod = 60; // 60-minute bars
LookBack = max(LookBack, FEAT_WINDOW + 50); // cover the 200-bar feature windows
asset((char*)ASSET_NAMES[0]);
if(!S){ S = new CompactDominantStrategyV2(); S->init(); }
}
// Last call: free all graph/buffer memory and the engine itself.
if(is(EXITRUN)){
if(S){ S->shutdown(); delete S; S = 0; }
return;
}
if(!S || Bar < LookBack) return; // wait until enough history is loaded
S->onBar();
}
Last edited by TipmyPip; 02/23/26 19:02.
|
|
|
HermitNet FX
[Re: TipmyPip]
#489216
02/23/26 19:08
02/23/26 19:08
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
HermitNet FX (CrowdAverse_v2) is a multi-asset, graph-driven FX selection engine designed to identify opportunities where a currency pair behaves coherently on its own, but not as part of the broader crowd. Instead of relying on a single indicator or a fixed regime filter, it builds two layers of networks—one inside each pair (feature relationships), and one across the whole universe (pair-to-pair relationships)—and then scores each asset by balancing “internal clarity” against “market-wide coupling.” On every bar (60-minute bars), the strategy updates nine lightweight features per pair using only recent closes: short and multi-bar log returns, simple trend slope (fast vs. slow mean), realized volatility, a compressed momentum proxy, a standardized range pressure measure, a volatility-of-volatility estimate, a flow proxy (absolute return), and a Markov-style “up/down persistence” proxy. These feature streams are stored in rolling buffers so the system can measure how the features interact over time. For each pair, HermitNet FX constructs a feature graph where nodes are features and edges represent similarity based on rolling correlations (converted into distances). It then runs an all-pairs shortest-path pass and extracts structural metrics such as compactness (inverse of total path length), mean/variance of edge weights, and a simple centrality proxy. It also derives quick “state” estimates like short-horizon entropy and a crude next-bar bullishness frequency. In parallel, the strategy builds a universe graph across all pairs. The distance between two pairs blends (1) correlation similarity of their return features and (2) a currency-exposure distance that penalizes overlapping base/quote exposure—so pairs that are essentially the same bet get pulled closer together. A compact universe graph implies “crowded” conditions; this becomes a coupling penalty. 
Finally, each pair receives a score via a sigmoid of three forces: a regime compactness term, the pair’s own feature-graph compactness reward, and a strong penalty for global coupling. The engine ranks all 28 pairs and periodically prints the Top-K candidates and diagnostics (crowding, clustering, USD exposure). In short: it’s a selector and risk-awareness layer built to avoid herd trades and surface the most structurally self-contained setups. // TGr06B_CrowdAverse_v2.cpp - Zorro64 Strategy DLL (C++)
// Strategy B v2: Crowd-Averse with Enhanced Graph Architecture
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#define INF 1e30
#define EPS 1e-12
// FX universe: 27 crosses over the 8 major currencies.
// NOTE(review): CHFJPY appears to be missing from the usual 28-pair set -- confirm intent.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD","GBPAUD",
"GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD","USDCAD"
};
// BUG FIX: the list has 27 entries but the macro was hard-coded to 28 (and was
// defined twice), so every loop bounded by N_ASSET_NAMES read past the array.
// Define it once, derived from the array, so the two can never diverge.
#define N_ASSET_NAMES ((int)(sizeof(ASSET_NAMES)/sizeof(ASSET_NAMES[0])))
// --- Tunables: graph sizes, update cadence, and score weights ---
static const int FEAT_N = 9; // features per pair (nodes of the per-pair aspect graph)
static const int FEAT_WINDOW = 200; // rolling window (bars) for feature correlations
static const int META_WINDOW = 200; // rolling window (bars) for the cross-pair meta graph
static const int UPDATE_EVERY = 5; // rebuild graphs only every N bars
static const int TOP_K = 5; // number of top-ranked pairs reported
static const double alpha = 1.0; // score weight: regime compactness term
static const double beta = 4.0; // score weight: market coupling penalty (dominant, "crowd-averse")
static const double gamma = 1.5; // score weight: pair compactness reward (NOTE: may shadow ::gamma from math.h on some toolchains)
static const double LAMBDA_META = 0.7; // blend: correlation distance vs currency-exposure distance
// Clamp a value into the closed unit interval [0,1].
static double clamp01(double x) {
    if(x < 0.0) return 0.0;
    return (x > 1.0) ? 1.0 : x;
}
// Logistic function with the far tails saturated so exp() cannot overflow.
static double sigmoid(double x) {
    const double CUT = 30.0;
    if(x > CUT) return 1.0;
    if(x < -CUT) return 0.0;
    double e = exp(-x);
    return 1.0 / (1.0 + e);
}
// Three-letter ISO codes used to decompose each pair symbol into base/quote exposure.
static const char* CURRENCY_BASE[] = { "EUR","GBP","USD","CHF","JPY","AUD","CAD","NZD" };
static const int N_CURRENCIES = 8;
// Exposure of pair pairIdx to currency ccy:
// +1 if CURRENCY_BASE[ccy] is the base (first 3 chars of the symbol),
// -1 if it is the quote (last 3 chars), 0 if the pair does not contain it.
static int getCurrencyExposure(int pairIdx, int ccy) {
    const char* sym = ASSET_NAMES[pairIdx];   // 6-char symbol, e.g. "EURUSD"
    const char* code = CURRENCY_BASE[ccy];    // 3-char code, e.g. "USD"
    if(strncmp(sym, code, 3) == 0) return 1;
    if(strncmp(sym + 3, code, 3) == 0) return -1;
    return 0;
}
// Average per-currency exposure mismatch between two pairs, in [0,1].
// Same-side exposure costs 0, opposite-side costs 1, one-sided costs 0.5.
static double exposureDist(int i, int j) {
    double total = 0.0;
    for(int c = 0; c < N_CURRENCIES; c++) {
        const int ei = getCurrencyExposure(i, c);
        const int ej = getCurrencyExposure(j, c);
        if(ei && ej) {
            if(ei != ej) total += 1.0;  // both pairs exposed, opposite sides
        } else if(ei || ej) {
            total += 0.5;               // only one of the two pairs is exposed
        }
    }
    return total / (double)N_CURRENCIES;
}
// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed sample, get(1) the one before, etc.
class RollingBuffer {
public:
int cap = 0, n = 0, head = 0; // capacity, fill count, next write slot
double* x = 0;                // owned heap storage (cap doubles)
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// FIX: the buffer owns raw memory; the implicit copy operations would make two
// objects free the same block. Forbid copying (none of the call sites copy).
RollingBuffer(const RollingBuffer&) = delete;
RollingBuffer& operator=(const RollingBuffer&) = delete;
// Allocate storage for L samples; any previous contents are discarded.
void init(int L){ shutdown(); cap = L; x = (double*)malloc((size_t)cap*sizeof(double)); if(!x) quit("OOM"); n = 0; head = 0; }
// Release storage; safe to call repeatedly.
void shutdown(){ if(x) free(x); x = 0; cap = 0; n = 0; head = 0; }
// Append a sample, overwriting the oldest one once the buffer is full.
void push(double v){ if(!x || cap<=0) return; x[head] = v; head = (head + 1) % cap; if(n < cap) n++; }
// i-th most recent sample (0 = newest). Caller must ensure i < n.
double get(int i) const { int idx = head - 1 - i; while(idx < 0) idx += cap; return x[idx % cap]; }
};
// Pearson correlation of the most recent L samples of two buffers.
// Returns 0 when either buffer holds fewer than L samples, when L < 5,
// or when either series is (numerically) constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L) {
    if(a.n < L || b.n < L || L < 5) return 0.0;
    double meanA = 0, meanB = 0;
    for(int i = 0; i < L; i++){ meanA += a.get(i); meanB += b.get(i); }
    meanA /= (double)L;
    meanB /= (double)L;
    double varA = 0, varB = 0, cov = 0;
    for(int i = 0; i < L; i++){
        double da = a.get(i) - meanA;
        double db = b.get(i) - meanB;
        varA += da*da; varB += db*db; cov += da*db;
    }
    double denom = sqrt(varA*varB);
    if(denom <= EPS) return 0.0;
    return cov/denom;
}
// Dense weighted graph stored as a row-major n*n distance matrix d.
// Off-diagonal entries start at INF (no edge); after allPairsShortest()
// the matrix holds all-pairs shortest-path distances (Floyd-Warshall).
class WeightedGraph {
public:
int n = 0;
double* d = 0; // owned n*n matrix; d[pos(i,j)] = distance i->j, INF = unreachable
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// FIX: d is raw owned memory; implicit copies would double-free it. Forbid copying.
WeightedGraph(const WeightedGraph&) = delete;
WeightedGraph& operator=(const WeightedGraph&) = delete;
void init(int N){ shutdown(); n = N; d = (double*)malloc((size_t)n*n*sizeof(double)); if(!d) quit("OOM"); reset(); }
void shutdown(){ if(d) free(d); d = 0; n = 0; }
inline int pos(int r,int c) const { return r*n + c; }
// Clear to the "no edges" state: 0 on the diagonal, INF elsewhere.
void reset(){ for(int r=0;r<n;r++) for(int c=0;c<n;c++) d[pos(r,c)] = (r==c) ? 0.0 : INF; }
// Floyd-Warshall, O(n^3); overwrites d with shortest-path distances.
void allPairsShortest(){ for(int k=0;k<n;k++) for(int i=0;i<n;i++) for(int j=0;j<n;j++){ double cand = d[pos(i,k)] + d[pos(k,j)]; if(cand < d[pos(i,j)]) d[pos(i,j)] = cand; } }
// Wiener-like index: sum over unordered pairs of the average of both directions.
double wienerUndirectedLike() const { double W=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ double dij = d[pos(i,j)], dji = d[pos(j,i)]; W += 0.5*(dij + dji); } return W; }
// Mean of finite upper-triangle entries (raw weights or distances, whatever d holds).
double meanEdgeWeight() const { double sum=0; int cnt=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ if(d[pos(i,j)] < INF){ sum += d[pos(i,j)]; cnt++; }} return cnt>0 ? sum/cnt : 0.0; }
// Population variance of finite upper-triangle entries.
double edgeVariance() const { double mean = meanEdgeWeight(); double var=0; int cnt=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ if(d[pos(i,j)] < INF){ double diff = d[pos(i,j)] - mean; var += diff*diff; cnt++; }} return cnt>0 ? var/cnt : 0.0; }
// Closeness-style centrality: sum of finite distances from node to all others.
double centrality(int node) const { double sum=0; for(int j=0;j<n;j++) if(j!=node && d[pos(node,j)] < INF) sum += d[pos(node,j)]; return sum; }
};
// Scalar summary of one pair's feature graph, refreshed by PairAspectGraph::rebuildIfReady().
struct PairFeatures {
double compactness, meanEdge, entropy, volCentrality; // graph structure: 1/(1+Wiener), mean edge, edge variance, vol-node centrality
double regimeProb, pBullNext, entropyStat, bandwidth; // state: regime flag, up-move rate, return-histogram entropy, vol ratio
};
// Per-pair "aspect graph": nodes are the FEAT_N rolling features, edge weights
// are correlation-derived distances, and structural metrics are summarized in pf.
class PairAspectGraph {
public:
WeightedGraph G; // FEAT_N x FEAT_N feature graph
RollingBuffer feat[FEAT_N]; // current feature streams
RollingBuffer featPrev[FEAT_N]; // lagged stream (only index 0 is ever pushed by the strategy)
PairFeatures pf; // latest structural summary
PairAspectGraph() { pf.compactness=0; pf.meanEdge=0; pf.entropy=0; pf.volCentrality=0; pf.regimeProb=0; pf.pBullNext=0; pf.entropyStat=0; pf.bandwidth=0; }
void init(){ G.init(FEAT_N); for(int k=0;k<FEAT_N;k++){ feat[k].init(FEAT_WINDOW); featPrev[k].init(FEAT_WINDOW); } }
void shutdown(){ for(int k=0;k<FEAT_N;k++){ feat[k].shutdown(); featPrev[k].shutdown(); } G.shutdown(); }
// Map correlation to a distance in [0,1]: |corr|=1 -> 0 (close), corr=0 -> 1 (far).
static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
void pushFeature(int k, double v){ feat[k].push(v); }
void pushFeaturePrev(int k, double v){ featPrev[k].push(v); }
// Shannon entropy of a 10-bin histogram over the last L values of feature 0.
// NOTE(review): binning is (int)(v*10) with clamping -- for small log-returns
// most samples land in the edge bins; confirm intended scaling.
double computeEntropy(int L) {
if(feat[0].n < L) return 0.0;
int bins = 10; int* hist = (int*)calloc(bins, sizeof(int));
for(int i=0;i<L && i<feat[0].n; i++) { int bin = (int)(feat[0].get(i) * bins); if(bin<0) bin=0; if(bin>=bins) bin=bins-1; hist[bin]++; }
double H = 0.0;
for(int b=0;b<bins;b++) if(hist[b]>0) { double p = (double)hist[b]/(double)L; H -= p * log(p + EPS); }
free(hist); return H;
}
// Rebuild the feature graph and refresh pf, once FEAT_WINDOW samples exist.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){ pf.compactness = 0; return; }
G.reset();
for(int i=0;i<FEAT_N;i++){
for(int j=i+1;j<FEAT_N;j++){
double c = corrOf(feat[i], feat[j], FEAT_WINDOW);
double w = corrToDist(c);
// With j > i this only fires for the single pair (7,8); blends in lagged correlation.
if(i >= 7 && j >= 8) {
double cp = (featPrev[i].n > 10) ? corrOf(featPrev[i], feat[j], min(featPrev[i].n, FEAT_WINDOW)) : 0;
w = 0.5 * w + 0.5 * corrToDist(cp);
}
G.d[G.pos(i,j)] = w; G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
pf.compactness = 1.0/(1.0 + W);
pf.meanEdge = G.meanEdgeWeight();
pf.entropy = G.edgeVariance();
pf.volCentrality = G.centrality(3) / (FEAT_N - 1); // node 3 = realized volatility feature
if(feat[8].n >= 20) {
int upNext = 0;
for(int i=0;i<19;i++) if(feat[0].get(i+1) > 0) upNext++;
pf.pBullNext = (double)upNext / 19.0;
pf.entropyStat = computeEntropy(20);
pf.regimeProb = (pf.entropyStat < 2.0) ? 1.0 : 0.0;
pf.bandwidth = feat[3].get(0) / (feat[3].get(10) + EPS); // current vol / vol 10 samples ago
}
}
};
// Universe-level "meta graph": nodes are the FX pairs, edge weights blend
// return-correlation distance with currency-exposure distance. Summarizes
// how crowded/coupled the whole universe currently is.
class PairUniverseGraph {
public:
WeightedGraph G; // N_ASSET_NAMES x N_ASSET_NAMES pair graph
double couplingPenalty, clusterScore, usdExposure; // 1/(1+Wiener); isolated-pair fraction; weighted USD exposure
PairUniverseGraph() : couplingPenalty(0), clusterScore(0), usdExposure(0) {}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
// Correlation -> distance in [0,1] (same mapping as PairAspectGraph).
static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
// Rebuild the meta graph from the pairs' primary return feature (feat[0]).
void rebuild(PairAspectGraph* pairs){
G.reset();
for(int i=0;i<N_ASSET_NAMES;i++){
for(int j=i+1;j<N_ASSET_NAMES;j++){
double c = corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW);
// LAMBDA_META blends behavioral similarity with structural (currency) overlap.
double w = LAMBDA_META * corrToDist(c) + (1.0 - LAMBDA_META) * exposureDist(i,j);
G.d[G.pos(i,j)] = w; G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
couplingPenalty = 1.0/(1.0 + W);
clusterScore = computeClusterScore();
usdExposure = computeUSDExposure(pairs);
}
// Fraction of pairs with no other pair closer than 0.3 ("isolated" pairs).
double computeClusterScore() { int clusters=0; for(int i=0;i<N_ASSET_NAMES;i++){ bool has=false; for(int j=0;j<N_ASSET_NAMES;j++) if(i!=j && G.d[G.pos(i,j)]<0.3){has=true; break;} if(!has) clusters++; } return (double)clusters/N_ASSET_NAMES; }
// |USD exposure| weighted by compactness, clamped to [0,1] (ccy index 2 = USD).
double computeUSDExposure(PairAspectGraph* pairs){ double total=0; for(int i=0;i<N_ASSET_NAMES;i++) total += fabs((double)getCurrencyExposure(i,2)) * pairs[i].pf.compactness; return clamp01(total/5.0); }
};
// Placeholder regime-compactness proxy: alternates between two fixed
// Wiener-like values by bar parity (even bar -> "compact", odd -> "diffuse").
static double computeRegimeCompactness() {
    double W;
    if((Bar % 2) == 0) W = 1.0;
    else W = 2.5;
    return 1.0 / (1.0 + W);
}
// Top-level engine: maintains one aspect graph per pair plus the universe
// meta graph, scores all pairs each update cycle, and reports the Top-K.
// Identical architecture to CompactDominant_v2, but beta (coupling penalty)
// dominates the score, making the selector crowd-averse.
class CrowdAverseStrategyV2 {
public:
PairAspectGraph pairG[N_ASSET_NAMES]; // one feature graph per pair
PairUniverseGraph metaG;              // cross-pair coupling graph
int lastUpdateBar;                    // guards against double updates on the same bar
CrowdAverseStrategyV2():lastUpdateBar(-999999){}
void init(){ for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init(); metaG.init(); }
void shutdown(){ metaG.shutdown(); for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown(); }
// Compute the 9 per-bar features for asset a from its close series and push
// them into the rolling buffers. Reads C[0..20], so the series must span at
// least 21 bars -- the Bar<20 guard plus LookBack in run() are assumed to cover this.
void updateFeaturesForAsset(int a){
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return;
double c0=C[0], c1=C[1], c12=C[12];
double ret1=log(c0/c1), retN=log(c0/c12); // 1-bar and 12-bar log returns
double ma3=(C[0]+C[1]+C[2])/3.0, ma12=0; for(int i=0;i<12;i++) ma12+=C[i]; ma12/=12.0;
double slope=ma3-ma12; // fast-vs-slow MA trend proxy
double m=0,s=0; for(int i=0;i<20;i++){double ri=log(C[i]/C[i+1]); m+=ri;} m/=20.0;
for(int i=0;i<20;i++){double ri=log(C[i]/C[i+1]),d=ri-m; s+=d*d;} double vol=sqrt(s/20.0); // 20-bar realized volatility
double rsiProxy=tanh(10.0*retN); // compressed momentum, RSI-like
double m20=0,s20=0; for(int i=0;i<20;i++) m20+=C[i]; m20/=20.0;
for(int i=0;i<20;i++){double d=C[i]-m20; s20+=d*d;} s20=sqrt(s20/20.0)+1e-12;
double rangePress=tanh(0.5*(c0-m20)/s20); // standardized distance from 20-bar mean
double mv20=0,sv20=0; for(int i=0;i<20;i++) mv20+=(C[i]-C[i+1]); mv20/=20.0;
for(int i=0;i<20;i++){double d=(C[i]-C[i+1])-mv20; sv20+=d*d;} double volOfVol=sqrt(sv20/20.0);
// NOTE(review): flowProxy is computed but never pushed as a feature.
double flowProxy=fabs(ret1);
int up=0; for(int i=0;i<20;i++) if(C[i]>C[i+1]) up++;
double markovProxy=2.0*((double)up/20.0-0.5); // up/down persistence in [-1,1]
// NOTE(review): feature 4 is pushed as retN (duplicate of feature 1).
pairG[a].pushFeature(0,ret1); pairG[a].pushFeature(1,retN); pairG[a].pushFeature(2,slope);
pairG[a].pushFeature(3,vol); pairG[a].pushFeature(4,retN); pairG[a].pushFeature(5,rsiProxy);
pairG[a].pushFeature(6,rangePress); pairG[a].pushFeature(7,volOfVol); pairG[a].pushFeature(8,markovProxy);
pairG[a].pushFeaturePrev(0,ret1);
}
// Per-bar driver: push features every bar; every UPDATE_EVERY bars rebuild
// all graphs, score each pair via sigmoid(alpha*Creg + gamma*CA - beta*Pcouple),
// partially sort the Top-K, and periodically print diagnostics.
void onBar(){
for(int a=0;a<N_ASSET_NAMES;a++) updateFeaturesForAsset(a);
if(UPDATE_EVERY>1 && (Bar%UPDATE_EVERY)!=0) return;
if(lastUpdateBar==Bar) return;
lastUpdateBar=Bar;
for(int a=0;a<N_ASSET_NAMES;a++) pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg=computeRegimeCompactness();
double score[N_ASSET_NAMES]; int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA=pairG[a].pf.compactness;
double x=alpha*Creg + gamma*CA - beta*metaG.couplingPenalty;
score[a]=sigmoid(x); idx[a]=a;
}
// Partial selection sort: only the first TOP_K slots end up ordered.
for(int i=0;i<TOP_K;i++) for(int j=i+1;j<N_ASSET_NAMES;j++) if(score[idx[j]]>score[idx[i]]){int t=idx[i]; idx[i]=idx[j]; idx[j]=t;}
if((Bar%50)==0){
printf("\n[CrowdAverse_v2] Bar=%d Creg=%.4f Pcouple=%.4f Cluster=%.4f USDExp=%.4f",Bar,Creg,metaG.couplingPenalty,metaG.clusterScore,metaG.usdExposure);
for(int k=0;k<TOP_K;k++){int a=idx[k]; printf("\n #%d %s CA=%.4f Ent=%.4f BW=%.4f Score=%.4f",k+1,ASSET_NAMES[a],pairG[a].pf.compactness,pairG[a].pf.entropyStat,pairG[a].pf.bandwidth,score[a]);}
}
}
};
static CrowdAverseStrategyV2* S = 0; // singleton engine, owned by run()
// Zorro entry point: called for INITRUN/EXITRUN and once per bar in between.
DLLFUNC void run(){
// INIT: set bar period and lookback, select a default asset, build the engine once.
if(is(INITRUN)){BarPeriod=60; LookBack=max(LookBack,FEAT_WINDOW+50); asset((char*)ASSET_NAMES[0]); if(!S){S=new CrowdAverseStrategyV2();S->init();}}
// EXIT: free all buffers and the engine itself.
if(is(EXITRUN)){if(S){S->shutdown(); delete S; S=0;} return;}
if(!S || Bar<LookBack) return; // wait until enough history is loaded
S->onBar();
}
|
|
|
AstraRegime Nexus
[Re: TipmyPip]
#489217
02/23/26 19:57
02/23/26 19:57
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
This strategy is a multi-asset regime switcher designed for Zorro64 as a C++ DLL, built around the idea that market behavior can be inferred from relationships—not just from single indicators. It monitors a universe of 28 major FX pairs and continuously scores them to identify which pairs are best aligned with the current market regime. At its core are two connected graph layers: Per-pair “Aspect Graph” (micro layer): For every currency pair, the strategy computes a compact set of nine rolling features over a 200-bar window. These include short and medium log returns, a slope proxy (short MA minus longer MA), realized volatility, a momentum/RSI-like compression, range pressure (distance from a 20-bar mean normalized by dispersion), volatility-of-volatility, and a simple Markov-style persistence proxy (how often the price moved up over the last 20 bars). Rather than treating these features independently, the strategy builds a weighted graph where each node is a feature and each edge weight is derived from feature correlations (converted into a “distance”). It then runs an all-pairs shortest-path calculation to extract structural properties of that feature-network: graph compactness, mean edge weight, edge variance (used like an entropy/dispersion measure), and a centrality measure tied to volatility. Additional micro-statistics (like a simple “next-bar bullishness rate” and a short-horizon entropy estimate) provide a lightweight regime flag and a bandwidth proxy. Universe “Meta Graph” (macro layer): Across all 28 pairs, the strategy builds a second graph using correlation of the primary return feature, blended with a currency-exposure distance (based on base/quote currency overlap). This creates a market coupling view: when pairs behave similarly and exposure clusters tighten, the graph becomes more compact and a coupling penalty rises. It also derives rough clustering and USD-exposure diagnostics to describe how concentrated the opportunity set is. 
Finally, each pair receives a selection score that combines: (a) a coarse global regime compactness proxy, (b) that pair’s own feature-network compactness, and (c) the universe coupling penalty. Scores are passed through a sigmoid to normalize them, and the strategy prints the Top-K (5) ranked pairs periodically. In short: it’s a graph-based market-state radar that surfaces which FX pairs look most “coherent” internally while avoiding overly crowded, tightly coupled market conditions. // TGr06C_RegimeSwitcher_v2.cpp - Zorro64 Strategy DLL (C++)
// Strategy C v2: Regime-Switching with Enhanced Graph Architecture
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#define INF 1e30
#define EPS 1e-12
// FX universe: 27 crosses over the 8 major currencies.
// NOTE(review): CHFJPY appears to be missing from the usual 28-pair set -- confirm intent.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD","GBPAUD",
"GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD","USDCAD"
};
// BUG FIX: the list has 27 entries but the macro was hard-coded to 28 (and was
// defined twice), so every loop bounded by N_ASSET_NAMES read past the array.
// Define it once, derived from the array, so the two can never diverge.
#define N_ASSET_NAMES ((int)(sizeof(ASSET_NAMES)/sizeof(ASSET_NAMES[0])))
// --- Tunables: graph sizes, update cadence, and score weights ---
static const int FEAT_N = 9; // features per pair (nodes of the per-pair aspect graph)
static const int FEAT_WINDOW = 200; // rolling window (bars) for feature correlations
static const int META_WINDOW = 200; // rolling window (bars) for the cross-pair meta graph
static const int UPDATE_EVERY = 5; // rebuild graphs only every N bars
static const int TOP_K = 5; // number of top-ranked pairs reported
static const double alpha = 4.0; // score weight: regime compactness term (dominant, regime-switching variant)
static const double beta = 1.5; // score weight: market coupling penalty
static const double gamma = 1.5; // score weight: pair compactness reward (NOTE: may shadow ::gamma from math.h on some toolchains)
static const double LAMBDA_META = 0.7; // blend: correlation distance vs currency-exposure distance
// Clamp a value into the closed unit interval [0,1].
static double clamp01(double x) {
    if(x < 0.0) return 0.0;
    return (x > 1.0) ? 1.0 : x;
}
// Logistic function with the far tails saturated so exp() cannot overflow.
static double sigmoid(double x) {
    const double CUT = 30.0;
    if(x > CUT) return 1.0;
    if(x < -CUT) return 0.0;
    double e = exp(-x);
    return 1.0 / (1.0 + e);
}
// Three-letter ISO codes used to decompose each pair symbol into base/quote exposure.
static const char* CURRENCY_BASE[] = { "EUR","GBP","USD","CHF","JPY","AUD","CAD","NZD" };
static const int N_CURRENCIES = 8;
// Exposure of pair pairIdx to currency ccy:
// +1 if CURRENCY_BASE[ccy] is the base (first 3 chars of the symbol),
// -1 if it is the quote (last 3 chars), 0 if the pair does not contain it.
static int getCurrencyExposure(int pairIdx, int ccy) {
    const char* sym = ASSET_NAMES[pairIdx];   // 6-char symbol, e.g. "EURUSD"
    const char* code = CURRENCY_BASE[ccy];    // 3-char code, e.g. "USD"
    if(strncmp(sym, code, 3) == 0) return 1;
    if(strncmp(sym + 3, code, 3) == 0) return -1;
    return 0;
}
// Average per-currency exposure mismatch between two pairs, in [0,1].
// Same-side exposure costs 0, opposite-side costs 1, one-sided costs 0.5.
static double exposureDist(int i, int j) {
    double total = 0.0;
    for(int c = 0; c < N_CURRENCIES; c++) {
        const int ei = getCurrencyExposure(i, c);
        const int ej = getCurrencyExposure(j, c);
        if(ei && ej) {
            if(ei != ej) total += 1.0;  // both pairs exposed, opposite sides
        } else if(ei || ej) {
            total += 0.5;               // only one of the two pairs is exposed
        }
    }
    return total / (double)N_CURRENCIES;
}
// Fixed-capacity circular buffer of doubles.
// get(0) returns the most recently pushed sample, get(1) the one before, etc.
class RollingBuffer {
public:
int cap = 0, n = 0, head = 0; // capacity, fill count, next write slot
double* x = 0;                // owned heap storage (cap doubles)
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// FIX: the buffer owns raw memory; the implicit copy operations would make two
// objects free the same block. Forbid copying (none of the call sites copy).
RollingBuffer(const RollingBuffer&) = delete;
RollingBuffer& operator=(const RollingBuffer&) = delete;
// Allocate storage for L samples; any previous contents are discarded.
void init(int L){ shutdown(); cap = L; x = (double*)malloc((size_t)cap*sizeof(double)); if(!x) quit("OOM"); n = 0; head = 0; }
// Release storage; safe to call repeatedly.
void shutdown(){ if(x) free(x); x = 0; cap = 0; n = 0; head = 0; }
// Append a sample, overwriting the oldest one once the buffer is full.
void push(double v){ if(!x || cap<=0) return; x[head] = v; head = (head + 1) % cap; if(n < cap) n++; }
// i-th most recent sample (0 = newest). Caller must ensure i < n.
double get(int i) const { int idx = head - 1 - i; while(idx < 0) idx += cap; return x[idx % cap]; }
};
// Pearson correlation of the most recent L samples of two buffers.
// Returns 0 when either buffer holds fewer than L samples, when L < 5,
// or when either series is (numerically) constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L) {
    if(a.n < L || b.n < L || L < 5) return 0.0;
    double meanA = 0, meanB = 0;
    for(int i = 0; i < L; i++){ meanA += a.get(i); meanB += b.get(i); }
    meanA /= (double)L;
    meanB /= (double)L;
    double varA = 0, varB = 0, cov = 0;
    for(int i = 0; i < L; i++){
        double da = a.get(i) - meanA;
        double db = b.get(i) - meanB;
        varA += da*da; varB += db*db; cov += da*db;
    }
    double denom = sqrt(varA*varB);
    if(denom <= EPS) return 0.0;
    return cov/denom;
}
// Dense weighted graph stored as a row-major n*n distance matrix d.
// Off-diagonal entries start at INF (no edge); after allPairsShortest()
// the matrix holds all-pairs shortest-path distances (Floyd-Warshall).
class WeightedGraph {
public:
int n = 0;
double* d = 0; // owned n*n matrix; d[pos(i,j)] = distance i->j, INF = unreachable
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// FIX: d is raw owned memory; implicit copies would double-free it. Forbid copying.
WeightedGraph(const WeightedGraph&) = delete;
WeightedGraph& operator=(const WeightedGraph&) = delete;
void init(int N){ shutdown(); n = N; d = (double*)malloc((size_t)n*n*sizeof(double)); if(!d) quit("OOM"); reset(); }
void shutdown(){ if(d) free(d); d = 0; n = 0; }
inline int pos(int r,int c) const { return r*n + c; }
// Clear to the "no edges" state: 0 on the diagonal, INF elsewhere.
void reset(){ for(int r=0;r<n;r++) for(int c=0;c<n;c++) d[pos(r,c)] = (r==c) ? 0.0 : INF; }
// Floyd-Warshall, O(n^3); overwrites d with shortest-path distances.
void allPairsShortest(){ for(int k=0;k<n;k++) for(int i=0;i<n;i++) for(int j=0;j<n;j++){ double cand = d[pos(i,k)] + d[pos(k,j)]; if(cand < d[pos(i,j)]) d[pos(i,j)] = cand; } }
// Wiener-like index: sum over unordered pairs of the average of both directions.
double wienerUndirectedLike() const { double W=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ double dij = d[pos(i,j)], dji = d[pos(j,i)]; W += 0.5*(dij + dji); } return W; }
// Mean of finite upper-triangle entries (raw weights or distances, whatever d holds).
double meanEdgeWeight() const { double sum=0; int cnt=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ if(d[pos(i,j)] < INF){ sum += d[pos(i,j)]; cnt++; }} return cnt>0 ? sum/cnt : 0.0; }
// Population variance of finite upper-triangle entries.
double edgeVariance() const { double mean = meanEdgeWeight(); double var=0; int cnt=0; for(int i=0;i<n;i++) for(int j=i+1;j<n;j++){ if(d[pos(i,j)] < INF){ double diff = d[pos(i,j)] - mean; var += diff*diff; cnt++; }} return cnt>0 ? var/cnt : 0.0; }
// Closeness-style centrality: sum of finite distances from node to all others.
double centrality(int node) const { double sum=0; for(int j=0;j<n;j++) if(j!=node && d[pos(node,j)] < INF) sum += d[pos(node,j)]; return sum; }
};
// Scalar summary of one pair's feature graph, refreshed by PairAspectGraph::rebuildIfReady().
struct PairFeatures {
double compactness, meanEdge, entropy, volCentrality; // graph structure: 1/(1+Wiener), mean edge, edge variance, vol-node centrality
double regimeProb, pBullNext, entropyStat, bandwidth; // state: regime flag, up-move rate, return-histogram entropy, vol ratio
};
// Per-pair "aspect graph": nodes are the FEAT_N rolling features, edge weights
// are correlation-derived distances, and structural metrics are summarized in pf.
class PairAspectGraph {
public:
WeightedGraph G; // FEAT_N x FEAT_N feature graph
RollingBuffer feat[FEAT_N]; // current feature streams
RollingBuffer featPrev[FEAT_N]; // lagged stream (only index 0 is ever pushed by the strategy)
PairFeatures pf; // latest structural summary
PairAspectGraph() { pf.compactness=0; pf.meanEdge=0; pf.entropy=0; pf.volCentrality=0; pf.regimeProb=0; pf.pBullNext=0; pf.entropyStat=0; pf.bandwidth=0; }
void init(){ G.init(FEAT_N); for(int k=0;k<FEAT_N;k++){ feat[k].init(FEAT_WINDOW); featPrev[k].init(FEAT_WINDOW); } }
void shutdown(){ for(int k=0;k<FEAT_N;k++){ feat[k].shutdown(); featPrev[k].shutdown(); } G.shutdown(); }
// Map correlation to a distance in [0,1]: |corr|=1 -> 0 (close), corr=0 -> 1 (far).
static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
void pushFeature(int k, double v){ feat[k].push(v); }
void pushFeaturePrev(int k, double v){ featPrev[k].push(v); }
// Shannon entropy of a 10-bin histogram over the last L values of feature 0.
// NOTE(review): binning is (int)(v*10) with clamping -- for small log-returns
// most samples land in the edge bins; confirm intended scaling.
double computeEntropy(int L) {
if(feat[0].n < L) return 0.0;
int bins = 10; int* hist = (int*)calloc(bins, sizeof(int));
for(int i=0;i<L && i<feat[0].n; i++) { int bin = (int)(feat[0].get(i) * bins); if(bin<0) bin=0; if(bin>=bins) bin=bins-1; hist[bin]++; }
double H = 0.0;
for(int b=0;b<bins;b++) if(hist[b]>0) { double p = (double)hist[b]/(double)L; H -= p * log(p + EPS); }
free(hist); return H;
}
// Rebuild the feature graph and refresh pf, once FEAT_WINDOW samples exist.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){ pf.compactness = 0; return; }
G.reset();
for(int i=0;i<FEAT_N;i++){
for(int j=i+1;j<FEAT_N;j++){
double c = corrOf(feat[i], feat[j], FEAT_WINDOW);
double w = corrToDist(c);
// With j > i this only fires for the single pair (7,8); blends in lagged correlation.
if(i >= 7 && j >= 8) {
double cp = (featPrev[i].n > 10) ? corrOf(featPrev[i], feat[j], min(featPrev[i].n, FEAT_WINDOW)) : 0;
w = 0.5 * w + 0.5 * corrToDist(cp);
}
G.d[G.pos(i,j)] = w; G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
pf.compactness = 1.0/(1.0 + W);
pf.meanEdge = G.meanEdgeWeight();
pf.entropy = G.edgeVariance();
pf.volCentrality = G.centrality(3) / (FEAT_N - 1); // node 3 = realized volatility feature
if(feat[8].n >= 20) {
int upNext = 0;
for(int i=0;i<19;i++) if(feat[0].get(i+1) > 0) upNext++;
pf.pBullNext = (double)upNext / 19.0;
pf.entropyStat = computeEntropy(20);
pf.regimeProb = (pf.entropyStat < 2.0) ? 1.0 : 0.0;
pf.bandwidth = feat[3].get(0) / (feat[3].get(10) + EPS); // current vol / vol 10 samples ago
}
}
};
// Universe-level graph over the N_ASSET_NAMES pairs: edge weights blend
// return-correlation distance with currency-exposure distance. Summarized
// into a coupling penalty, a cluster score, and a USD exposure measure.
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty, clusterScore, usdExposure;
PairUniverseGraph() : couplingPenalty(0), clusterScore(0), usdExposure(0) {}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
// |corr| in [0,1] -> distance in [0,1]; strongly (anti)correlated pairs are close.
static double corrToDist(double corr){
	double a = fabs(corr);
	return (a > 1.0) ? 0.0 : 1.0 - a;
}
// Rebuild edges from the per-pair feature buffers, run Floyd-Warshall,
// then refresh the three scalar summaries.
void rebuild(PairAspectGraph* pairs){
	G.reset();
	for(int i=0;i<N_ASSET_NAMES;i++)
		for(int j=i+1;j<N_ASSET_NAMES;j++){
			double corrPart = corrToDist(corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW));
			double expoPart = exposureDist(i,j);
			double w = LAMBDA_META*corrPart + (1.0-LAMBDA_META)*expoPart;
			G.d[G.pos(i,j)] = w;
			G.d[G.pos(j,i)] = w;
		}
	G.allPairsShortest();
	couplingPenalty = 1.0/(1.0 + G.wienerUndirectedLike()); // tighter universe -> higher penalty
	clusterScore = computeClusterScore();
	usdExposure = computeUSDExposure(pairs);
}
// Fraction of pairs that have NO neighbor closer than 0.3, i.e. isolated nodes.
double computeClusterScore() {
	int isolated = 0;
	for(int i=0;i<N_ASSET_NAMES;i++){
		bool hasClose = false;
		for(int j=0;j<N_ASSET_NAMES && !hasClose;j++)
			if(i != j && G.d[G.pos(i,j)] < 0.3) hasClose = true;
		if(!hasClose) isolated++;
	}
	return (double)isolated/N_ASSET_NAMES;
}
// Compactness-weighted tally of pairs that involve USD, clamped to [0,1].
double computeUSDExposure(PairAspectGraph* pairs){
	double total = 0.0;
	for(int i=0;i<N_ASSET_NAMES;i++){
		int e = getCurrencyExposure(i, 2); // index 2 == USD in CURRENCY_BASE
		if(e != 0) total += pairs[i].pf.compactness;
	}
	return clamp01(total/5.0);
}
};
// Placeholder regime-compactness signal: alternates between two fixed
// Wiener values by bar parity (even bar -> W=1.0, odd bar -> W=2.5).
static double computeRegimeCompactness(){
	double W = 2.5;
	if((Bar % 2) == 0) W = 1.0;
	return 1.0/(1.0 + W);
}
// Top-level regime-switching engine: one PairAspectGraph per asset plus a
// universe coupling graph; rescores and ranks the pairs every UPDATE_EVERY
// bars and prints the TOP_K ranked pairs every 50 bars. Places no trades.
class RegimeSwitcherStrategyV2 {
public:
PairAspectGraph pairG[N_ASSET_NAMES]; // per-pair feature graphs
PairUniverseGraph metaG;              // cross-pair coupling graph
int lastUpdateBar;                    // last bar on which graphs were rebuilt
RegimeSwitcherStrategyV2():lastUpdateBar(-999999){}
void init(){ for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init(); metaG.init(); }
void shutdown(){ metaG.shutdown(); for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown(); }
// Derive the 9 features for asset a from its close series and push them into
// the rolling buffers. asset() switches instrument context before series().
void updateFeaturesForAsset(int a){
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return; // the 20-sample statistics below need 21 closes
double c0=C[0], c1=C[1], c12=C[12];
double ret1=log(c0/c1), retN=log(c0/c12); // 1-bar and 12-bar log returns
double ma3=(C[0]+C[1]+C[2])/3.0, ma12=0; for(int i=0;i<12;i++) ma12+=C[i]; ma12/=12.0;
double slope=ma3-ma12; // fast-minus-slow moving average
double m=0,s=0; for(int i=0;i<20;i++){double ri=log(C[i]/C[i+1]); m+=ri;} m/=20.0;
for(int i=0;i<20;i++){double ri=log(C[i]/C[i+1]),d=ri-m; s+=d*d;} double vol=sqrt(s/20.0); // realized vol of 1-bar returns
double rsiProxy=tanh(10.0*retN); // bounded momentum proxy
double m20=0,s20=0; for(int i=0;i<20;i++) m20+=C[i]; m20/=20.0;
for(int i=0;i<20;i++){double d=C[i]-m20; s20+=d*d;} s20=sqrt(s20/20.0)+1e-12;
double rangePress=tanh(0.5*(c0-m20)/s20); // squashed z-score of price vs 20-bar mean
double mv20=0,sv20=0; for(int i=0;i<20;i++) mv20+=(C[i]-C[i+1]); mv20/=20.0;
for(int i=0;i<20;i++){double d=(C[i]-C[i+1])-mv20; sv20+=d*d;} double volOfVol=sqrt(sv20/20.0); // dispersion of bar-to-bar changes
double flowProxy=fabs(ret1); // NOTE(review): computed but never pushed; slot 4 re-pushes retN below -- confirm intent
int up=0; for(int i=0;i<20;i++) if(C[i]>C[i+1]) up++;
double markovProxy=2.0*((double)up/20.0-0.5); // up-move frequency mapped to [-1,1]
pairG[a].pushFeature(0,ret1); pairG[a].pushFeature(1,retN); pairG[a].pushFeature(2,slope);
pairG[a].pushFeature(3,vol); pairG[a].pushFeature(4,retN); pairG[a].pushFeature(5,rsiProxy);
pairG[a].pushFeature(6,rangePress); pairG[a].pushFeature(7,volOfVol); pairG[a].pushFeature(8,markovProxy);
pairG[a].pushFeaturePrev(0,ret1);
}
// Per-bar driver: features are refreshed every bar; graphs are rebuilt and
// pairs rescored only every UPDATE_EVERY bars.
void onBar(){
for(int a=0;a<N_ASSET_NAMES;a++) updateFeaturesForAsset(a);
if(UPDATE_EVERY>1 && (Bar%UPDATE_EVERY)!=0) return;
if(lastUpdateBar==Bar) return; // guard against duplicate rebuilds on one bar
lastUpdateBar=Bar;
for(int a=0;a<N_ASSET_NAMES;a++) pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg=computeRegimeCompactness();
double score[N_ASSET_NAMES]; int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA=pairG[a].pf.compactness;
double x=alpha*Creg + gamma*CA - beta*metaG.couplingPenalty; // reward compactness, penalize universe coupling
score[a]=sigmoid(x); idx[a]=a;
}
// Partial selection sort: only the first TOP_K slots of idx become ordered.
for(int i=0;i<TOP_K;i++) for(int j=i+1;j<N_ASSET_NAMES;j++) if(score[idx[j]]>score[idx[i]]){int t=idx[i]; idx[i]=idx[j]; idx[j]=t;}
if((Bar%50)==0){
printf("\n[RegimeSwitcher_v2] Bar=%d Creg=%.4f Pcouple=%.4f Cluster=%.4f USDExp=%.4f",Bar,Creg,metaG.couplingPenalty,metaG.clusterScore,metaG.usdExposure);
for(int k=0;k<TOP_K;k++){int a=idx[k]; printf("\n #%d %s CA=%.4f Reg=%.4f BW=%.4f Score=%.4f",k+1,ASSET_NAMES[a],pairG[a].pf.compactness,pairG[a].pf.regimeProb,pairG[a].pf.bandwidth,score[a]);}
}
}
};
// Singleton strategy instance; created on INITRUN, destroyed on EXITRUN.
static RegimeSwitcherStrategyV2* S = 0;
// Zorro DLL entry point, called once per bar plus INITRUN/EXITRUN phases.
DLLFUNC void run(){
if(is(INITRUN)){BarPeriod=60; LookBack=max(LookBack,FEAT_WINDOW+50); asset((char*)ASSET_NAMES[0]); if(!S){S=new RegimeSwitcherStrategyV2();S->init();}} // 1h bars; lookback covers the feature window
if(is(EXITRUN)){if(S){S->shutdown(); delete S; S=0;} return;}
if(!S || Bar<LookBack) return; // wait until enough history exists
S->onBar();
}
|
|
|
NexusVol Navigator
[Re: TipmyPip]
#489218
02/23/26 19:59
02/23/26 19:59
|
Joined: Sep 2017
Posts: 250
TipmyPip
OP
Member
|
OP
Member
Joined: Sep 2017
Posts: 250
|
NexusVol Navigator (a.k.a. VolAdjuster v2) is a multi-asset, volatility-aware ranking engine designed for Zorro64 as a C++ strategy DLL. Instead of producing direct trade entries, it continuously scores and selects the most “attractive” FX pairs from a predefined universe of 28 major and cross currency pairs. Its core idea is that market opportunities improve when (1) the current regime is orderly, (2) an individual pair’s internal feature relationships are coherent, and (3) the broader universe is not tightly coupled in a way that amplifies systemic risk. The strategy builds two layers of graph structure. At the pair level, each asset maintains a rolling window of nine engineered features (short- and medium-horizon log returns, moving-average slope, realized volatility, oscillator-style proxies, range pressure, volatility-of-volatility, and a simple Markov-style “up probability” proxy). These features feed an “aspect graph” where nodes are features and edge weights are correlation-derived distances. By running an all-pairs shortest-path calculation, the graph becomes a compact representation of how tightly the features move together. The result is a compactness score (higher when the feature network is more cohesive), plus extra diagnostics such as mean edge weight, variance of edges (used as an entropy-like stability measure), volatility centrality, a regime probability proxy, and a simple bandwidth ratio (current volatility vs. a lagged reference). At the universe level, a second graph links pairs to each other using a blend of (a) return correlation distances and (b) a currency-exposure distance that measures how similar two pairs are in terms of shared base/quote currency risk. This produces a “coupling penalty” (higher when everything moves together) and a rough clustering score. Every few bars, the strategy combines: a lightweight regime compactness signal, each pair’s graph compactness, the universe coupling penalty, and a volatility dampener. 
The combined value is passed through a sigmoid to create a 0–1 score, then the top 5 pairs are ranked and periodically printed for monitoring. The practical intent is portfolio allocation and risk throttling: favor pairs with structured, consistent behavior while reducing exposure when the market becomes overly correlated or volatile.

// TGr06D_VolAdjuster_v2.cpp - Zorro64 Strategy DLL (C++)
// Strategy D v2: VolAdjuster with Enhanced Graph Architecture
#include <zorro.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#define INF 1e30   // "infinite" distance marker for unreachable graph pairs
#define EPS 1e-12  // numerical guard against division by zero / log(0)
// Trading universe: all 28 pairs formed from the 8 currencies in
// CURRENCY_BASE. BUG FIX: the original list had only 27 entries (CHFJPY
// was missing) while N_ASSET_NAMES is 28, so every loop over the universe
// read one element past the end of the array. Also removed the duplicate
// #define of N_ASSET_NAMES.
const char* ASSET_NAMES[] = {
"EURUSD","GBPUSD","USDCHF","USDJPY","AUDUSD","AUDCAD","AUDCHF","AUDJPY","AUDNZD",
"CADJPY","CADCHF","CHFJPY","EURAUD","EURCAD","EURCHF","EURGBP","EURJPY","EURNZD",
"GBPAUD","GBPCAD","GBPCHF","GBPJPY","GBPNZD","NZDCAD","NZDCHF","NZDJPY","NZDUSD",
"USDCAD"
};
#define N_ASSET_NAMES 28
static const int FEAT_N = 9;            // features per asset (nodes of the aspect graph)
static const int FEAT_WINDOW = 200;     // rolling window length for feature correlations
static const int META_WINDOW = 200;     // window for cross-asset return correlations
static const int UPDATE_EVERY = 5;      // rebuild graphs and rescore every N bars
static const int TOP_K = 5;             // number of top-ranked pairs reported
static const double alpha = 2.0;        // weight of the regime-compactness term
static const double beta = 1.5;         // weight of the universe coupling penalty
static const double gamma = 2.0;        // weight of the per-pair compactness term
                                        // NOTE(review): 'gamma' may collide with the
                                        // gamma() function some <math.h> variants
                                        // declare -- confirm it compiles on the target toolchain
static const double VOL_SCALE = 0.5;    // volatility dampening factor applied to scores
static const double LAMBDA_META = 0.7;  // blend: correlation vs currency-exposure distance
// Clamp x into the closed interval [0,1].
static double clamp01(double x) {
	if(x > 1.0) return 1.0;
	return (x < 0.0) ? 0.0 : x;
}
// Logistic function with hard saturation beyond |x| > 30 to avoid
// needless exp() overflow/underflow work.
static double sigmoid(double x) {
	if(x < -30.0) return 0.0;
	if(x > 30.0) return 1.0;
	return 1.0/(1.0 + exp(-x));
}
// The 8 currencies spanned by the universe; an index into this array is the
// 'ccy' argument of getCurrencyExposure() (USD is index 2).
static const char* CURRENCY_BASE[] = { "EUR","GBP","USD","CHF","JPY","AUD","CAD","NZD" };
static const int N_CURRENCIES = 8;
// Directional exposure of pair pairIdx to currency ccy: +1 if it is the
// base (first 3 chars), -1 if it is the quote (last 3 chars), 0 otherwise.
// Assumes 6-character "XXXYYY" symbols, which all ASSET_NAMES entries are.
static int getCurrencyExposure(int pairIdx, int ccy) {
	const char* sym = ASSET_NAMES[pairIdx];
	const char* cur = CURRENCY_BASE[ccy];
	if(strncmp(sym, cur, 3) == 0) return 1;
	if(strncmp(sym + 3, cur, 3) == 0) return -1;
	return 0;
}
// Currency-overlap distance in [0,1] between two pairs: per currency,
// 0 if both pairs load it with the same sign, 1 if with opposite signs,
// 0.5 if only one of the two involves it; averaged over all currencies.
static double exposureDist(int i, int j) {
	double acc = 0.0;
	for(int c = 0; c < N_CURRENCIES; c++) {
		int a = getCurrencyExposure(i, c);
		int b = getCurrencyExposure(j, c);
		if(a == 0 && b == 0) continue;      // neither pair touches this currency
		if(a == 0 || b == 0) acc += 0.5;    // only one pair involves it
		else if(a != b) acc += 1.0;         // both involve it, opposite sides
	}
	return acc / (double)N_CURRENCIES;
}
// Fixed-capacity circular buffer of doubles. get(0) is the most recent
// sample, get(1) the one before, and so on. Uses malloc/free explicitly;
// allocation failure aborts via Zorro's quit().
class RollingBuffer {
public:
int cap = 0, n = 0, head = 0; // capacity, fill count, next write slot
double* x = 0;
RollingBuffer() {}
~RollingBuffer(){ shutdown(); }
// (Re)allocate storage for L samples and reset the buffer to empty.
void init(int L){
	shutdown();
	cap = L;
	x = (double*)malloc((size_t)cap*sizeof(double));
	if(!x) quit("OOM");
	n = 0; head = 0;
}
// Release storage and reset all state; safe to call repeatedly.
void shutdown(){
	free(x); // free(NULL) is a no-op
	x = 0; cap = 0; n = 0; head = 0;
}
// Append a sample, overwriting the oldest once the buffer is full.
void push(double v){
	if(!x || cap <= 0) return;
	x[head] = v;
	if(++head == cap) head = 0;
	if(n < cap) n++;
}
// i-th most recent sample (i=0 is newest). Caller must ensure i < n.
double get(int i) const {
	int idx = (head - 1 - i) % cap;
	if(idx < 0) idx += cap;
	return x[idx];
}
};
// Pearson correlation of the most recent L samples of two buffers.
// Returns 0 when either buffer holds fewer than L samples, when L < 5,
// or when a series is numerically constant.
static double corrOf(const RollingBuffer& a, const RollingBuffer& b, int L) {
	if(L < 5 || a.n < L || b.n < L) return 0.0;
	double ma = 0.0, mb = 0.0;
	for(int i = 0; i < L; i++){ ma += a.get(i); mb += b.get(i); }
	ma /= (double)L; mb /= (double)L;
	double saa = 0.0, sbb = 0.0, sab = 0.0;
	for(int i = 0; i < L; i++){
		double da = a.get(i) - ma;
		double db = b.get(i) - mb;
		saa += da*da; sbb += db*db; sab += da*db;
	}
	double norm = sqrt(saa*sbb);
	if(norm <= EPS) return 0.0;
	return sab/norm;
}
// Dense n x n distance matrix stored row-major in 'd'. After
// allPairsShortest() (Floyd-Warshall) it holds shortest-path distances;
// INF marks unreachable pairs, and the diagonal is zero.
class WeightedGraph {
public:
int n = 0;
double* d = 0;
WeightedGraph() {}
~WeightedGraph(){ shutdown(); }
// Allocate an N x N matrix and reset it to the identity metric.
void init(int N){
	shutdown();
	n = N;
	d = (double*)malloc((size_t)n*n*sizeof(double));
	if(!d) quit("OOM");
	reset();
}
void shutdown(){ free(d); d = 0; n = 0; }
inline int pos(int r,int c) const { return r*n + c; }
// Zero diagonal, INF everywhere else.
void reset(){
	for(int r = 0; r < n; r++)
		for(int c = 0; c < n; c++)
			d[pos(r,c)] = (r == c) ? 0.0 : INF;
}
// Floyd-Warshall all-pairs shortest paths, in place.
void allPairsShortest(){
	for(int k = 0; k < n; k++)
		for(int i = 0; i < n; i++){
			double dik = d[pos(i,k)]; // invariant during the j loop (d[k][k]==0)
			for(int j = 0; j < n; j++){
				double cand = dik + d[pos(k,j)];
				if(cand < d[pos(i,j)]) d[pos(i,j)] = cand;
			}
		}
}
// Wiener-like index: sum over unordered pairs of the average of the two
// directed distances (identical for a symmetric matrix).
double wienerUndirectedLike() const {
	double W = 0.0;
	for(int i = 0; i < n; i++)
		for(int j = i+1; j < n; j++)
			W += 0.5*(d[pos(i,j)] + d[pos(j,i)]);
	return W;
}
// Mean of finite upper-triangle distances (path lengths after
// allPairsShortest(), despite the "edge" name). 0 if none are finite.
double meanEdgeWeight() const {
	double sum = 0.0; int cnt = 0;
	for(int i = 0; i < n; i++)
		for(int j = i+1; j < n; j++){
			double v = d[pos(i,j)];
			if(v < INF){ sum += v; cnt++; }
		}
	return cnt ? sum/cnt : 0.0;
}
// Population variance of the same finite distances.
double edgeVariance() const {
	double mu = meanEdgeWeight();
	double acc = 0.0; int cnt = 0;
	for(int i = 0; i < n; i++)
		for(int j = i+1; j < n; j++){
			double v = d[pos(i,j)];
			if(v < INF){ double e = v - mu; acc += e*e; cnt++; }
		}
	return cnt ? acc/cnt : 0.0;
}
// Closeness-style centrality: sum of finite distances from 'node'.
double centrality(int node) const {
	double sum = 0.0;
	for(int j = 0; j < n; j++){
		if(j == node) continue;
		double v = d[pos(node,j)];
		if(v < INF) sum += v;
	}
	return sum;
}
};
// Snapshot of graph-derived diagnostics for one asset, refreshed by
// PairAspectGraph::rebuildIfReady().
struct PairFeatures {
// Graph geometry: compactness score, mean distance, distance variance,
// and centrality of the volatility node.
double compactness, meanEdge, entropy, volCentrality;
// Regime flag (entropy threshold), up-move frequency, histogram entropy,
// and current-vs-lagged volatility ratio.
double regimeProb, pBullNext, entropyStat, bandwidth;
// Latest realized-volatility sample (feature buffer 3).
double volatility;
};
// Per-asset "aspect graph": nodes are the FEAT_N engineered features of one
// pair, edge weights are correlation distances over a FEAT_WINDOW rolling
// window. rebuildIfReady() runs Floyd-Warshall on the graph and distills it
// into the PairFeatures snapshot 'pf'.
class PairAspectGraph {
public:
WeightedGraph G;
RollingBuffer feat[FEAT_N];     // rolling feature windows
RollingBuffer featPrev[FEAT_N]; // lagged copies (callers currently fill slot 0 only)
PairFeatures pf;
PairAspectGraph() { pf.compactness=0; pf.meanEdge=0; pf.entropy=0; pf.volCentrality=0; pf.regimeProb=0; pf.pBullNext=0; pf.entropyStat=0; pf.bandwidth=0; pf.volatility=0; }
void init(){ G.init(FEAT_N); for(int k=0;k<FEAT_N;k++){ feat[k].init(FEAT_WINDOW); featPrev[k].init(FEAT_WINDOW); } }
void shutdown(){ for(int k=0;k<FEAT_N;k++){ feat[k].shutdown(); featPrev[k].shutdown(); } G.shutdown(); }
// |corr| in [0,1] -> distance in [0,1]; high correlation = short edge.
static double corrToDist(double corr){ double a = fabs(corr); if(a > 1.0) a = 1.0; return 1.0 - a; }
void pushFeature(int k, double v){ feat[k].push(v); }
void pushFeaturePrev(int k, double v){ featPrev[k].push(v); }
// Shannon entropy of a 10-bin histogram over the last L samples of feature 0.
// NOTE(review): the binning (value*10, clamped to [0,9]) assumes values in
// [0,1); feature 0 is a 1-bar log return, so most samples clamp into bin 0
// and the statistic has little resolution -- confirm this is intended.
double computeEntropy(int L) {
if(feat[0].n < L) return 0.0;
enum { BINS = 10 };
int hist[BINS] = {0}; // fixed-size stack histogram: removes the original's unchecked calloc/free
for(int i=0;i<L && i<feat[0].n; i++) { int bin = (int)(feat[0].get(i) * BINS); if(bin<0) bin=0; if(bin>=BINS) bin=BINS-1; hist[bin]++; }
double H = 0.0;
for(int b=0;b<BINS;b++) if(hist[b]>0) { double p = (double)hist[b]/(double)L; H -= p * log(p + EPS); }
return H;
}
// Rebuild the feature graph and refresh 'pf' once the window is full.
void rebuildIfReady(){
if(feat[0].n < FEAT_WINDOW){ pf.compactness = 0; return; }
G.reset();
for(int i=0;i<FEAT_N;i++){
for(int j=i+1;j<FEAT_N;j++){
double c = corrOf(feat[i], feat[j], FEAT_WINDOW);
double w = corrToDist(c);
// With FEAT_N==9 this branch fires only for (i,j)==(7,8): the
// volOfVol/markov edge blends in a lagged correlation.
if(i >= 7 && j >= 8) {
double cp = (featPrev[i].n > 10) ? corrOf(featPrev[i], feat[j], min(featPrev[i].n, FEAT_WINDOW)) : 0;
w = 0.5 * w + 0.5 * corrToDist(cp);
}
G.d[G.pos(i,j)] = w; G.d[G.pos(j,i)] = w;
}
}
G.allPairsShortest();
double W = G.wienerUndirectedLike();
pf.compactness = 1.0/(1.0 + W); // tight feature graph -> compactness near 1
pf.meanEdge = G.meanEdgeWeight();
pf.entropy = G.edgeVariance(); // variance of distances used as an entropy proxy
pf.volCentrality = G.centrality(3) / (FEAT_N - 1); // node 3 = realized volatility
pf.volatility = feat[3].get(0); // latest realized-volatility sample
if(feat[8].n >= 20) {
int upNext = 0;
for(int i=0;i<19;i++) if(feat[0].get(i+1) > 0) upNext++;
pf.pBullNext = (double)upNext / 19.0; // fraction of positive 1-bar returns among the last 19
pf.entropyStat = computeEntropy(20);
pf.regimeProb = (pf.entropyStat < 2.0) ? 1.0 : 0.0; // low entropy -> "orderly regime"
pf.bandwidth = feat[3].get(0) / (feat[3].get(10) + EPS); // current vs 10-bar-old volatility
}
}
};
// Universe-level graph over the N_ASSET_NAMES pairs: edge weights blend
// return-correlation distance with currency-exposure distance. Summarized
// into a coupling penalty, a cluster score, and a USD exposure measure.
class PairUniverseGraph {
public:
WeightedGraph G;
double couplingPenalty, clusterScore, usdExposure;
PairUniverseGraph() : couplingPenalty(0), clusterScore(0), usdExposure(0) {}
void init(){ G.init(N_ASSET_NAMES); }
void shutdown(){ G.shutdown(); }
// |corr| in [0,1] -> distance in [0,1]; strongly (anti)correlated pairs are close.
static double corrToDist(double corr){
	double a = fabs(corr);
	return (a > 1.0) ? 0.0 : 1.0 - a;
}
// Rebuild edges from the per-pair feature buffers, run Floyd-Warshall,
// then refresh the three scalar summaries.
void rebuild(PairAspectGraph* pairs){
	G.reset();
	for(int i=0;i<N_ASSET_NAMES;i++)
		for(int j=i+1;j<N_ASSET_NAMES;j++){
			double corrPart = corrToDist(corrOf(pairs[i].feat[0], pairs[j].feat[0], META_WINDOW));
			double expoPart = exposureDist(i,j);
			double w = LAMBDA_META*corrPart + (1.0-LAMBDA_META)*expoPart;
			G.d[G.pos(i,j)] = w;
			G.d[G.pos(j,i)] = w;
		}
	G.allPairsShortest();
	couplingPenalty = 1.0/(1.0 + G.wienerUndirectedLike()); // tighter universe -> higher penalty
	clusterScore = computeClusterScore();
	usdExposure = computeUSDExposure(pairs);
}
// Fraction of pairs that have NO neighbor closer than 0.3, i.e. isolated nodes.
double computeClusterScore() {
	int isolated = 0;
	for(int i=0;i<N_ASSET_NAMES;i++){
		bool hasClose = false;
		for(int j=0;j<N_ASSET_NAMES && !hasClose;j++)
			if(i != j && G.d[G.pos(i,j)] < 0.3) hasClose = true;
		if(!hasClose) isolated++;
	}
	return (double)isolated/N_ASSET_NAMES;
}
// Compactness-weighted tally of pairs that involve USD, clamped to [0,1].
double computeUSDExposure(PairAspectGraph* pairs){
	double total = 0.0;
	for(int i=0;i<N_ASSET_NAMES;i++){
		int e = getCurrencyExposure(i, 2); // index 2 == USD in CURRENCY_BASE
		if(e != 0) total += pairs[i].pf.compactness;
	}
	return clamp01(total/5.0);
}
};
// Placeholder regime-compactness signal: alternates between two fixed
// Wiener values by bar parity (even bar -> W=1.0, odd bar -> W=2.5).
static double computeRegimeCompactness(){
	double W = 2.5;
	if((Bar % 2) == 0) W = 1.0;
	return 1.0/(1.0 + W);
}
// Top-level ranking engine: one PairAspectGraph per asset plus a universe
// coupling graph; rescores the pairs every UPDATE_EVERY bars, dampening
// scores by realized volatility, and prints the TOP_K ranked pairs every
// 50 bars. Places no trades.
class VolAdjusterStrategyV2 {
public:
PairAspectGraph pairG[N_ASSET_NAMES]; // per-pair feature graphs
PairUniverseGraph metaG;              // cross-pair coupling graph
int lastUpdateBar;                    // last bar on which graphs were rebuilt
VolAdjusterStrategyV2():lastUpdateBar(-999999){}
void init(){ for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].init(); metaG.init(); }
void shutdown(){ metaG.shutdown(); for(int i=0;i<N_ASSET_NAMES;i++) pairG[i].shutdown(); }
// Derive the 9 features for asset a from its close series and push them into
// the rolling buffers. asset() switches instrument context before series().
void updateFeaturesForAsset(int a){
asset((char*)ASSET_NAMES[a]);
vars C = series(priceClose(0));
if(Bar < 20) return; // the 20-sample statistics below need 21 closes
double c0=C[0], c1=C[1], c12=C[12];
double ret1=log(c0/c1), retN=log(c0/c12); // 1-bar and 12-bar log returns
double ma3=(C[0]+C[1]+C[2])/3.0, ma12=0; for(int i=0;i<12;i++) ma12+=C[i]; ma12/=12.0;
double slope=ma3-ma12; // fast-minus-slow moving average
double m=0,s=0; for(int i=0;i<20;i++){double ri=log(C[i]/C[i+1]); m+=ri;} m/=20.0;
for(int i=0;i<20;i++){double ri=log(C[i]/C[i+1]),d=ri-m; s+=d*d;} double vol=sqrt(s/20.0); // realized vol of 1-bar returns
double rsiProxy=tanh(10.0*retN); // bounded momentum proxy
double m20=0,s20=0; for(int i=0;i<20;i++) m20+=C[i]; m20/=20.0;
for(int i=0;i<20;i++){double d=C[i]-m20; s20+=d*d;} s20=sqrt(s20/20.0)+1e-12;
double rangePress=tanh(0.5*(c0-m20)/s20); // squashed z-score of price vs 20-bar mean
double mv20=0,sv20=0; for(int i=0;i<20;i++) mv20+=(C[i]-C[i+1]); mv20/=20.0;
for(int i=0;i<20;i++){double d=(C[i]-C[i+1])-mv20; sv20+=d*d;} double volOfVol=sqrt(sv20/20.0); // dispersion of bar-to-bar changes
double flowProxy=fabs(ret1); // NOTE(review): computed but never pushed; slot 4 re-pushes retN below -- confirm intent
int up=0; for(int i=0;i<20;i++) if(C[i]>C[i+1]) up++;
double markovProxy=2.0*((double)up/20.0-0.5); // up-move frequency mapped to [-1,1]
pairG[a].pushFeature(0,ret1); pairG[a].pushFeature(1,retN); pairG[a].pushFeature(2,slope);
pairG[a].pushFeature(3,vol); pairG[a].pushFeature(4,retN); pairG[a].pushFeature(5,rsiProxy);
pairG[a].pushFeature(6,rangePress); pairG[a].pushFeature(7,volOfVol); pairG[a].pushFeature(8,markovProxy);
pairG[a].pushFeaturePrev(0,ret1);
}
// Per-bar driver: features are refreshed every bar; graphs are rebuilt and
// pairs rescored only every UPDATE_EVERY bars.
void onBar(){
for(int a=0;a<N_ASSET_NAMES;a++) updateFeaturesForAsset(a);
if(UPDATE_EVERY>1 && (Bar%UPDATE_EVERY)!=0) return;
if(lastUpdateBar==Bar) return; // guard against duplicate rebuilds on one bar
lastUpdateBar=Bar;
for(int a=0;a<N_ASSET_NAMES;a++) pairG[a].rebuildIfReady();
metaG.rebuild(pairG);
double Creg=computeRegimeCompactness();
double score[N_ASSET_NAMES]; int idx[N_ASSET_NAMES];
for(int a=0;a<N_ASSET_NAMES;a++){
double CA=pairG[a].pf.compactness;
double vol=pairG[a].pf.volatility;
double volFactor=clamp01(1.0 - vol * VOL_SCALE); // dampen score as volatility rises
double x=(alpha*Creg + gamma*CA - beta*metaG.couplingPenalty) * volFactor; // reward compactness, penalize coupling
score[a]=sigmoid(x); idx[a]=a;
}
// Partial selection sort: only the first TOP_K slots of idx become ordered.
for(int i=0;i<TOP_K;i++) for(int j=i+1;j<N_ASSET_NAMES;j++) if(score[idx[j]]>score[idx[i]]){int t=idx[i]; idx[i]=idx[j]; idx[j]=t;}
if((Bar%50)==0){
printf("\n[VolAdjuster_v2] Bar=%d Creg=%.4f Pcouple=%.4f Cluster=%.4f",Bar,Creg,metaG.couplingPenalty,metaG.clusterScore);
for(int k=0;k<TOP_K;k++){int a=idx[k]; printf("\n #%d %s CA=%.4f Vol=%.4f BW=%.4f Score=%.4f",k+1,ASSET_NAMES[a],pairG[a].pf.compactness,pairG[a].pf.volatility,pairG[a].pf.bandwidth,score[a]);}
}
}
};
// Singleton strategy instance; created on INITRUN, destroyed on EXITRUN.
static VolAdjusterStrategyV2* S = 0;
// Zorro DLL entry point, called once per bar plus INITRUN/EXITRUN phases.
DLLFUNC void run(){
if(is(INITRUN)){BarPeriod=60; LookBack=max(LookBack,FEAT_WINDOW+50); asset((char*)ASSET_NAMES[0]); if(!S){S=new VolAdjusterStrategyV2();S->init();}} // 1h bars; lookback covers the feature window
if(is(EXITRUN)){if(S){S->shutdown(); delete S; S=0;} return;}
if(!S || Bar<LookBack) return; // wait until enough history exists
S->onBar();
}
|
|
|
|