The Puzzle of Dynamic Currency Connections
Background:
Imagine you’re in charge of monitoring 28 students (representing currency pairs) in a math class. Each student’s performance swings between good days and bad days (representing volatility). Sometimes they perform well, and sometimes they struggle, but there’s a pattern to it: when one student gets stuck on a tough topic, the struggle often spreads to nearby students (a ripple effect of high volatility), and when one student is confident, that confidence can lift the students around them.
Your job is to figure out these patterns and decide:
Who needs help and who’s ready to move on?
When to focus on certain students to improve the overall class performance.
But here’s the catch:
The students form groups (like clusters), where some groups are more prone to challenges than others.
Each student’s performance is influenced by their past performance (like habits) and their interactions with others in the group.
The Challenge:
Track the Patterns:
You have to observe how often each student struggles or excels over time. Are there periods where the same students keep struggling (clustering)? Are there students who quickly bounce back from challenges?
Form Study Groups:
Group students based on how strongly they affect each other’s performance. If Student A’s struggles often lead to Student B struggling too, they belong in the same group. Use this to create a "map" of how the class interacts.
Summarize Key Challenges:
Once you’ve mapped the class, find the topics or patterns that explain the majority of struggles. These are your "main challenges" that need solving.
Predict the Next Struggles:
Based on their history and their group’s behavior, predict which students will need help next. This is your chance to act early and make the class stronger!
Decide the Focus:
Each day, decide who you’ll focus on: who needs a boost, who can help others, and who’s ready to move on. Your decisions will affect the overall class performance.
Bonus Twist:
As you monitor the class, the students’ behavior changes. New friendships form, others break apart, and some students get unexpectedly better or worse. Can you adapt your plan to these changes and keep the whole class improving?
Your Task:
Identify the students’ struggles, find the hidden patterns in their interactions, and create a plan that helps the whole class succeed over time. The better your plan, the stronger the class becomes.
Can you rise to the challenge and lead the class to success?
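The listing below sketches this idea in Zorro-style lite-C: the 28 "students" are the currency pairs, the "study groups" become a pair-to-pair adjacency matrix, the "main challenges" are extracted with a kernel PCA, and the "predictions" come from a small graph neural network (GNN) whose outputs are turned into Buy/Sell/Hold signals. Treat it as a conceptual sketch rather than a production-ready strategy; several helpers (autocorrelation, eigen decomposition, the adjacency construction) are not built-in functions, so minimal assumed implementations are filled in along the way.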
#define PAIRS 28
#define COMPONENTS 3 // Number of PCA components
#define GNN_LAYERS 2 // Number of GNN layers
#define ACTIONS 3 // Buy, Sell, Hold
// Define currency pairs
string CurrencyPairs[PAIRS] = {
"EURUSD", "GBPUSD", "USDJPY", "GBPJPY", "USDCAD", "EURAUD", "EURJPY",
"AUDCAD", "AUDJPY", "AUDNZD", "AUDUSD", "CADJPY", "EURCAD", "EURCHF",
"EURGBP", "EURNZD", "GBPCAD", "GBPCHF", "NZDCAD", "NZDJPY", "NZDUSD",
"USDCHF", "CHFJPY", "AUDCHF", "GBPNZD", "NZDCHF", "CADCHF", "GBPAUD"
};
// Variables for PCA, GNN, and signals
var volatilities[PAIRS]; // Current volatilities
var volClustering[PAIRS]; // Volatility clustering scores
var kernelMatrix[PAIRS][PAIRS]; // Kernel matrix for PCA
var pcaReducedFeatures[PAIRS][COMPONENTS]; // PCA-reduced features
var timeSeriesFeatures[PAIRS]; // Time-series features (e.g., autocorrelation)
var timeDependentFeatures[PAIRS]; // Time-dependent features (e.g., lagged volatility)
var adjacencyMatrices[PAIRS][PAIRS]; // GNN adjacency matrix (pair-to-pair influence)
var gnnWeights[GNN_LAYERS][COMPONENTS][COMPONENTS]; // GNN weights
var gnnOutputs[PAIRS][ACTIONS]; // GNN probabilities (Buy/Sell/Hold)
var signals[PAIRS]; // Final trading signals
var eigenvalues[COMPONENTS]; // Eigenvalues from PCA
// Softmax function
var softmax(var* logits, int index, int size) {
var sum = 0;
for (int i = 0; i < size; i++) {
sum += exp(logits[i]); // Exponentiate each value
}
return exp(logits[index]) / (sum + 1e-8); // Normalize by the sum, avoiding division by zero
}
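// Optional variant (not in the original listing): subtracting the largest logit
// before exponentiating keeps exp() from overflowing when logits grow large.
var softmaxStable(var* logits, int index, int size) {
var maxLogit = logits[0];
for (int i = 1; i < size; i++) {
if (logits[i] > maxLogit) maxLogit = logits[i];
}
var sum = 0;
for (int i = 0; i < size; i++) {
sum += exp(logits[i] - maxLogit);
}
return exp(logits[index] - maxLogit) / (sum + 1e-8);
}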
// Step 1: Calculate Volatility and Clustering Scores
function calculateVolatilityAndClustering() {
for (int i = 0; i < PAIRS; i++) {
asset(CurrencyPairs[i]);
vars logReturns = series(log(priceClose(0) / priceClose(1))); // Log returns
vars sqReturns = series(logReturns[0] * logReturns[0]); // Squared log returns
volatilities[i] = sqrt(SMA(sqReturns, 20)); // 20-bar rolling RMS of log returns (volatility proxy)
// Calculate clustering using past volatilities
vars pastVolatilities = series(volatilities[i]); // Per-pair volatility history
volClustering[i] = SMA(pastVolatilities, 10) / (StdDev(pastVolatilities, 10) + 1e-8); // Example metric: mean vs. dispersion of recent volatility
}
}
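// autocorrelation() used below is not a built-in indicator; this is a minimal
// user-defined sketch (assumption): the lag-k sample autocorrelation over a
// fixed 50-bar window, which must stay within the LookBack period.
var autocorrelation(vars Data, int lag) {
int n = 50; // Sample window (assumed)
var mean = SMA(Data, n);
var num = 0;
var den = 0;
for (int t = 0; t < n - lag; t++) {
num += (Data[t] - mean) * (Data[t + lag] - mean);
}
for (int t = 0; t < n; t++) {
den += (Data[t] - mean) * (Data[t] - mean);
}
return num / (den + 1e-8);
}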
// Step 2: Extract Time-Series Features
function extractTimeSeriesFeatures() {
for (int i = 0; i < PAIRS; i++) {
asset(CurrencyPairs[i]);
vars logReturns = series(log(priceClose(0) / priceClose(1))); // Log returns
// Autocorrelation as a feature
timeSeriesFeatures[i] = autocorrelation(logReturns, 5); // 5-lag autocorrelation (user-defined helper above)
// Time-dependent feature: the previous bar's volatility of the same pair
vars volHistory = series(volatilities[i]); // Per-pair volatility history
timeDependentFeatures[i] = volHistory[1]; // Lag-1 volatility
}
}
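// dotProduct() and eigenDecomposition() are also not built-ins. Below is a
// minimal assumed implementation: a plain dot product over PAIRS elements and
// a power-iteration-with-deflation routine that extracts the top COMPONENTS
// eigenpairs of the symmetric kernel matrix. A proper linear-algebra library
// would be preferable in production.
var dotProduct(var* a, var* b) {
var sum = 0;
for (int i = 0; i < PAIRS; i++) {
sum += a[i] * b[i];
}
return sum;
}
function eigenDecomposition(var M[PAIRS][PAIRS], var* evals, var evecs[COMPONENTS][PAIRS]) {
var A[PAIRS][PAIRS]; // Working copy; deflation modifies it
var v[PAIRS]; // Current eigenvector estimate
var w[PAIRS]; // A * v
for (int i = 0; i < PAIRS; i++) {
for (int j = 0; j < PAIRS; j++) {
A[i][j] = M[i][j];
}
}
for (int c = 0; c < COMPONENTS; c++) {
for (int i = 0; i < PAIRS; i++) v[i] = 1.0 / PAIRS; // Start vector
var lambda = 0;
for (int it = 0; it < 100; it++) { // Fixed iteration budget
for (int i = 0; i < PAIRS; i++) {
w[i] = 0;
for (int j = 0; j < PAIRS; j++) w[i] += A[i][j] * v[j];
}
var norm = 0;
for (int i = 0; i < PAIRS; i++) norm += w[i] * w[i];
norm = sqrt(norm) + 1e-12;
for (int i = 0; i < PAIRS; i++) v[i] = w[i] / norm;
lambda = norm; // ||A*v|| approximates the dominant eigenvalue once v converges
}
evals[c] = lambda;
for (int i = 0; i < PAIRS; i++) evecs[c][i] = v[i];
// Deflate: remove the found component so the next pass finds the next eigenpair
for (int i = 0; i < PAIRS; i++) {
for (int j = 0; j < PAIRS; j++) {
A[i][j] -= lambda * v[i] * v[j];
}
}
}
}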
// Step 3: Perform Enhanced PCA
function performEnhancedPCA() {
// Construct Kernel Matrix
for (int i = 0; i < PAIRS; i++) {
for (int j = 0; j < PAIRS; j++) {
double distance = pow(volatilities[i] - volatilities[j], 2) +
pow(volClustering[i] - volClustering[j], 2) +
pow(timeSeriesFeatures[i] - timeSeriesFeatures[j], 2);
kernelMatrix[i][j] = exp(-distance / (2 * 0.1 * 0.1)); // Gaussian kernel
}
}
// Perform eigen decomposition
var eigenvectors[COMPONENTS][PAIRS]; // Top eigenvectors (row c = c-th eigenvector)
eigenDecomposition(kernelMatrix, eigenvalues, eigenvectors);
// Reduce dimensions using top COMPONENTS
for (int i = 0; i < PAIRS; i++) {
for (int j = 0; j < COMPONENTS; j++) {
pcaReducedFeatures[i][j] = dotProduct(kernelMatrix[i], eigenvectors[j]);
}
}
}
// Step 4: Initialize GNN Weights
function initializeGNNWeights() {
for (int l = 0; l < GNN_LAYERS; l++) {
for (int i = 0; i < COMPONENTS; i++) {
for (int j = 0; j < COMPONENTS; j++) {
gnnWeights[l][i][j] = random() * 0.1; // Small random initialization
}
}
}
}
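// Step 4b: Build GNN Adjacency Matrices
// The adjacency matrix is declared above but never filled in the original
// listing, so the propagation step would multiply by zeros. One plausible
// choice (assumption): reuse the Gaussian kernel similarities and row-normalize
// them so that each row sums to 1.
function buildAdjacencyMatrices() {
for (int i = 0; i < PAIRS; i++) {
var rowSum = 0;
for (int j = 0; j < PAIRS; j++) {
rowSum += kernelMatrix[i][j];
}
for (int j = 0; j < PAIRS; j++) {
adjacencyMatrices[i][j] = kernelMatrix[i][j] / (rowSum + 1e-8); // Row-normalized similarity
}
}
}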
// Step 5: GNN Propagation
function propagateGNN() {
var inFeatures[PAIRS][COMPONENTS]; // Input to the current layer
var tempFeatures[PAIRS][COMPONENTS]; // Output of the current layer
// Layer 0 input: the PCA-reduced features
for (int i = 0; i < PAIRS; i++) {
for (int k = 0; k < COMPONENTS; k++) {
inFeatures[i][k] = pcaReducedFeatures[i][k];
}
}
for (int l = 0; l < GNN_LAYERS; l++) {
for (int i = 0; i < PAIRS; i++) {
for (int k = 0; k < COMPONENTS; k++) {
tempFeatures[i][k] = 0;
for (int j = 0; j < PAIRS; j++) {
for (int m = 0; m < COMPONENTS; m++) {
tempFeatures[i][k] += adjacencyMatrices[i][j] * inFeatures[j][m] * gnnWeights[l][m][k]; // A * X * W
}
}
tempFeatures[i][k] = max(0, tempFeatures[i][k]); // ReLU activation
}
}
// Feed this layer's output into the next layer
for (int i = 0; i < PAIRS; i++) {
for (int k = 0; k < COMPONENTS; k++) {
inFeatures[i][k] = tempFeatures[i][k];
}
}
}
// Final probabilities for Buy, Sell, Hold using softmax
// (COMPONENTS == ACTIONS here, so the last layer's features serve directly as logits)
for (int i = 0; i < PAIRS; i++) {
var logits[ACTIONS]; // Raw GNN output logits
for (int k = 0; k < ACTIONS; k++) {
logits[k] = tempFeatures[i][k]; // Assign the logits (raw outputs)
}
// Apply softmax to calculate probabilities
for (int k = 0; k < ACTIONS; k++) {
gnnOutputs[i][k] = softmax(logits, k, ACTIONS);
}
}
}
// Step 6: Generate Trading Signals
function generateSignals() {
for (int i = 0; i < PAIRS; i++) {
var gnnBuyProb = gnnOutputs[i][0];
var gnnSellProb = gnnOutputs[i][1];
signals[i] = gnnBuyProb - gnnSellProb; // Base signal
// Threshold decision
if (signals[i] > 0.5) signals[i] = 1; // Strong Buy
else if (signals[i] < -0.5) signals[i] = -1; // Strong Sell
else signals[i] = 0; // Hold
}
}
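// Worked example of the thresholding above (illustrative numbers):
// gnnOutputs[i] = {0.70, 0.10, 0.20} -> signal = 0.70 - 0.10 = 0.60 -> Strong Buy (1)
// gnnOutputs[i] = {0.40, 0.35, 0.25} -> signal = 0.05 -> Hold (0)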
// Step 7: Execute Trades
function executeTrades() {
for (int i = 0; i < PAIRS; i++) {
asset(CurrencyPairs[i]); // Select the pair before trading it
if (signals[i] == 1) enterLong(); // Strong Buy
else if (signals[i] == -1) enterShort(); // Strong Sell
}
}
// Main Strategy Function
function run() {
set(PLOTNOW);
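BarPeriod = 60; // Example bar period in minutes (assumption; adjust as needed)
LookBack = 100; // Must cover the 20-bar volatility, 10-bar clustering and 50-bar autocorrelation windows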
// Step 1: Calculate volatility and clustering
calculateVolatilityAndClustering();
// Step 2: Extract time-series features
extractTimeSeriesFeatures();
// Step 3: Perform enhanced PCA
performEnhancedPCA();
// Step 4: Initialize GNN weights (once)
if (is(INITRUN)) initializeGNNWeights();
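// Step 4b: Fill the GNN adjacency matrices (user-defined sketch above; must run before propagation)
buildAdjacencyMatrices();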
// Step 5: Propagate GNN
propagateGNN();
// Step 6: Generate trading signals
generateSignals();
// Step 7: Execute trades
executeTrades();
}