Full Pipeline: Stock Price Forecasting with TensorFlow.js and LSTM - CoPilot Generated
🧪 Project Workflow
This project walks through:
- Fetching stock data via Alpha Vantage
- Computing Simple Moving Average (SMA) to smooth the data
- Training an LSTM model using TensorFlow.js
- Predicting future prices and comparing them to actual values
📦 Dataset Source: Alpha Vantage API
You can fetch historical stock prices using the Alpha Vantage API, which provides:
- Daily or weekly adjusted closing prices
- Open, high, low, close, volume
- Up to 20 years of historical data
🛠️ How to Use the Alpha Vantage API
- Sign up at Alpha Vantage
- Get your free API key
- Use this endpoint to fetch daily prices:
https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol=MSFT&apikey=YOUR_API_KEY
🧠 LSTM Model in TensorFlow.js
Here’s a simplified version of the model setup:
// Minimal LSTM-based regression model.
const model = tf.sequential();
// One LSTM layer: 64 hidden units, fed windows of `sequenceLength`
// time steps with a single feature per step; only the final output
// of the sequence is emitted (returnSequences: false).
model.add(tf.layers.lstm({
  units: 64,
  inputShape: [sequenceLength, 1],
  returnSequences: false
}));
// Single dense output unit: the predicted (scaled) next value.
model.add(tf.layers.dense({ units: 1 }));
// Adam optimizer with mean-squared-error loss — standard for regression.
model.compile({ optimizer: 'adam', loss: 'meanSquaredError' });
Below is a step-by-step guide to setting up a complete Node.js-based pipeline — from data ingestion to model training, prediction, and visualization.
- Prerequisites
Make sure you have Node.js (v14+) and npm or Yarn installed. Then initialize the project and install the required packages:
mkdir tfjs-lstm-pipeline
cd tfjs-lstm-pipeline
npm init -y
npm install @tensorflow/tfjs-node node-fetch csv-writer
Packages and their purposes: @tensorflow/tfjs-node — run TensorFlow.js models in Node; node-fetch — make HTTP requests (Alpha Vantage API); csv-writer — export results for plotting/analysis.
- Fetch Historical Stock Data
Create fetch_data.js to pull daily adjusted close prices from Alpha Vantage and save as JSON:
// fetch_data.js const fetch = require('node-fetch'); const fs = require('fs'); const API_KEY = 'YOUR_ALPHA_VANTAGE_KEY'; const SYMBOL = 'MSFT'; async function fetchStockData() { const url = `https://www.alphavantage.co/query?` + `function=TIME_SERIES_DAILY_ADJUSTED&symbol=${SYMBOL}` + `&outputsize=full&apikey=${API_KEY}`; const res = await fetch(url); const json = await res.json(); const series = json['Time Series (Daily)']; const dates = Object.keys(series).sort(); const data = dates.map(date => ({ date, close: parseFloat(series[date]['5. adjusted close']) })); fs.writeFileSync('data/stock.json', JSON.stringify(data, null, 2)); console.log(`Fetched ${data.length} days for ${SYMBOL}`); } fetchStockData(); Run: mkdir data node fetch_data.js
- Preprocess the Data
Create preprocess.js to:
- Load JSON
- Normalize via MinMax scaling
- Build input sequences and labels
// preprocess.js const fs = require('fs'); function minMaxScale(array) { const min = Math.min(...array); const max = Math.max(...array); return array.map(x => (x - min) / (max - min)); } function createSequences(values, seqLen) { const X = [], Y = []; for (let i = 0; i < values.length - seqLen; i++) { X.push(values.slice(i, i + seqLen)); Y.push(values[i + seqLen]); } return { X, Y }; } const raw = JSON.parse(fs.readFileSync('data/stock.json')); const closes = raw.map(item => item.close); const scaled = minMaxScale(closes); const sequenceLength = 30; const { X, Y } = createSequences(scaled, sequenceLength); // Split 80/20 train-test const split = Math.floor(X.length * 0.8); const trainX = X.slice(0, split), trainY = Y.slice(0, split); const testX = X.slice(split), testY = Y.slice(split); // Save preprocessed arrays fs.writeFileSync('data/train.json', JSON.stringify({ trainX, trainY })); fs.writeFileSync('data/test.json', JSON.stringify({ testX, testY })); console.log('Preprocessing complete.'); Run: node preprocess.js
- Build, Train, and Save the LSTM Model
Create train_model.js:
// train_model.js const tf = require('@tensorflow/tfjs-node'); const fs = require('fs'); async function run() { const { trainX, trainY } = JSON.parse(fs.readFileSync('data/train.json')); // Convert to tensors: [samples, seqLen, 1] const xs = tf.tensor3d(trainX, [trainX.length, trainX[0].length, 1]); const ys = tf.tensor2d(trainY, [trainY.length, 1]); const model = tf.sequential(); model.add(tf.layers.lstm({ units: 64, inputShape: [trainX[0].length, 1], returnSequences: false })); model.add(tf.layers.dense({ units: 1 })); model.compile({ optimizer: 'adam', loss: 'meanSquaredError' }); await model.fit(xs, ys, { epochs: 50, batchSize: 32, validationSplit: 0.2, callbacks: tf.callbacks.earlyStopping({ monitor: 'val_loss', patience: 5 }) }); await model.save('file://model'); console.log('Model training complete and saved to /model'); } run(); Run: node train_model.js
- Predict and Export Results
Create predict.js to load the model, make predictions on test data, invert the scale, and save for visualization:
// predict.js const tf = require('@tensorflow/tfjs-node'); const fs = require('fs'); const createCsvWriter = require('csv-writer').createObjectCsvWriter; async function run() { const model = await tf.loadLayersModel('file://model/model.json'); const { testX, testY } = JSON.parse(fs.readFileSync('data/test.json')); const raw = JSON.parse(fs.readFileSync('data/stock.json')); const xs = tf.tensor3d(testX, [testX.length, testX[0].length, 1]); const preds = model.predict(xs).arraySync().flat(); // Inverse MinMax scaling const closes = raw.map(d => d.close); const min = Math.min(...closes), max = Math.max(...closes); const inv = arr => arr.map(x => x * (max - min) + min); const actual = inv(testY); const predicted = inv(preds); const csvWriter = createCsvWriter({ path: 'data/predictions.csv', header: [ {id: 'idx', title: 'Index'}, {id: 'actual', title: 'Actual Close'}, {id: 'predicted', title: 'Predicted Close'} ] }); const records = actual.map((a, i) => ({ idx: i, actual: a, predicted: predicted[i] })); await csvWriter.writeRecords(records); console.log('Predictions saved to data/predictions.csv'); } run(); Run: node predict.js
- Visualize in the Browser
Create index.html in your project root:
<!DOCTYPE html>
<html>
<head>
  <meta charset="UTF-8" />
  <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
  <title>Stock Price Forecast</title>
</head>
<body>
  <canvas id="chart" width="800" height="400"></canvas>
  <script>
    // Load the exported predictions CSV and plot actual vs. predicted closes.
    fetch('data/predictions.csv')
      .then(res => {
        if (!res.ok) throw new Error(`Failed to load CSV: HTTP ${res.status}`);
        return res.text();
      })
      .then(text => {
        // Split on \r?\n so CRLF files parse too; drop the header row and
        // any blank trailing lines (a stray "\r" would turn +p into NaN).
        const rows = text.split(/\r?\n/).slice(1).filter(r => r.trim());
        const actual = [], predicted = [];
        rows.forEach(r => {
          const [, a, p] = r.split(',');
          actual.push(+a);
          predicted.push(+p);
        });
        const ctx = document.getElementById('chart').getContext('2d');
        new Chart(ctx, {
          type: 'line',
          data: {
            labels: actual.map((_, i) => i),
            datasets: [
              { label: 'Actual', data: actual, borderColor: 'blue' },
              { label: 'Predicted', data: predicted, borderColor: 'red' }
            ]
          }
        });
      })
      .catch(err => console.error(err));
  </script>
</body>
</html>
Serve the folder (e.g. with npx serve .) and open index.html to see your forecast.
Comments
Post a Comment