// basic server that refreshes the stock screener weekly and exposes an endpoint for manual updates
const fs = require('fs');
const crypto = require('crypto');
const http = require('http');
// fetch is available globally in Node 18+, so no extra import is needed for it
// read from a .env file without a package
const env = {};
const readDotEnv = () => {
if (!fs.existsSync('.env')) {
console.error('.env file does not exist');
process.exit(1);
}
const envConfig = fs.readFileSync('.env', 'utf8');
envConfig.split('\n').forEach(line => {
if (!line.trim() || !line.includes('=')) return; // skip blank or malformed lines
// split on the first '=' only, so values that contain '=' survive intact
const [key, ...rest] = line.split('=');
env[key.trim()] = rest.join('=').trim();
});
}
readDotEnv();
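// For reference, a minimal .env for this server looks something like the following
// (values are illustrative; these are the only two keys read anywhere in the file):
//   STOCK_SCREENER_AUTH_TOKEN=some-long-random-string
//   PORT=5000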
const screenAndUpload = async () => {
try {
await runScreener();
await runUpload();
} catch (error) {
console.error('Error occurred while running screener and upload:', error);
// rethrow so callers (the HTTP handler and the weekly interval) can react to the failure
throw new Error('Failed to run screener and upload');
}
}
const main = () => {
// ensure an auth token is set
if (!env.STOCK_SCREENER_AUTH_TOKEN) {
console.error('STOCK_SCREENER_AUTH_TOKEN is not set');
process.exit(1);
}
// no HTTPS needed here; the server will run behind a reverse proxy
const server = http.createServer((req, res) => {
// ignore favicon requests so they are not logged as unauthorized attempts
if (req.url == '/favicon.ico') {
res.writeHead(204);
res.end();
return;
}
if (req.url == `/api/update-stock-screener?token=${env.STOCK_SCREENER_AUTH_TOKEN}`) {
screenAndUpload().then(() => {
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Success! Here is the url of the sheet -> https://docs.google.com/spreadsheets/d/1csTrnR5LwSBft1wtczDdxk0aPnHIdIOHX3MTP-IkpPk');
console.log('Stock screener successfully updated on ' + new Date().toISOString() + ' from ' + (req.headers['x-forwarded-for'] || req.socket.remoteAddress));
}).catch((error) => {
console.error('Error occurred while updating stock screener:', error);
res.writeHead(500, { 'Content-Type': 'text/plain' });
res.end('Internal Server Error');
});
} else {
// respond with a 403 for anything else
res.writeHead(403, { 'Content-Type': 'text/plain' });
res.end('Forbidden');
console.log('Unauthorized access attempt from ' + (req.headers['x-forwarded-for'] || req.socket.remoteAddress));
}
});
// Start the HTTP server
const PORT = env.PORT || 5000;
server.listen(PORT, () => {
console.log(`Server is running on port ${PORT}`);
});
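// For reference, the manual update can be triggered with a plain GET, e.g.
// (hostname and port are illustrative; the token must match the one in .env):
//   curl 'http://localhost:5000/api/update-stock-screener?token=<STOCK_SCREENER_AUTH_TOKEN>'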
// have a set interval that updates the stock prices weekly
setInterval(() => {
console.log('Running screen and upload from interval!');
screenAndUpload().catch((error) => {
console.error('Scheduled screen and upload failed:', error);
});
}, 1000 * 60 * 60 * 24 * 7);
}
main();
/*
----------------------------------------------------------------------------------------------------------------------------
SCREENER.js CODE
----------------------------------------------------------------------------------------------------------------------------
*/
// this gets the HTML page of the SP500 list from slickcharts.com
const getSPHTML = async () => {
const response = await fetch('https://www.slickcharts.com/sp500');
if (!response.ok) {
throw new Error('Network response was not ok');
}
const text = await response.text();
return text;
}
// this parses the HTML of the SP500 list to tickers
const parseHTMLToTickers = (html) => {
// get the tickers by slicing on all `/symbol/` occurrences
// (using the `href="` prefix so each row only matches once)
// NOTE: the string and regex markers below assume the current slickcharts markup
const tickers = [];
html.split('href="/symbol/').forEach((item, index) => {
if (index === 0) return; // skip everything before the first ticker link
// ticker is the text up to the closing quote of the href
const ticker = item.split('"')[0];
// company name sits in the nowrap cell that follows
const nameParts = item.split('nowrap;">');
const name = nameParts.length > 1 ? nameParts[1].split('<')[0] : '';
// get weight of item using the % that ends the weight cell
let weight = item.search(/([\d.]+)%<\/td>/);
if (weight === -1) {
console.warn(`Ticker ${ticker} does not have a valid weight, skipping.`);
return;
}
weight = parseFloat(item.slice(weight, item.indexOf('%', weight)));
if (ticker && name && weight) {
tickers.push({ name, symbol: ticker, weight });
}
});
// update the tickers file with the new tickers
if (fs.existsSync('sp500_tickers.json')) {
fs.unlinkSync('sp500_tickers.json');
}
fs.writeFileSync('sp500_tickers.json', JSON.stringify(tickers, null, 2));
// console.log(`Saved ${tickers.length} tickers to sp500_tickers.json`);
return tickers;
}
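// For reference, sp500_tickers.json ends up as an array of objects shaped like
// the following (values are illustrative, not real index data):
//   [ { "name": "Apple Inc.", "symbol": "AAPL", "weight": 6.5 }, ... ]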
const getTickersFromFile = () => {
if (!fs.existsSync('sp500_tickers.json')) {
console.error('sp500_tickers.json file does not exist. Please run the script to fetch tickers first.');
return [];
}
const data = fs.readFileSync('sp500_tickers.json', 'utf8');
return JSON.parse(data);
}
// get the JSON history data from the symbol
const getSymbolHistory = async (symbol) => {
// clean the symbol for Yahoo (replace '.' with '-')
symbol = symbol.replace(/\./g, '-');
const parameters = 'interval=1d&includePrePost=true&events=div%7Csplit%7Cearn&lang=en-US&region=US&range=6mo';
const response = await fetch(`https://query1.finance.yahoo.com/v8/finance/chart/${symbol}?${parameters}`);
if (!response.ok) {
console.error(`Network response was not ok for symbol: ${symbol}. Status: ${response.status}`);
return {};
}
const data = await response.json();
return data;
}
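// Only a small slice of the Yahoo chart response is used downstream, roughly:
//   data.chart.result[0].timestamp                   -> array of unix timestamps
//   data.chart.result[0].indicators.quote[0].close   -> array of daily closing prices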
const getSectorMap = () => {
// pull from the sector map tsv file
if (!fs.existsSync('sector-map.tsv')) {
console.error('sector-map.tsv file does not exist. Please run the script to fetch sectors first.');
return {}; // empty map so lookups below fall back to 'Unknown'
}
const sectorMap = fs.readFileSync('sector-map.tsv', 'utf8');
const lines = sectorMap.split('\n');
const sectorMapObj = {};
lines.forEach((line, index) => {
if (index === 0) return; // skip the header line
if (!line.trim()) return; // skip blank lines
// split the line by tab and get the name, ticker, sector, and subSector
const [name, ticker, sector, subSector] = line.split('\t');
sectorMapObj[ticker.trim()] = [sector.trim(), subSector.trim(), name.trim()];
});
return sectorMapObj;
}
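// For reference, sector-map.tsv is expected to have a header row followed by
// tab-separated lines in the column order: Name, Ticker, Sector, SubSector
// (an illustrative row: "Apple Inc.<TAB>AAPL<TAB>Information Technology<TAB>Technology Hardware")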
const getHistoriesForEachTicker = async (tickers) => {
// use Promise.all to fetch all histories concurrently
const histories = await Promise.all(tickers.map(ticker => getSymbolHistory(ticker.symbol)));
// zip the histories with the tickers
const zippedHistories = histories.map((history, index) => ({
...tickers[index],
history
}));
return zippedHistories;
}
const formatDataFromHistories = (histories) => {
// format the data from the histories to a more readable format
const csv_headers = ['Ticker', 'Name', '% Weight', 'Sector', 'SubSector', 'RSI (14)', 'MACD (Histogram Value)', '1W', '1M', '3M', '6M'];
const csv_final = [csv_headers];
// get the sector map
const sectorMap = getSectorMap();
histories.forEach(history => {
// ticker, name, and weight come from the html pull; sector/subsector from the sector map
const { symbol, name: webName, weight } = history;
const sector = sectorMap[symbol] ? sectorMap[symbol][0] : 'Unknown';
const subSector = sectorMap[symbol] ? sectorMap[symbol][1] : 'Unknown';
const name = sectorMap[symbol] ? sectorMap[symbol][2] : webName || 'Unknown';
// Get RSI, MACD from helper
const timestamps = history.history.chart.result[0].timestamp;
const prices = history.history.chart.result[0].indicators.quote[0].close;
const rsi = calculateRSI(prices);
const macd = calculateMACD(prices);
// print first 5 timestamps and prices for debugging
// console.log('First 5 timestamps:', timestamps.slice(0, 5).map(ts => new Date(ts * 1000).toLocaleDateString()));
// console.log('First 5 prices:', prices.slice(0, 5));
const currentPrice = prices[prices.length - 1];
// Directly calculate the percentage changes for 1W, 1M, 3M, and 6M
const oneWeekAgoPrice = prices[prices.length - 6]; // 5 days of trading
const oneWeekChange = ((currentPrice - oneWeekAgoPrice) / oneWeekAgoPrice) * 100;
const oneMonthAgoPrice = prices[prices.length - 21]; // 20 days of trading (4 weeks)
const oneMonthChange = ((currentPrice - oneMonthAgoPrice) / oneMonthAgoPrice) * 100;
const threeMonthsAgoPrice = prices[Math.floor(prices.length / 2) - 1]; // roughly 3 months back: half of the 6-month window
const threeMonthChange = ((currentPrice - threeMonthsAgoPrice) / threeMonthsAgoPrice) * 100;
const sixMonthsAgoPrice = prices[0]; // 6 months ago is simply the first price in the 6mo range
const sixMonthChange = ((currentPrice - sixMonthsAgoPrice) / sixMonthsAgoPrice) * 100;
const mappedValues = {
Ticker: symbol,
Name: name,
'% Weight': weight,
'Sector': sector,
'SubSector': subSector,
'RSI (14)': rsi.toFixed(3),
'MACD (Histogram Value)': macd.toFixed(3),
'1W': oneWeekChange.toFixed(3) + '%',
'1M': oneMonthChange.toFixed(3) + '%',
'3M': threeMonthChange.toFixed(3) + '%',
'6M': sixMonthChange.toFixed(3) + '%'
};
// push the mapped values into the formatted data
csv_final.push(Object.values(mappedValues));
});
// write the formatted data to a TSV file
const csvContent = csv_final.map(e => e.join('\t')).join('\n');
if (fs.existsSync('sp500_formatted_data.tsv')) {
fs.unlinkSync('sp500_formatted_data.tsv');
}
fs.writeFileSync('sp500_formatted_data.tsv', csvContent);
// console.log('Formatted data saved to sp500_formatted_data.tsv');
return csv_final;
};
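// An illustrative output row (tab-separated, values made up, columns follow csv_headers):
//   AAPL  Apple Inc.  6.5  Information Technology  Technology Hardware  55.123  1.234  0.512%  2.345%  6.789%  12.345%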
// testGetHistories();
const calculateMACD = (prices, shortPeriod = 12, longPeriod = 26, signalPeriod = 9) => {
// Helper function to calculate the Exponential Moving Average (EMA)
const exponentialMovingAverage = (data, period) => {
const k = 2 / (period + 1);
let ema = [data[0]]; // Start with the first price as the initial EMA
for (let i = 1; i < data.length; i++) {
const currentEma = (data[i] * k) + (ema[i - 1] * (1 - k));
ema.push(currentEma);
}
return ema;
}
// Calculate the short- and long-period EMAs
const ema12 = exponentialMovingAverage(prices, shortPeriod);
const ema26 = exponentialMovingAverage(prices, longPeriod);
// Calculate the MACD line (short EMA minus long EMA)
const macdLine = ema12.map((value, index) => value - ema26[index]);
// Calculate the signal line
const signalLine = exponentialMovingAverage(macdLine, signalPeriod);
// Calculate the MACD histogram
const macdHistogram = macdLine.map((value, index) => value - signalLine[index]);
// Return the last value of the MACD histogram
return macdHistogram[macdHistogram.length - 1];
}
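// Reading the return value: a positive histogram means the MACD line is above its
// signal line (upward momentum); a negative value means it is below.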
const calculateRSI = (prices, period = 14) => {
// calculate the first RSI within our period
let gains = [];
let losses = [];
for (let i = 0; i < period; i++) {
const change = prices[i + 1] - prices[i];
if (change > 0) {
gains.push(change);
losses.push(0);
} else {
losses.push(Math.abs(change));
gains.push(0);
}
}
const averageGain = gains.reduce((a, b) => a + b, 0) / period;
const averageLoss = losses.reduce((a, b) => a + b, 0) / period;
if (averageLoss === 0) {
console.log('No losses in the period, RSI is 100');
return 100; // RSI is 100 if there are no losses
}
const firstRSI = 100 - (100 / (1 + (averageGain / averageLoss)));
if (isNaN(firstRSI)) {
console.error('Calculated RSI is NaN, returning 0');
return 0; // Return 0 if RSI calculation fails
}
const RSIs = [firstRSI];
// console.log(`Initial RSI for the first ${period} data points: ${firstRSI}`);
// Calculate the RSI for the rest of the data points
let previousAverageGain = averageGain;
let previousAverageLoss = averageLoss;
for (let i = period; i < prices.length - 1; i++) {
const change = prices[i + 1] - prices[i];
let gain = 0;
let loss = 0;
if (change > 0) {
gain = change;
} else {
loss = Math.abs(change);
}
// Calculate the new average gain and loss
previousAverageGain = (previousAverageGain * (period - 1) + gain) / period;
previousAverageLoss = (previousAverageLoss * (period - 1) + loss) / period;
if (previousAverageLoss === 0) {
console.log('No losses in the period, RSI is 100');
return 100; // RSI is 100 if there are no losses
}
// add this RSI to the list
const rsi = 100 - (100 / (1 + (previousAverageGain / previousAverageLoss)));
RSIs.push(rsi);
}
// Return the last calculated RSI
return RSIs[RSIs.length - 1];
}
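// Reading the return value: RSI is bounded to 0-100; values above 70 are commonly
// treated as overbought and values below 30 as oversold.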
const runScreener = async () => {
try {
// get the test histories from the file
// const histories = fs.readFileSync('sp500_histories.json', 'utf8');
// const parsedHistories = JSON.parse(histories);
// console.log(`Loaded ${parsedHistories.length} histories from sp500_histories.json`);
// get tickers from file
const tickers = getTickersFromFile();
if (tickers.length === 0) {
console.error('No tickers found. Please ensure sp500_tickers.json exists and is populated.');
return;
}
// console.log(`Found ${tickers.length} tickers in sp500_tickers.json`);
// get histories for each symbol
const parsedHistories = await getHistoriesForEachTicker(tickers);
// console.log(`Fetched histories for ${parsedHistories.length} symbols.`);
// format the data from the histories
const formattedData = formatDataFromHistories(parsedHistories);
// console.log('Formatted data:', formattedData.slice(0, 5)); // Print first 5 entries for brevity
} catch (error) {
console.error('Error in runScreener:', error);
throw error; // propagate so screenAndUpload can report the failure
}
}
/*
----------------------------------------------------------------------------------------------------------------------------
UPLOAD.js CODE
----------------------------------------------------------------------------------------------------------------------------
*/
// load in google credentials json file
const credentialsPath = 'google_credentials.json';
const { private_key_id, client_email, token_uri, private_key }
= JSON.parse(fs.readFileSync(credentialsPath, 'utf8'));
const spreadsheetId = '1csTrnR5LwSBft1wtczDdxk0aPnHIdIOHX3MTP-IkpPk';
const templateSheetId = 2006563664; // the ID of the template sheet in the spreadsheet
// url base encoder helper
const urlBase64Encode = (str) => {
return Buffer.from(str)
.toString('base64')
.replace(/\+/g, '-')
.replace(/\//g, '_')
.replace(/=+$/, '');
}
// auth using JWT
// (https://developers.google.com/identity/protocols/oauth2/service-account#httprest)
const createJWT = () => {
const jwtHeader = {
alg: 'RS256',
typ: 'JWT',
kid: private_key_id
};
const jwtClaimSet = {
iss: client_email,
scope: 'https://www.googleapis.com/auth/spreadsheets',
aud: token_uri,
exp: Math.floor(Date.now() / 1000) + 3600, // 1 hour expiration
iat: Math.floor(Date.now() / 1000)
};
const signedHeader = urlBase64Encode(JSON.stringify(jwtHeader));
const signedClaimSet = urlBase64Encode(JSON.stringify(jwtClaimSet));
const signature = urlBase64Encode(
crypto.createSign('RSA-SHA256')
.update(`${signedHeader}.${signedClaimSet}`)
.sign(private_key)
);
return `${signedHeader}.${signedClaimSet}.${signature}`;
}
const postAuthRequest = async () => {
// get the JWT
const jwt = createJWT();
// prepare the encoded body
let body = {
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
'assertion': jwt
}
body = new URLSearchParams(body).toString();
// make the POST request to the token URI
const options = {
method: 'POST',
headers: {
'Host': 'oauth2.googleapis.com',
'Content-Type': 'application/x-www-form-urlencoded'
},
body,
};
const response = await fetch(token_uri, options);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}, message: ${await response.text()}`);
}
// return the access token
const data = await response.json();
return data.access_token;
}
const randomSheetId = () => {
// not actually random: derive a numeric sheet ID from today's date (e.g. 2025-01-15 -> 20250115)
const date = new Date().toISOString().split('T')[0].replace(/-/g, '');
return parseInt(date, 10);
}
const appendCellsRequest = (sheetId, rows) => {
// rows is a 2-D array: each entry is one row, itself an array of cell strings
const rowsArray = rows.map(row => {
return {
values: row.map(value => {
return {
userEnteredValue: {
stringValue: value
}
};
})
};
});
return {
sheetId,
rows: rowsArray,
fields: '*',
};
}
const duplicateSheetRequest = (sheetId) => {
const date = new Date().toISOString().split('T')[0];
return {
sourceSheetId: templateSheetId,
insertSheetIndex: 0,
newSheetId: sheetId,
newSheetName: date,
}
}
const batchUpdateSpreadsheet = (sheetId, rows) => {
return {
"requests": [
{
"duplicateSheet": duplicateSheetRequest(sheetId)
},
{
"appendCells": appendCellsRequest(sheetId, rows)
}
],
};
}
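// For reference, the resulting request body sent to the Sheets API looks roughly like
// the following (sheetId and date are illustrative):
//   { "requests": [
//       { "duplicateSheet": { "sourceSheetId": 2006563664, "insertSheetIndex": 0, "newSheetId": 20250115, "newSheetName": "2025-01-15" } },
//       { "appendCells": { "sheetId": 20250115, "rows": [ /* one entry per TSV row */ ], "fields": "*" } }
//   ] }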
const postBatchUpdateRequest = async (batchUpdateRequest) => {
const accessToken = await postAuthRequest();
const url = `https://sheets.googleapis.com/v4/spreadsheets/${spreadsheetId}:batchUpdate`;
const options = {
method: 'POST',
headers: {
'Authorization': `Bearer ${accessToken}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
requests: batchUpdateRequest.requests
})
};
const response = await fetch(url, options);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}, message: ${await response.text()}`);
}
const data = await response.json();
return data;
}
const getRowsFromTSV = (tsvPath) => {
const tsvData = fs.readFileSync(tsvPath, 'utf8');
const rows = tsvData.split('\n').map(line => line.split('\t').map(cell => cell.trim()));
// remove the first row (header)
rows.shift();
return rows;
}
const runUpload = async () => {
// create a random sheet id
const sheetId = randomSheetId();
// get the rows from the tsv file
const tsvPath = 'sp500_formatted_data.tsv';
const rows = getRowsFromTSV(tsvPath);
// create the batch update request
const batchUpdateRequest = batchUpdateSpreadsheet(sheetId, rows);
// write the request to a file
const outputPath = 'batch_update_request.json';
fs.writeFileSync(outputPath, JSON.stringify(batchUpdateRequest, null, 2));
// console.log(`Batch update request written to ${outputPath}`);
// post the batch update request
try {
const response = await postBatchUpdateRequest(batchUpdateRequest);
// console.log('Batch update response:', response);
} catch (error) {
console.error('Error uploading to Google Sheets:', error);
throw error; // propagate so screenAndUpload can report the failure
}
}