// basic server that updates the stock prices weekly, but has an endpoint for manual updates
const fs = require('fs');
const crypto = require('crypto');
const http = require('http');
// fetch is available globally in Node 18+, so no import is needed for HTTP requests

// read KEY=VALUE pairs from a .env file without a package
const env = {};
const readDotEnv = () => {
    if (!fs.existsSync('.env')) {
        console.error('.env file does not exist');
        process.exit(1);
    }
    const envConfig = fs.readFileSync('.env', 'utf8');
    envConfig.split('\n').forEach(line => {
        line = line.trim();
        if (!line || line.startsWith('#')) return; // skip blank lines and comments
        const separatorIndex = line.indexOf('=');
        if (separatorIndex === -1) return; // skip malformed lines
        env[line.slice(0, separatorIndex).trim()] = line.slice(separatorIndex + 1).trim();
    });
}
readDotEnv();

const screenAndUpload = async () => {
    try {
        await runScreener();
        await runUpload();
    } catch (error) {
        console.error('Error occurred while running screener and upload:', error);
        // rethrow so callers (the HTTP handler, the weekly interval) actually see the failure
        throw new Error('Failed to run screener and upload');
    }
}

const main = () => {
    // ensure an auth token is set
    if (!env.STOCK_SCREENER_AUTH_TOKEN) {
        console.error('STOCK_SCREENER_AUTH_TOKEN is not set');
        process.exit(1);
    }

    // no https is necessary, will be running off a reverse proxy
    const server = http.createServer((req, res) => {
        // ignore favicon requests
        if (req.url == '/favicon.ico') {
            res.writeHead(204);
            res.end();
            return;
        }
        if (req.url == `/api/update-stock-screener?token=${env.STOCK_SCREENER_AUTH_TOKEN}`) {
            screenAndUpload().then(() => {
                res.writeHead(200, { 'Content-Type': 'text/plain' });
                res.end('Success! Here is the url of the sheet -> https://docs.google.com/spreadsheets/d/1csTrnR5LwSBft1wtczDdxk0aPnHIdIOHX3MTP-IkpPk');
                console.log('Stock screener successfully updated on ' + new Date().toISOString() + ' from ' + (req.headers['x-forwarded-for'] || req.socket.remoteAddress));
            }).catch((error) => {
                console.error('Error occurred while updating stock screener:', error);
                res.writeHead(500, { 'Content-Type': 'text/plain' });
                res.end('Internal Server Error');
            });
        } else {
            // respond with a 403 for anything else
            res.writeHead(403, { 'Content-Type': 'text/plain' });
            res.end('Forbidden');
            console.log('Unauthorized access attempt from ' + (req.headers['x-forwarded-for'] || req.socket.remoteAddress));
        }
    });

    // start the HTTP server
    const PORT = env.PORT || 5000;
    server.listen(PORT, () => {
        console.log(`Server is running on port ${PORT}`);
    });

    // have a set interval that updates the stock prices weekly
    setInterval(() => {
        console.log('Running screen and upload from interval!');
        screenAndUpload().catch((error) => {
            console.error('Error occurred during the weekly update:', error);
        });
    }, 1000 * 60 * 60 * 24 * 7);
}
main();
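
// Example of triggering a manual update (a sketch, assuming the default port of 5000 and
// a placeholder token; substitute the real STOCK_SCREENER_AUTH_TOKEN value from .env):
//
//   curl "http://localhost:5000/api/update-stock-screener?token=YOUR_TOKEN_HERE"
//
// Any request whose URL does not exactly match the token-qualified path above receives a 403.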


/*
----------------------------------------------------------------------------------------------------------------------------

SCREENER.js CODE

----------------------------------------------------------------------------------------------------------------------------
*/

// this gets the HTML page of the SP500 list from slickcharts.com
const getSPHTML = async () => {
    const response = await fetch('https://www.slickcharts.com/sp500');
    if (!response.ok) {
        throw new Error('Network response was not ok');
    }
    return response.text();
}

// this parses the HTML of the SP500 list to tickers
const parseHTMLToTickers = (html) => {
    // get the tickers by slicing on all `/symbol/` occurrences
    // (the specific format before it ensures only one occurrence per row)
    const tickers = [];
    html.split('nowrap;"><a href="/symbol/').slice(1).forEach(item => {
        // get the ticker from the item (before the next ")"
        const ticker = item.split('"')[0];
        const name = item.split('">')[1].split('</a>')[0];

        // get the weight of the item from the percentage table cell
        const weightMatch = item.match(/<td>([\d.]+)%<\/td>/);
        if (!weightMatch) {
            console.warn(`Ticker ${ticker} does not have a valid weight, skipping.`);
            return;
        }
        const weight = parseFloat(weightMatch[1]);

        if (ticker && name && weight) {
            tickers.push({ name, symbol: ticker, weight });
        }
    });

    // update the tickers file with the new tickers
    if (fs.existsSync('sp500_tickers.json')) {
        fs.unlinkSync('sp500_tickers.json');
    }
    fs.writeFileSync('sp500_tickers.json', JSON.stringify(tickers, null, 2));
    // console.log(`Saved ${tickers.length} tickers to sp500_tickers.json`);
    return tickers;
}
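
// For illustration, each parsed entry is shaped like (values here are made up):
//   { name: 'Apple Inc.', symbol: 'AAPL', weight: 6.5 }
// and the full array is persisted to sp500_tickers.json for later runs.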

const getTickersFromFile = () => {
    if (!fs.existsSync('sp500_tickers.json')) {
        console.error('sp500_tickers.json file does not exist. Please run the script to fetch tickers first.');
        return [];
    }
    const data = fs.readFileSync('sp500_tickers.json', 'utf8');
    return JSON.parse(data);
}

// get the JSON history data from the symbol
const getSymbolHistory = async (symbol) => {
    // clean the symbol for Yahoo Finance (replace . with a dash)
    symbol = symbol.replace(/\./g, '-');
    const parameters = 'interval=1d&includePrePost=true&events=div%7Csplit%7Cearn&lang=en-US&region=US&range=6mo';
    const response = await fetch(`https://query1.finance.yahoo.com/v8/finance/chart/${symbol}?${parameters}`);
    if (!response.ok) {
        console.error(`Network response was not ok for symbol: ${symbol}. Status: ${response.status}`);
        return {};
    }
    const data = await response.json();

    return data;
}
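
// The code further down only relies on two paths in the Yahoo chart response:
//   data.chart.result[0].timestamp                  -> array of trading-day timestamps
//   data.chart.result[0].indicators.quote[0].close  -> array of daily closing prices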

const getSectorMap = () => {
    // pull from the sector map tsv file
    if (!fs.existsSync('sector-map.tsv')) {
        console.error('sector-map.tsv file does not exist. Please run the script to fetch sectors first.');
        return {};
    }
    const sectorMap = fs.readFileSync('sector-map.tsv', 'utf8');
    const lines = sectorMap.split('\n');
    const sectorMapObj = {};
    lines.forEach((line, index) => {
        if (index === 0 || !line.trim()) return; // skip the header line and blank lines
        // split the line by tab and get the name, ticker, sector, and subSector
        const [name, ticker, sector, subSector] = line.split('\t');
        sectorMapObj[ticker.trim()] = [sector.trim(), subSector.trim(), name.trim()];
    });
    return sectorMapObj;
}
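
// Expected sector-map.tsv layout, with illustrative values (the real file is maintained separately):
//   Name<TAB>Ticker<TAB>Sector<TAB>SubSector                               <- header row, skipped above
//   Apple Inc.<TAB>AAPL<TAB>Information Technology<TAB>Technology Hardware
// The resulting map is keyed by ticker, e.g.
//   sectorMapObj['AAPL'] -> ['Information Technology', 'Technology Hardware', 'Apple Inc.']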

const getHistoriesForEachTicker = async (tickers) => {
    // use Promise.all to fetch all histories concurrently
    const histories = await Promise.all(tickers.map(ticker => getSymbolHistory(ticker.symbol)));

    // zip the histories with the tickers
    const zippedHistories = histories.map((history, index) => ({
        ...tickers[index],
        history
    }));
    return zippedHistories;
}

const formatDataFromHistories = (histories) => {
    // format the data from the histories to a more readable format
    const csv_headers = ['Ticker', 'Name', '% Weight', 'Sector', 'SubSector', 'RSI (14)', 'MACD (Histogram Value)', '1W', '1M', '3M', '6M'];
    const csv_final = [csv_headers];

    // get the sector map
    const sectorMap = getSectorMap();
    
    histories.forEach(history => {

        // ticker, name, and weight come from the HTML pull; sector/subsector from the sector map
        const { symbol, name: webName, weight } = history;
        const sector = sectorMap[symbol] ? sectorMap[symbol][0] : 'Unknown';
        const subSector = sectorMap[symbol] ? sectorMap[symbol][1] : 'Unknown';
        const name = sectorMap[symbol] ? sectorMap[symbol][2] : webName || 'Unknown';

        // Get RSI and MACD from the helpers; skip tickers whose fetch failed (getSymbolHistory returns {})
        if (!history.history?.chart) return;
        const timestamps = history.history.chart.result[0].timestamp;
        const prices = history.history.chart.result[0].indicators.quote[0].close;
        const rsi = calculateRSI(prices);
        const macd = calculateMACD(prices);

        // print first 5 timestamps and prices for debugging
        // console.log('First 5 timestamps:', timestamps.slice(0, 5).map(ts => new Date(ts * 1000).toLocaleDateString()));
        // console.log('First 5 prices:', prices.slice(0, 5));
        
        const currentPrice = prices[prices.length - 1];
        // Directly calculate the percentage changes for 1W, 1M, 3M, and 6M
        const oneWeekAgoPrice = prices[prices.length - 6]; // 5 days of trading
        const oneWeekChange = ((currentPrice - oneWeekAgoPrice) / oneWeekAgoPrice) * 100;

        const oneMonthAgoPrice = prices[prices.length - 21]; // 20 days of trading (4 weeks)
        const oneMonthChange = ((currentPrice - oneMonthAgoPrice) / oneMonthAgoPrice) * 100;

        const threeMonthsAgoPrice = prices[parseInt(prices.length / 2) - 1]; // 3 months is half the length of the prices array
        const threeMonthChange = ((currentPrice - threeMonthsAgoPrice) / threeMonthsAgoPrice) * 100;

        const sixMonthsAgoPrice = prices[0]; // 6 months ago is the first price in the array (6mo range)
        const sixMonthChange = ((currentPrice - sixMonthsAgoPrice) / sixMonthsAgoPrice) * 100;
        
        const mappedValues = {
            Ticker: symbol,
            Name: name,
            '% Weight': weight,
            'Sector': sector,
            'SubSector': subSector,
            'RSI (14)': rsi.toFixed(3),
            'MACD (Histogram Value)': macd.toFixed(3),
            '1W': oneWeekChange.toFixed(3) + '%',
            '1M': oneMonthChange.toFixed(3) + '%',
            '3M': threeMonthChange.toFixed(3) + '%',
            '6M': sixMonthChange.toFixed(3) + '%'
        };

        // push the mapped values to the formatted data
        csv_final.push(Object.values(mappedValues));
    });

    // write the formatted data to a TSV file
    const csvContent = csv_final.map(e => e.join('\t')).join('\n');
    if (fs.existsSync('sp500_formatted_data.tsv')) {
        fs.unlinkSync('sp500_formatted_data.tsv');
    }
    fs.writeFileSync('sp500_formatted_data.tsv', csvContent);
    // console.log('Formatted data saved to sp500_formatted_data.tsv');
    return csv_final;
};
// testGetHistories();

const calculateMACD = (prices, shortPeriod = 12, longPeriod = 26, signalPeriod = 9) => {
    // Helper function to calculate the Exponential Moving Average (EMA)
    const exponentialMovingAverage = (data, period) => {
        const k = 2 / (period + 1);
        let ema = [data[0]]; // Start with the first price as the initial EMA

        for (let i = 1; i < data.length; i++) {
            const currentEma = (data[i] * k) + (ema[i - 1] * (1 - k));
            ema.push(currentEma);
        }
        return ema;
    }

    // Calculate the short and long periods
    const ema12 = exponentialMovingAverage(prices, shortPeriod);
    const ema26 = exponentialMovingAverage(prices, longPeriod);

    // Calculate the MACD line
    const macdLine = ema12.map((value, index) => value - ema26[index]);

    // Calculate the signal line
    const signalLine = exponentialMovingAverage(macdLine, signalPeriod);

    // Calculate the MACD histogram
    const macdHistogram = macdLine.map((value, index) => value - signalLine[index]);

    // Return the last value of the MACD histogram
    return macdHistogram[macdHistogram.length - 1];
}
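
// Minimal usage sketch (made-up closing prices; in the screener the input is the
// 6-month daily close series from Yahoo Finance):
//   const sampleCloses = [100, 101, 102, 101, 103, 104, 105, 104, 106, 107];
//   const histogram = calculateMACD(sampleCloses); // positive when the MACD line sits above its signal line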


const calculateRSI = (prices, period = 14) => {
    // calculate the first RSI within our period
    let gains = [];
    let losses = [];

    for (let i = 0; i < period; i++) {
        const change = prices[i + 1] - prices[i];
        if (change > 0) {
            gains.push(change);
            losses.push(0);
        } else {
            losses.push(Math.abs(change));
            gains.push(0);
        }
    }

    const averageGain = gains.reduce((a, b) => a + b, 0) / period;
    const averageLoss = losses.reduce((a, b) => a + b, 0) / period;
    if (averageLoss === 0) {
        console.log('No losses in the period, RSI is 100');
        return 100; // RSI is 100 if there are no losses
    }
    const firstRSI = 100 - (100 / (1 + (averageGain / averageLoss)));
    if (isNaN(firstRSI)) {
        console.error('Calculated RSI is NaN, returning 0');
        return 0; // Return 0 if RSI calculation fails
    }

    const RSIs = [firstRSI];
    // console.log(`Initial RSI for the first ${period} data points: ${firstRSI}`);

    // Calculate the RSI for the rest of the data points
    let previousAverageGain = averageGain;
    let previousAverageLoss = averageLoss;

    for (let i = period; i < prices.length - 1; i++) {
        const change = prices[i + 1] - prices[i];
        let gain = 0;
        let loss = 0;

        if (change > 0) {
            gain = change;
        } else {
            loss = Math.abs(change);
        }

        // Calculate the new average gain and loss
        previousAverageGain = (previousAverageGain * (period - 1) + gain) / period;
        previousAverageLoss = (previousAverageLoss * (period - 1) + loss) / period;
        if (previousAverageLoss === 0) {
            console.log('No losses in the period, RSI is 100');
            return 100; // RSI is 100 if there are no losses
        }

        // add this RSI to the list
        const rsi = 100 - (100 / (1 + (previousAverageGain / previousAverageLoss)));
        RSIs.push(rsi);
    }

    // Return the last calculated RSI
    return RSIs[RSIs.length - 1];
}
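
// Minimal usage sketch (made-up closing prices; needs at least period + 1 values):
//   const sampleCloses = [44, 44.3, 44.1, 43.6, 44.3, 44.8, 45.1, 45.4, 45.8, 46.1, 45.9, 46.3, 46.5, 46.2, 46.6];
//   const rsi = calculateRSI(sampleCloses); // Wilder-smoothed RSI over the default 14-period window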

const runScreener = async () => {
    try {
        // get the test histories from the file
        // const histories = fs.readFileSync('sp500_histories.json', 'utf8');
        // const parsedHistories = JSON.parse(histories);
        // console.log(`Loaded ${parsedHistories.length} histories from sp500_histories.json`);

        // get tickers from file
        const tickers = getTickersFromFile();
        if (tickers.length === 0) {
            console.error('No tickers found. Please ensure sp500_tickers.json exists and is populated.');
            return;
        }
        // console.log(`Found ${tickers.length} tickers in sp500_tickers.json`);
        // get histories for each symbol
        const parsedHistories = await getHistoriesForEachTicker(tickers);
        // console.log(`Fetched histories for ${parsedHistories.length} symbols.`);


        // format the data from the histories
        const formattedData = formatDataFromHistories(parsedHistories);
        // console.log('Formatted data:', formattedData.slice(0, 5)); // Print first 5 entries for brevity

    } catch (error) {
        console.error('Error in runScreener:', error);
        throw error; // propagate so the HTTP handler can report the failure
    }
}


/*
----------------------------------------------------------------------------------------------------------------------------

UPLOAD.js CODE

----------------------------------------------------------------------------------------------------------------------------
*/

// load in google credentials json file
const credentialsPath = 'google_credentials.json';
const { private_key_id, client_email, token_uri, private_key }
    = JSON.parse(fs.readFileSync(credentialsPath, 'utf8'));

const spreadsheetId = '1csTrnR5LwSBft1wtczDdxk0aPnHIdIOHX3MTP-IkpPk';
const templateSheetId = 2006563664; // the ID of the template sheet in the spreadsheet

// url base encoder helper
const urlBase64Encode = (str) => {
    return Buffer.from(str)
        .toString('base64')
        .replace(/\+/g, '-')
        .replace(/\//g, '_')
        .replace(/=+$/, '');
}

// auth using JWT 
// (https://developers.google.com/identity/protocols/oauth2/service-account#httprest)
const createJWT = () => {
    const jwtHeader = {
        alg: 'RS256',
        typ: 'JWT',
        kid: private_key_id
    };

    const jwtClaimSet = {
        iss: client_email,
        scope: 'https://www.googleapis.com/auth/spreadsheets',
        aud: token_uri,
        exp: Math.floor(Date.now() / 1000) + 3600, // 1 hour expiration
        iat: Math.floor(Date.now() / 1000)
    };

    const signedHeader = urlBase64Encode(JSON.stringify(jwtHeader));
    const signedClaimSet = urlBase64Encode(JSON.stringify(jwtClaimSet));
    const signature = urlBase64Encode(
        crypto.createSign('RSA-SHA256')
            .update(`${signedHeader}.${signedClaimSet}`)
            .sign(private_key)
    );

    return `${signedHeader}.${signedClaimSet}.${signature}`;
}
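
// The resulting token is a standard three-part compact JWT:
//   base64url(header) + '.' + base64url(claimSet) + '.' + base64url(RS256 signature)
// which postAuthRequest below exchanges for a short-lived OAuth access token.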

const postAuthRequest = async () => {
    // get the JWT
    const jwt = createJWT();

    // prepare the encoded body
    let body = {
        'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
        'assertion': jwt
    }
    body = new URLSearchParams(body).toString();

    // make the POST request to the token URI
    const options = {
        method: 'POST',
        headers: {
            'Host': 'oauth2.googleapis.com',
            'Content-Type': 'application/x-www-form-urlencoded'
        },
        body,
    };
    const response = await fetch(token_uri, options);
    if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}, message: ${await response.text()}`);
    }

    // return the access token
    const data = await response.json();
    return data.access_token;
}

const randomSheetId = () => {
    // derive a numeric sheet id from today's date (YYYYMMDD), so re-runs on the same day reuse it
    const date = new Date().toISOString().split('T')[0].replace(/-/g, '');
    return parseInt(date, 10);
}

const appendCellsRequest = (sheetId, rows) => {
    const rowsArray = rows.map(row => {
        return {
            values: row.map(cell => {
                return {
                    userEnteredValue: {
                        stringValue: cell
                    }
                };
            })
        };
    });

    return {
        sheetId,
        rows: rowsArray,
        fields: '*',
    };
}
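
// For illustration, a single screener row produces an appendCells request shaped like
// (values shortened; real rows carry all eleven columns from the TSV):
//   {
//     sheetId: 20250101,            // hypothetical date-derived id from randomSheetId()
//     rows: [{ values: [
//       { userEnteredValue: { stringValue: 'AAPL' } },
//       { userEnteredValue: { stringValue: '70.123' } }
//     ] }],
//     fields: '*'
//   }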

const duplicateSheetRequest = (sheetId) => {
    const date = new Date().toISOString().split('T')[0];
    return {
        sourceSheetId: templateSheetId,
        insertSheetIndex: 0,
        newSheetId: sheetId,
        newSheetName: date,
    }
}

const batchUpdateSpreadsheet = (sheetId, rows) => {
    return {
        "requests": [
            {
                "duplicateSheet": duplicateSheetRequest(sheetId)
            },
            {
                "appendCells": appendCellsRequest(sheetId, rows)
            }
        ],
    };
}
const postBatchUpdateRequest = async (batchUpdateRequest) => {
    const accessToken = await postAuthRequest();
    const url = `https://sheets.googleapis.com/v4/spreadsheets/${spreadsheetId}:batchUpdate`;
    const options = {
        method: 'POST',
        headers: {
            'Authorization': `Bearer ${accessToken}`,
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({
            requests: batchUpdateRequest.requests
        })
    };
    const response = await fetch(url, options);
    if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}, message: ${await response.text()}`);
    }
    const data = await response.json();
    return data;
} 

const getRowsFromTSV = (tsvPath) => {
    const tsvData = fs.readFileSync(tsvPath, 'utf8');
    const rows = tsvData.split('\n').map(line => line.split('\t').map(cell => cell.trim()));
    // remove the first row (header)
    rows.shift();
    return rows;
}   

const runUpload = async () => {
    // create a random sheet id
    const sheetId = randomSheetId();
    // get the rows from the tsv file
    const tsvPath = 'sp500_formatted_data.tsv';
    const rows = getRowsFromTSV(tsvPath);
    // create the batch update request
    const batchUpdateRequest = batchUpdateSpreadsheet(sheetId, rows);

    // write the request to a file
    const outputPath = 'batch_update_request.json';
    fs.writeFileSync(outputPath, JSON.stringify(batchUpdateRequest, null, 2));
    // console.log(`Batch update request written to ${outputPath}`);
    // post the batch update request
    try {
        const response = await postBatchUpdateRequest(batchUpdateRequest);
        // console.log('Batch update response:', response);
        return response;
    } catch (error) {
        console.error('Error posting batch update request:', error);
        throw error; // rethrow so screenAndUpload (and the HTTP handler) see the failure
    }
}