Diffstat (limited to 'upload.js')
-rw-r--r--  upload.js  200
1 files changed, 0 insertions, 200 deletions
diff --git a/upload.js b/upload.js
deleted file mode 100644
index 5df247b..0000000
--- a/upload.js
+++ /dev/null
@@ -1,200 +0,0 @@
-import fs from 'fs';
-import crypto from 'crypto';
-
-// load in google credentials json file
-const credentialsPath = 'google_credentials.json';
-const { private_key_id, client_email, token_uri, private_key }
- = JSON.parse(fs.readFileSync(credentialsPath, 'utf8'));
-
-const spreadsheetId = '1csTrnR5LwSBft1wtczDdxk0aPnHIdIOHX3MTP-IkpPk';
-const templateSheetId = 2006563664; // the ID of the template sheet in the spreadsheet
-
-// url base encoder helper
-const urlBase64Encode = (str) => {
- return Buffer.from(str)
- .toString('base64')
- .replace(/\+/g, '-')
- .replace(/\//g, '_')
- .replace(/=+$/, '');
-}
-
-// auth using JWT
-// (https://developers.google.com/identity/protocols/oauth2/service-account#httprest)
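-// The JWT is built from three base64url-encoded, dot-separated segments: the
-// header, the claim set, and an RS256 signature over "header.claimSet" made
-// with the service account's private key. The signed JWT is then exchanged at
-// token_uri for a short-lived access token.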
-const createJWT = () => {
- const jwtHeader = {
- alg: 'RS256',
- typ: 'JWT',
- kid: private_key_id
- };
-
-  const jwtClaimSet = {
- iss: client_email,
- scope: 'https://www.googleapis.com/auth/spreadsheets',
- aud: token_uri,
- exp: Math.floor(Date.now() / 1000) + 3600, // 1 hour expiration
- iat: Math.floor(Date.now() / 1000)
- };
-
-  const encodedHeader = urlBase64Encode(JSON.stringify(jwtHeader));
-  const encodedClaimSet = urlBase64Encode(JSON.stringify(jwtClaimSet));
-  const signature = urlBase64Encode(
-    crypto.createSign('RSA-SHA256')
-      .update(`${encodedHeader}.${encodedClaimSet}`)
-      .sign(private_key)
-  );
-
-  return `${encodedHeader}.${encodedClaimSet}.${signature}`;
-}
-
-const postAuthRequest = async () => {
- // get the JWT
- const jwt = createJWT();
-
-  // prepare the URL-encoded form body
-  const body = new URLSearchParams({
-    grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',
-    assertion: jwt
-  }).toString();
-
- // make the POST request to the token URI
- const options = {
- method: 'POST',
- headers: {
- 'Host': 'oauth2.googleapis.com',
- 'Content-Type': 'application/x-www-form-urlencoded'
- },
- body,
- };
- const response = await fetch(token_uri, options);
- if (!response.ok) {
- throw new Error(`HTTP error! status: ${response.status}, message: ${await response.text()}`);
- }
-
- // return the access token
- const data = await response.json();
- return data.access_token;
-}
-
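-// NOTE: the sheet ID below is derived from today's date, so running the upload
-// twice on the same day reuses the same ID and the duplicateSheet request will
-// likely be rejected because a sheet with that ID already exists.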
-const dateSheetId = () => {
-  // use today's date without dashes, e.g. 2024-01-15 -> 20240115
-  const date = new Date().toISOString().split('T')[0].replace(/-/g, '');
-  return parseInt(date, 10);
-}
-
-const appendCellsRequest = (sheetId, rows) => {
-  const rowsArray = rows.map(row => {
-    return {
-      values: row.map(value => {
- return {
- userEnteredValue: {
- stringValue: value
- }
- };
- })
- };
- });
-
- return {
- sheetId,
- rows: rowsArray,
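-    // '*' is shorthand for updating every CellData field of the supplied rows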
- fields: '*',
- };
-}
-
-const duplicateSheetRequest = (sheetId) => {
- const date = new Date().toISOString().split('T')[0];
- return {
- sourceSheetId: templateSheetId,
- insertSheetIndex: 0,
- newSheetId: sheetId,
- newSheetName: date,
- }
-}
-
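-// Both requests are sent in a single spreadsheets.batchUpdate call; the Sheets
-// API validates every request in the batch before applying any of them, so the
-// template copy and the appended rows land together or not at all.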
-const batchUpdateSpreadsheet = (sheetId, rows) => {
-  return {
-    requests: [
-      {
-        duplicateSheet: duplicateSheetRequest(sheetId)
-      },
-      {
-        appendCells: appendCellsRequest(sheetId, rows)
-      }
-    ],
-  };
-}
-
-const postBatchUpdateRequest = async (batchUpdateRequest) => {
- const accessToken = await postAuthRequest();
- const url = `https://sheets.googleapis.com/v4/spreadsheets/${spreadsheetId}:batchUpdate`;
- const options = {
- method: 'POST',
- headers: {
- 'Authorization': `Bearer ${accessToken}`,
- 'Content-Type': 'application/json'
- },
- body: JSON.stringify({
- requests: batchUpdateRequest.requests
- })
- };
- const response = await fetch(url, options);
- if (!response.ok) {
- throw new Error(`HTTP error! status: ${response.status}, message: ${await response.text()}`);
- }
- const data = await response.json();
- return data;
-}
-
-const getRowsFromTSV = (tsvPath) => {
- const tsvData = fs.readFileSync(tsvPath, 'utf8');
-  const rows = tsvData
-    .split('\n')
-    .filter(line => line.trim() !== '') // drop blank lines, including a trailing newline
-    .map(line => line.split('\t').map(cell => cell.trim()));
- // remove the first row (header)
- rows.shift();
- return rows;
-}
-
-export const runUpload = async () => {
-  // create a sheet id from today's date
-  const sheetId = dateSheetId();
- // get the rows from the tsv file
- const tsvPath = 'sp500_formatted_data.tsv';
- const rows = getRowsFromTSV(tsvPath);
- // create the batch update request
- const batchUpdateRequest = batchUpdateSpreadsheet(sheetId, rows);
-
- // write the request to a file
- const outputPath = 'batch_update_request.json';
- fs.writeFileSync(outputPath, JSON.stringify(batchUpdateRequest, null, 2));
- console.log(`Batch update request written to ${outputPath}`);
- // post the batch update request
- try {
- const response = await postBatchUpdateRequest(batchUpdateRequest);
- console.log('Batch update response:', response);
- } catch (error) {
- console.error('Error:', error);
- }
-}
\ No newline at end of file