author    Julia McCauley <skurvyj@gmail.com>  2021-04-17 22:33:43 -0400
committer Julia McCauley <skurvyj@gmail.com>  2021-04-17 22:33:43 -0400
commit    f7d040e978953956f204ee2c5a10548629114e3e (patch)
tree      a23d50047c9c16c1fa73c0627588e12c9e89866e
parent    7ce891a72a5932bbd8c2bfd19bf2568f125e437b (diff)
parent    9a8483885977d6ca17344d465e431f1f2cdafc06 (diff)
Merge branch 'edgar-api' of github.com:cs0320-2021/term-project-cohwille-jmccaul3-mfoiani-rhunt2 into merge_data
# Conflicts:
#   data/trades.sqlite3
#   src/main/java/edu/brown/cs/student/term/Main.java
-rw-r--r--  data/trades.sqlite3                                                      bin 36864 -> 16539648 bytes
-rw-r--r--  src/main/java/edu/brown/cs/student/term/Main.java                        175
-rw-r--r--  src/main/java/edu/brown/cs/student/term/parsing/FilingFeed.java          60
-rw-r--r--  src/main/java/edu/brown/cs/student/term/parsing/TxtXmlParser.java        103
-rw-r--r--  src/main/java/edu/brown/cs/student/term/parsing/UrlXmlParser.java        2
-rw-r--r--  src/main/java/edu/brown/cs/student/term/repl/commands/LoadCommand.java   155
-rw-r--r--  src/test/java/edu/brown/cs/student/FilingTest.java                       82
-rw-r--r--  trades.sqlite3                                                           0
8 files changed, 447 insertions(+), 130 deletions(-)
diff --git a/data/trades.sqlite3 b/data/trades.sqlite3
index db88fc6..878261a 100644
--- a/data/trades.sqlite3
+++ b/data/trades.sqlite3
Binary files differ
diff --git a/src/main/java/edu/brown/cs/student/term/Main.java b/src/main/java/edu/brown/cs/student/term/Main.java
index db077db..55b1634 100644
--- a/src/main/java/edu/brown/cs/student/term/Main.java
+++ b/src/main/java/edu/brown/cs/student/term/Main.java
@@ -1,28 +1,35 @@
package edu.brown.cs.student.term;
-import com.google.common.collect.ImmutableMap;
-import edu.brown.cs.student.term.hub.Holder;
-import edu.brown.cs.student.term.profit.ProfitCalculation;
-import edu.brown.cs.student.term.profit.StockHolding;
-import edu.brown.cs.student.term.hub.SuspicionRanker;
import edu.brown.cs.student.term.repl.Command;
import edu.brown.cs.student.term.repl.REPL;
import edu.brown.cs.student.term.repl.commands.LoadCommand;
-import edu.brown.cs.student.term.repl.commands.RankCommand;
import edu.brown.cs.student.term.repl.commands.SetupCommand;
-import edu.brown.cs.student.term.trade.Trade;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.time.Instant;
+import java.sql.Connection;
import java.sql.Date;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
import java.util.HashMap;
+import java.io.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
import spark.*;
import spark.template.freemarker.FreeMarkerEngine;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+
+import com.google.common.collect.ImmutableMap;
+
import freemarker.template.Configuration;
//fix
@@ -30,21 +37,44 @@ import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
-import java.util.LinkedList;
+import java.sql.ResultSet;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-
+import java.util.*;
+import com.google.common.collect.ImmutableMap;
import com.google.gson.Gson;
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import spark.ExceptionHandler;
+import spark.ModelAndView;
+import spark.QueryParamsMap;
+import spark.Request;
+import spark.Response;
+import spark.Route;
+import spark.Filter;
+import spark.Spark;
+import spark.TemplateViewRoute;
+import spark.template.freemarker.FreeMarkerEngine;
+
+import freemarker.template.Configuration;
+
import org.json.JSONObject;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
/**
* The Main class of our project. This is where execution begins.
*/
public final class Main {
+ // TODO: fix temporary solution
public static JSONObject xmlLinks = null;
- private static final Gson GSON = new Gson();
+
+
private static final int DEFAULT_PORT = 4567;
/**
@@ -73,9 +103,6 @@ public final class Main {
if (options.has("gui")) {
runSparkServer((int) options.valueOf("port"));
- //will auto connect to correct db when running gui!
- SetupCommand setConnection = new SetupCommand();
- setConnection.run(new String[] {"data/trades.sqlite3"});
}
if (!options.has("debug")) {
@@ -85,11 +112,9 @@ public final class Main {
}));
}
-
HashMap<String, Command> commandHashMap = new HashMap<>();
commandHashMap.put("setup", new SetupCommand());
commandHashMap.put("load", new LoadCommand());
- commandHashMap.put("rank", new RankCommand());
/** add commands to map here! */
REPL repl = new REPL(commandHashMap);
repl.runREPL();
@@ -108,7 +133,7 @@ public final class Main {
return new FreeMarkerEngine(config);
}
- public void runSparkServer(int port) {
+ public void runSparkServer(int port) {
Spark.port(port);
Spark.externalStaticFileLocation("src/main/resources/static");
Spark.exception(Exception.class, new ExceptionPrinter());
@@ -116,107 +141,43 @@ public final class Main {
Spark.options("/*",
(request, response) -> {
- String accessControlRequestHeaders = request
- .headers("Access-Control-Request-Headers");
- if (accessControlRequestHeaders != null) {
- response.header("Access-Control-Allow-Headers",
- accessControlRequestHeaders);
- }
-
- String accessControlRequestMethod = request
- .headers("Access-Control-Request-Method");
- if (accessControlRequestMethod != null) {
- response.header("Access-Control-Allow-Methods",
- accessControlRequestMethod);
- }
-
- return "OK";
+ String accessControlRequestHeaders = request
+ .headers("Access-Control-Request-Headers");
+ if (accessControlRequestHeaders != null) {
+ response.header("Access-Control-Allow-Headers",
+ accessControlRequestHeaders);
+ }
+
+ String accessControlRequestMethod = request
+ .headers("Access-Control-Request-Method");
+ if (accessControlRequestMethod != null) {
+ response.header("Access-Control-Allow-Methods",
+ accessControlRequestMethod);
+ }
+
+ return "OK";
});
+
+
Spark.before((request, response) -> response.header("Access-Control-Allow-Origin", "*"));
- //TODO: Add system testing for all of our end points
- Spark.post("/data", new SuspicionRankHandler());
- Spark.post("/profit", new ProfitQueryHandler());
- Spark.post("/trade-lookup", new TradeQueryHandler());
+ Spark.post("/data", new DataHandler());
}
- /**
- * Gets the list of holders with id, name, and suspicion rank.
- */
- private static class SuspicionRankHandler implements Route {
- @Override
- /**
- * Expects that the request will contain two longs that are the start/end
- * dates for the suspicion rank to run on as epoch time in milliseconds
- */
- public Object handle(Request request, Response response) throws Exception {
- //String str = request.body();
- //xmlLinks = new JSONObject(str); //this is all the filedAt times and xml files
- try {
- DatabaseQuerier db = SetupCommand.getDq();
- SuspicionRanker ranker = new SuspicionRanker(db);
-
- JSONObject data = new JSONObject(request.body());
-
- long startMilli = data.getLong("start");
- long endMilli = data.getLong("end");
- Instant start = Instant.ofEpochMilli(startMilli);
- Instant end = Instant.ofEpochMilli(endMilli);
- List<Holder> suspiciousHolders = ranker.getSuspicionScoreList(start, end);
- Map<String, Object> variables = ImmutableMap.of("holders", suspiciousHolders);
- return GSON.toJson(variables);
- } catch (Exception e) {
- System.out.println("Error retrieving the suspicion ranks for GUI");
- return "Error";
- }
- }
- }
- private static class ProfitQueryHandler implements Route {
+ private static class DataHandler implements Route {
@Override
public Object handle(Request request, Response response) throws Exception {
- JSONObject req = new JSONObject(request.body());
- String person = req.getString("person");
- Date startPeriod = new Date(req.getLong("startTime"));
- Date endPeriod = new Date(req.getLong("endTime"));
-
- ProfitCalculation profit =
- new ProfitCalculation(DatabaseQuerier.getConn(), person, startPeriod, endPeriod);
- List<StockHolding> holdings = profit.getHoldingsList();
- double gains = profit.calculateGains();
- double sp500PercentGain = profit.compareToSP500();
-
- Map<String, Object> res = new HashMap<>();
- res.put("person", person);
- res.put("moneyIn", profit.getMoneyInput());
- res.put("moneyOut", profit.getMoneyInput() + gains);
- res.put("holdings", holdings);
- res.put("percentGain", 100 * gains / profit.getMoneyInput());
- res.put("SP500", (1 + sp500PercentGain) * profit.getMoneyInput());
- res.put("percentSP500", 100 * sp500PercentGain);
- return GSON.toJson(res);
+ String str = request.body();
+ xmlLinks = new JSONObject(str); //this is all the filedAt times and xml files
- }
-
- }
-
- private static class TradeQueryHandler implements Route {
- @Override
- public Object handle(Request request, Response response) throws Exception {
- JSONObject req = new JSONObject(request.body());
- String person = req.getString("person");
- Date startPeriod = new Date(req.getLong("startTime"));
- Date endPeriod = new Date(req.getLong("endTime"));
-
- DatabaseQuerier db = SetupCommand.getDq();
- List<Trade> trades = db.getAllTradesByHolder(person, startPeriod, endPeriod);
-
- return GSON.toJson(trades);
+ return "replace";
}
}
/**
* Display an error page when an exception occurs in the server.
+ *
*/
private static class ExceptionPrinter implements ExceptionHandler {
@Override
@@ -231,4 +192,6 @@ public final class Main {
res.body(stacktrace.toString());
}
}
+
+
}
\ No newline at end of file
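A minimal client-side sketch of posting to the new /data route, which for now just stashes the request body in Main.xmlLinks (per the "fix temporary solution" TODO) and returns the placeholder "replace". The JSON shape used here (a "data" array of objects with "timestamp" and "url" fields) is an assumption carried over from the LoadCommand parsing removed later in this diff, and the URL in the body is hypothetical.

    // Sketch: POST filing links to the /data endpoint started by runSparkServer().
    // The body shape is an assumption based on the removed LoadCommand parsing.
    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class DataPostSketch {
      public static void main(String[] args) throws Exception {
        String body = "{\"data\":[{\"timestamp\":\"2021-04-17T00:00:00Z\","
            + "\"url\":\"https://www.sec.gov/example.xml\"}]}";
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:4567/data"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // currently the placeholder "replace"
      }
    }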
diff --git a/src/main/java/edu/brown/cs/student/term/parsing/FilingFeed.java b/src/main/java/edu/brown/cs/student/term/parsing/FilingFeed.java
new file mode 100644
index 0000000..b5a6acf
--- /dev/null
+++ b/src/main/java/edu/brown/cs/student/term/parsing/FilingFeed.java
@@ -0,0 +1,60 @@
+package edu.brown.cs.student.term.parsing;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Represents the filings from the Edgar RSS feed.
+ */
+public class FilingFeed {
+ private final List<String> filings;
+
+ /**
+ * Constructor that takes the parsed document and extracts the filing URLs.
+ * @param document The document of the rss feed.
+ */
+ public FilingFeed(Document document) {
+ // Init array
+ filings = new ArrayList<>();
+
+ // Get all entries
+ NodeList entries = document.getElementsByTagName("entry");
+ for (int i = 0; i < entries.getLength(); i++) {
+ // Assertion allows the cast to be ok :)
+ assert entries.item(i).getNodeType() == Node.ELEMENT_NODE;
+ Element entry = (Element) entries.item(i);
+
+ NodeList link = entry.getElementsByTagName("link");
+ String linkUrl = link.item(0).getAttributes().getNamedItem("href").getNodeValue();
+
+ filings.add(getXmlUrl(linkUrl));
+ }
+ }
+
+ /**
+ * Turns the filing's index link from the feed into a publicly hosted .txt URL.
+ * @param filingUrl The filing link taken from a feed entry (ends in -index.htm).
+ * @return The publicly hosted .txt version of the URL.
+ */
+ private String getXmlUrl(String filingUrl) {
+ String url = filingUrl.replace("-index.htm", ".txt");
+ if (!url.contains("https://www.sec.gov/")) {
+ url = "https://www.sec.gov" + url;
+ }
+ return url;
+ }
+
+ /**
+ * Accessor that returns the URLs to the .txt format of the filings.
+ * @return The list of publicly hosted urls to each filing.
+ */
+ public List<String> getFilings() {
+ return filings;
+ }
+
+}
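A minimal usage sketch for FilingFeed, assuming UrlXmlParser.parse() returns the Atom feed as a DOM Document (as the LoadCommand changes below rely on); the query URL is the live-feed form used there.

    // Sketch: fetch the live Edgar Form 4 feed and list the public .txt filing URLs.
    // Assumes UrlXmlParser.parse() and FilingFeed behave as defined in this diff.
    import edu.brown.cs.student.term.parsing.FilingFeed;
    import edu.brown.cs.student.term.parsing.UrlXmlParser;
    import org.w3c.dom.Document;

    public class FilingFeedSketch {
      public static void main(String[] args) {
        String feedUrl = "https://www.sec.gov/cgi-bin/browse-edgar?"
            + "action=getcurrent&CIK=&type=4&company=&dateb=&owner=only"
            + "&start=0&count=10&output=atom";
        Document doc = new UrlXmlParser().parse(feedUrl);
        if (doc == null) {
          System.err.println("Feed request failed; nothing to list.");
          return;
        }
        // Each entry's -index.htm link is rewritten to the public .txt form.
        for (String filingUrl : new FilingFeed(doc).getFilings()) {
          System.out.println(filingUrl);
        }
      }
    }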
diff --git a/src/main/java/edu/brown/cs/student/term/parsing/TxtXmlParser.java b/src/main/java/edu/brown/cs/student/term/parsing/TxtXmlParser.java
new file mode 100644
index 0000000..2e30fa7
--- /dev/null
+++ b/src/main/java/edu/brown/cs/student/term/parsing/TxtXmlParser.java
@@ -0,0 +1,103 @@
+package edu.brown.cs.student.term.parsing;
+
+import org.w3c.dom.Document;
+import org.xml.sax.InputSource;
+import org.xml.sax.SAXException;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.StringReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.time.Instant;
+
+/**
+ * Class that parses the XML contained within a publicly hosted .txt filing.
+ */
+public class TxtXmlParser extends XmlParser {
+ public final static SimpleDateFormat TIMECONVERTER = new SimpleDateFormat("yyyyMMddHHmmss");
+
+ private long timestamp;
+
+ public TxtXmlParser() {
+ super();
+ timestamp = -1;
+ }
+
+ /**
+ * Method used to parse the xml file.
+ *
+ * @param pathToXml The path to the xml text file.
+ * @return The tree structure parsed as an xml doc.
+ */
+ @Override
+ public Document parse(String pathToXml) {
+ try {
+ System.err.println("LOG: To make class for url: " + pathToXml + " in parse() of " + getClass());
+ URL url = new URL(pathToXml);
+ System.err.println("LOG: To establish urlConnection in parse() of " + getClass());
+ URLConnection conn = url.openConnection();
+ conn.addRequestProperty("User-Agent", "Chrome");
+ System.err.println("LOG: Making bufferedReader for url: " + pathToXml + " in " + getClass());
+ BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+
+ StringBuilder xmlParts = new StringBuilder();
+
+ boolean isXml = false;
+ String line;
+ while ((line = br.readLine()) != null) {
+ // Get timestamp
+ if (line.startsWith("<ACCEPTANCE-DATETIME>")) {
+ String datetime = line.replaceAll("<ACCEPTANCE-DATETIME>", "");
+ // TODO: check for errors
+ this.timestamp = formatTimestamp(datetime);
+ }
+
+ // For xml
+ if (line.equals("</XML>")) {
+ break;
+ }
+ if (isXml) {
+ xmlParts.append(line);
+ }
+ if (line.equals("<XML>")) {
+ isXml = true;
+ }
+ }
+ System.err.println("LOG: Calling builder.parse() after extracting xml parts from: " + pathToXml + " in " + getClass());
+
+ InputSource xmlLines = new InputSource(new StringReader(xmlParts.toString()));
+ return builder.parse(xmlLines);
+ } catch (SAXException e) {
+ System.err.println("INTERNAL: SAX " + getClass() + " : " + e.getClass());
+ } catch (IOException e) {
+ e.printStackTrace();
+ System.err.println("INTERNAL: IO " + getClass() + " : " + e.getClass());
+ }
+ return null;
+ }
+
+ public long formatTimestamp(String datetime) {
+ long timestamp = -1;
+ try {
+ timestamp = TIMECONVERTER.parse(datetime).toInstant().toEpochMilli();
+ } catch (ParseException e) {
+ e.printStackTrace();
+ }
+ return timestamp;
+ }
+
+ /**
+ * Returns the timestamp then resets it to -1.
+ * @return The timestamp as a number (long). -1 if not assigned.
+ */
+ public long getTimestamp() {
+ long temp = timestamp;
+ // Set to -1 for next one...
+ timestamp = -1;
+ return temp;
+ }
+}
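A minimal sketch of how TxtXmlParser's two outputs fit together: parse() returns the XML document embedded in the .txt filing, and getTimestamp() returns the ACCEPTANCE-DATETIME as epoch milliseconds, resetting itself to -1 afterwards, so it must be read immediately after parse(). The filing URL is the one used in FilingTest below.

    // Sketch: parse one filing and read its acceptance timestamp.
    import edu.brown.cs.student.term.parsing.TxtXmlParser;
    import org.w3c.dom.Document;

    public class TxtXmlParserSketch {
      public static void main(String[] args) {
        TxtXmlParser parser = new TxtXmlParser();
        String url = "https://www.sec.gov/Archives/edgar/data/1597341/"
            + "000141588921001958/0001415889-21-001958.txt";
        Document doc = parser.parse(url);
        long acceptedAt = parser.getTimestamp(); // epoch millis, or -1 on failure
        if (doc == null || acceptedAt == -1) {
          System.err.println("Filing failed to parse.");
          return;
        }
        System.out.println("Accepted at " + acceptedAt + " ms since the epoch");
      }
    }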
diff --git a/src/main/java/edu/brown/cs/student/term/parsing/UrlXmlParser.java b/src/main/java/edu/brown/cs/student/term/parsing/UrlXmlParser.java
index c89c31d..21cd7c5 100644
--- a/src/main/java/edu/brown/cs/student/term/parsing/UrlXmlParser.java
+++ b/src/main/java/edu/brown/cs/student/term/parsing/UrlXmlParser.java
@@ -6,8 +6,10 @@ import org.xml.sax.SAXException;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
+import java.time.Instant;
public class UrlXmlParser extends XmlParser{
+
public UrlXmlParser() {
super();
}
diff --git a/src/main/java/edu/brown/cs/student/term/repl/commands/LoadCommand.java b/src/main/java/edu/brown/cs/student/term/repl/commands/LoadCommand.java
index 54f9fc0..00ba3ad 100644
--- a/src/main/java/edu/brown/cs/student/term/repl/commands/LoadCommand.java
+++ b/src/main/java/edu/brown/cs/student/term/repl/commands/LoadCommand.java
@@ -2,8 +2,11 @@ package edu.brown.cs.student.term.repl.commands;
import edu.brown.cs.student.term.DatabaseQuerier;
import edu.brown.cs.student.term.Main;
+import edu.brown.cs.student.term.parsing.FilingFeed;
import edu.brown.cs.student.term.parsing.Transaction;
+import edu.brown.cs.student.term.parsing.TxtXmlParser;
import edu.brown.cs.student.term.parsing.UrlXmlParser;
+import edu.brown.cs.student.term.parsing.XmlParser;
import edu.brown.cs.student.term.repl.Command;
import edu.brown.cs.student.term.trade.Trade;
import org.json.JSONArray;
@@ -15,10 +18,13 @@ import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.Instant;
import java.time.ZonedDateTime;
+import java.util.ArrayList;
+import java.util.List;
public class LoadCommand implements Command {
private Connection conn;
- private final static UrlXmlParser URL_XML_PARSER = new UrlXmlParser();
+ private final static XmlParser URL_XML_PARSER = new UrlXmlParser();
+ private final static TxtXmlParser TXT_XML_PARSER = new TxtXmlParser();
/**
* Main run method for every command.
@@ -27,46 +33,145 @@ public class LoadCommand implements Command {
*/
@Override
public String run(String[] args) {
- // TODO: add log comments
+ // param checking
+ if (args.length != 1 && args.length !=2 && args.length !=3) {
+ return "ERROR: Incorrect number of arguments for load command";
+ }
+
+ int numFilings;
+ try {
+ numFilings = Integer.parseInt(args[0]);
+ if (numFilings <=0) {
+ return "ERROR: Please input an positive integer for number of filings.";
+ }
+ } catch (NumberFormatException e) {
+ return "ERROR: Please input an integer for number of filings.";
+ }
+
+ int shift = 0;
+ try {
+ if (args.length == 2) {
+ shift = Integer.parseInt(args[1]);
+ if (shift <=0) {
+ return "ERROR: Please input an positive integer for the count shift.";
+ }
+ }
+ } catch (NumberFormatException e) {
+ return "ERROR: Please input an integer for the shift.";
+ }
+
+ String filingDate = null;
+ if (args.length == 3) {
+ filingDate = args[2];
+ System.out.println("WARNING: The archive version of the command make take " +
+ "a long time if a broad query param is inputted.");
+ }
+
+
System.err.println("LOG: Entered .run() of " + getClass());
- // TODO: call to api for urls to call through the urlxmlparser from reagan
- if (Main.xmlLinks == null) {
- return "ERROR: Please load xml links from frontend.";
+ //List<String> filingUrls = getFilings(numFilings);
+ getFilings(numFilings, shift, filingDate);
+
+ //loadFilings(filingUrls);
+
+ return "Finished loading " + numFilings + " filings.";
+ }
+
+ private void timeout() {
+ // System.out.println("timeout 100 mil");
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
}
+ }
- conn = DatabaseQuerier.getConn();
- JSONArray data = Main.xmlLinks.getJSONArray("data");
- for(int i =0; i < data.length(); i++) {
- JSONObject link = data.optJSONObject(i);
- String timestamp = link.getString("timestamp");
- String url = link.getString("url");
+ /**
+ * Parses the filing URLs and loads them into the DB configured by the setup command.
+ * @param urls The list of urls to parsable Edgar txt files.
+ */
+ public void loadFilings(List<String> urls) {
+ if (urls.isEmpty()) {
+ System.err.println("WARNING: No filings loaded.");
+ return;
+ }
+
+ conn = DatabaseQuerier.getConn();
+ for(String url : urls) {
try {
System.err.println("LOG: Calling loadTransactionIntoDB() in " + getClass());
- loadTransactionIntoDB(timestamp, url);
+ loadTransactionIntoDB(url);
} catch (SQLException throwables) {
System.err.println("INTERNAL: SQLException in .run() of " + getClass());
//throwables.printStackTrace();
}
}
+ }
+
+ /**
+ * Makes a request to the public Edgar URL and parses its RSS feed.
+ * @param numFilings The number of filings to parse.
+ * @param shift The offset added to the feed's start index.
+ * @param filingDate Optional filing-date filter for the archive query; null uses the live feed.
+ */
+ private void getFilings(int numFilings, int shift, String filingDate) {
+ int counter = 0;
- return "Loaded?";
+ while (100*counter <= (numFilings - shift)) {
+ timeout();
+
+ String queryUrl =
+ (filingDate != null) ?
+ "https://www.sec.gov/cgi-bin/srch-edgar?" +
+ "text=form-type%3D4+and+(filing-date%3D" + filingDate + ")" +
+ "&start=" + (100*counter++ + shift) +
+ "&count=" + 100 +
+ "&first=2020" +
+ "&last=2021" +
+ "&output=atom"
+ :
+ "https://www.sec.gov/cgi-bin/browse-edgar?" +
+ "action=getcurrent" +
+ "&CIK=" +
+ "&type=4" +
+ "&company=" +
+ "&dateb=" +
+ "&owner=only" +
+ "&start=" + (100*counter++ + shift) +
+ "&count=" + 100 +
+ "&output=atom";
+
+ System.err.println("LOG: Requesting filings with url: " + queryUrl);
+ Document document = URL_XML_PARSER.parse(queryUrl);
+ if (document == null) {
+ System.err.println("WARNING: Document was null " + queryUrl + " in getFilings(): " + getClass());
+ continue;
+ }
+
+ FilingFeed filingFeed = new FilingFeed(document);
+ loadFilings(filingFeed.getFilings());
+
+ if (counter%10 == 0) {
+ System.out.println("PROGRESS: " + counter*100 + "/" + numFilings);
+ }
+ }
+ // TODO: make params more adjustable
}
+
/**
* Loads a whole transaction, which can have multiple trades, into the DB.
* @param url The url to the public xml file.
* @throws SQLException If the prep statement fails or db doesn't exist, throws.
*/
- private void loadTransactionIntoDB(String timestamp, String url) throws SQLException {
+ private void loadTransactionIntoDB(String url) throws SQLException {
System.err.println("LOG: Parsing XML into transaction in loadTransactionIntoDB(). URL: " + url);
// TODO: check if this is right @julia
// TODO: add parse error handling...
- ZonedDateTime zonedDateTime = ZonedDateTime.parse(timestamp);
- Instant instant = zonedDateTime.toInstant();
-
- Document document = URL_XML_PARSER.parse(url);
- if (document == null) {
+ // brief pause between requests to avoid too-many-requests errors
+ timeout();
+ Document document = TXT_XML_PARSER.parse(url);
+ long timestamp = TXT_XML_PARSER.getTimestamp();
+ if (document == null || timestamp == -1) {
System.err.println("WARNING: URL " + url + " failed to parse... continuing.");
return;
}
@@ -77,7 +182,7 @@ public class LoadCommand implements Command {
for(Trade trade : helper.getTrades()) {
System.err.println("LOG: Loading a trade into DB -> " + trade);
- loadTradeIntoDB(instant, trade);
+ loadTradeIntoDB(timestamp, trade, url);
System.err.println("LOG: Loaded that trade.");
}
} catch (Exception e) {
@@ -90,7 +195,7 @@ public class LoadCommand implements Command {
* @param trade The trade to be loaded.
* @throws SQLException If the prep statement fails or db doesn't exist, throws.
*/
- private void loadTradeIntoDB(Instant instant, Trade trade) throws SQLException {
+ private void loadTradeIntoDB(long timestamp, Trade trade, String url) throws SQLException {
// current table schema that is used...
// TODO: make this TABLE with this SCHEMA if doesn't exist.
/*
@@ -103,23 +208,25 @@ public class LoadCommand implements Command {
number_of_shares INTEGER,
holder_id INTEGER,
share_price NUMERIC,
+ filing_url TEXT
UNIQUE (trade_timestamp, is_buy, number_of_shares, holder_id, share_price));
*/
System.err.println("LOG: Setting prepared statement on " + conn);
PreparedStatement prep = conn.prepareStatement(
"INSERT INTO trades (stock_name, holder_name, trade_timestamp, is_buy, " +
- "number_of_shares, holder_id, share_price) " +
- "VALUES (?, ?, ?, ?, ?, ?, ?)");
+ "number_of_shares, holder_id, share_price, filing_url) " +
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
prep.setString(1, trade.getStock());
prep.setString(2, trade.getHolder().getName());
// TODO: update with timestamp @julia
- prep.setLong(3, instant.toEpochMilli());
+ prep.setLong(3, timestamp);
prep.setInt(4, trade.isBuy() ? 1 : 0);
prep.setInt(5, trade.getNumShares());
prep.setInt(6, trade.getHolder().getId());
prep.setDouble(7, trade.getPrice());
+ prep.setString(8, url);
System.err.println("LOG: Inserted values into prep statement.");
prep.execute();
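The schema comment above is only partially visible here, and a TODO notes that the trades table should be created if it does not exist. A hedged sketch of that DDL, reconstructed from the INSERT columns and the visible schema fragment; the types of stock_name, holder_name, trade_timestamp, and is_buy are assumptions.

    // Sketch: create the trades table the load command inserts into, if absent.
    // Only the last four columns and the UNIQUE constraint appear in this diff;
    // the remaining column types are assumptions.
    import java.sql.Connection;
    import java.sql.SQLException;
    import java.sql.Statement;

    public final class TradesSchemaSketch {
      private TradesSchemaSketch() { }

      public static void createTradesTable(Connection conn) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
          stmt.executeUpdate(
              "CREATE TABLE IF NOT EXISTS trades ("
                  + "stock_name TEXT, "
                  + "holder_name TEXT, "
                  + "trade_timestamp INTEGER, "
                  + "is_buy INTEGER, "
                  + "number_of_shares INTEGER, "
                  + "holder_id INTEGER, "
                  + "share_price NUMERIC, "
                  + "filing_url TEXT, "
                  + "UNIQUE (trade_timestamp, is_buy, number_of_shares, holder_id, share_price))");
        }
      }
    }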
diff --git a/src/test/java/edu/brown/cs/student/FilingTest.java b/src/test/java/edu/brown/cs/student/FilingTest.java
new file mode 100644
index 0000000..a9b21d3
--- /dev/null
+++ b/src/test/java/edu/brown/cs/student/FilingTest.java
@@ -0,0 +1,82 @@
+package edu.brown.cs.student;
+
+import edu.brown.cs.student.term.parsing.LocalXmlParser;
+import edu.brown.cs.student.term.parsing.Transaction;
+import edu.brown.cs.student.term.parsing.TxtXmlParser;
+import edu.brown.cs.student.term.parsing.UrlXmlParser;
+import edu.brown.cs.student.term.parsing.XmlParser;
+import edu.brown.cs.student.term.trade.Trade;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import javax.print.Doc;
+
+import static org.junit.Assert.*;
+
+public class FilingTest {
+ private XmlParser _xmlParser, _txtXmlParser;
+
+ @Before
+ public void setUp() {
+ _xmlParser = new UrlXmlParser();
+ _txtXmlParser = new TxtXmlParser();
+ }
+
+ @After
+ public void tearDown() {
+ _xmlParser = null;
+ _txtXmlParser = null;
+ }
+
+ @Test
+ public void seeWorks(){
+ setUp();
+
+ String url = "https://www.sec.gov/cgi-bin/browse-edgar?" +
+ "action=getcurrent" +
+ "&CIK=" +
+ "&type=4" +
+ "&company=" +
+ "&dateb=" +
+ "&owner=only" +
+ "&start=0" +
+ "&count=10" +
+ "&output=atom";
+
+ Document doc = _xmlParser.parse(url);
+ assertNotNull(doc);
+ NodeList entries = doc.getElementsByTagName("entry");
+ assertNotEquals(entries.getLength(), 0);
+ assertEquals(entries.item(0).getNodeType(), Node.ELEMENT_NODE);
+ for (int i = 0; i < entries.getLength(); i++) {
+ Element entry = (Element) entries.item(i);
+ NodeList link = entry.getElementsByTagName("link");
+ assertEquals(link.getLength(), 1);
+ String linkUrl = link.item(0).getAttributes().getNamedItem("href").getNodeValue();
+ System.out.println(linkUrl);
+
+ NodeList updated = entry.getElementsByTagName("updated");
+ assertEquals(updated.getLength(), 1);
+ System.out.println(updated.item(0).getTextContent());
+ }
+
+ tearDown();
+ }
+
+ @Test
+ public void xmlUrlFromFilingUrl(){
+ setUp();
+
+ String url = "https://www.sec.gov/Archives/edgar/data/1597341/000141588921001958/0001415889-21-001958.txt";
+ Document doc = _txtXmlParser.parse(url);
+ assertNotNull(doc);
+ tearDown();
+ }
+
+
+}
diff --git a/trades.sqlite3 b/trades.sqlite3
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/trades.sqlite3