From 983f51b62f4b869bdb86fc4b708098d02f0d749d Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Mon, 3 Feb 2020 17:57:09 -0500 Subject: added base64 encodings support for image upload, removed logs from scraper.py --- src/server/ApiManagers/DownloadManager.ts | 6 ++++-- src/server/ApiManagers/GooglePhotosManager.ts | 9 +++++++-- src/server/ApiManagers/UploadManager.ts | 3 ++- src/server/ApiManagers/UtilManager.ts | 7 ++++++- 4 files changed, 19 insertions(+), 6 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts index 1bb84f374..fad5e6789 100644 --- a/src/server/ApiManagers/DownloadManager.ts +++ b/src/server/ApiManagers/DownloadManager.ts @@ -254,11 +254,13 @@ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hiera // and dropped in the browser and thus hosted remotely) so we upload it // to our server and point the zip file to it, so it can bundle up the bytes const information = await DashUploadUtils.UploadImage(result); - path = information.serverAccessPaths[SizeSuffix.Original]; + path = information instanceof Error ? "" : information.serverAccessPaths[SizeSuffix.Original]; } // write the file specified by the path to the directory in the // zip file given by the prefix. - file.file(path, { name: documentTitle, prefix }); + if (path) { + file.file(path, { name: documentTitle, prefix }); + } } else { // we've hit a collection, so we have to recurse await writeHierarchyRecursive(file, result, `${prefix}/${documentTitle}`); diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 107542ce2..1727cc5a6 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -88,8 +88,13 @@ export default class GooglePhotosManager extends ApiManager { if (contents) { const completed: Opt[] = []; for (const item of contents.mediaItems) { - const { contentSize, ...attributes } = await DashUploadUtils.InspectImage(item.baseUrl); - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize!); + const results = await DashUploadUtils.InspectImage(item.baseUrl); + if (results instanceof Error) { + failed++; + continue; + } + const { contentSize, ...attributes } = results; + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize); if (!found) { const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); if (upload) { diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index a92b613b7..4d09528f4 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -65,7 +65,8 @@ export default class UploadManager extends ApiManager { secureHandler: async ({ req, res }) => { const { sources } = req.body; if (Array.isArray(sources)) { - return res.send(await Promise.all(sources.map(url => DashUploadUtils.UploadImage(url)))); + const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source))); + return res.send(results); } res.send(); } diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index a0d0d0f4b..d7b085a30 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -47,7 +47,12 @@ export default class UtilManager extends ApiManager { const onResolved = (stdout: 
string) => { console.log(stdout); res.redirect("/"); }; const onRejected = (err: any) => { console.error(err.message); res.send(err); }; - const tryPython3 = () => command_line('python3 scraper.py', cwd).then(onResolved, onRejected); + const tryPython3 = (reason: any) => { + console.log("Initial scraper failed for the following reason:"); + console.log(red(reason.Error)); + console.log("Falling back to python3..."); + command_line('python3 scraper.py', cwd).then(onResolved, onRejected); + }; return command_line('python scraper.py', cwd).then(onResolved, tryPython3); }, -- cgit v1.2.3-70-g09d2 From b50716cde09e18a6d6e923821929ee14fd7089ca Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Fri, 7 Feb 2020 20:01:31 -0500 Subject: scraper --- src/scraping/buxton/json/buxton.json | 42 +++--- src/scraping/buxton/json/incomplete.json | 216 +++++++++++++++---------------- src/scraping/buxton/node_scraper.ts | 2 +- src/server/ApiManagers/UtilManager.ts | 2 +- 4 files changed, 128 insertions(+), 134 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/scraping/buxton/json/buxton.json b/src/scraping/buxton/json/buxton.json index ff618f32c..166f4bd49 100644 --- a/src/scraping/buxton/json/buxton.json +++ b/src/scraping/buxton/json/buxton.json @@ -187,27 +187,6 @@ "shortDescription": "The OLPC XO-1 is very innovative device that nevertheless raises serious issues about technology and social responsibility. It is included in the collection primarily as a warning against technological hubris, and the fact that no technologies are neutral from a social-cultural perspective.", "longDescription": "IntroductionI have this computer in my collection as a reminder of the delicate relationship between object and purpose, and how no matter how well one does on the former, it will likely have no impact on making a wanting concept achieve the stated (and even valid) purpose any better. I include it in the collection as a cautionary tale of how the object may help sell a concept, regardless how ill-conceived – even to those who should know better, had they applied the most basic critical thinking. For consumers, investors and designers, its story serves as a cautionary reminder to the importance of cultivating and retaining a critical mind and questioning perspective, regardless of how intrinsically seductive or well-intentioned a technology may be. From the perspective of hardware and software, what the One Laptop Per Child (OLPC) project was able to accomplish is impressive. In general, the team delivered a computer that could be produced at a remarkably low price – even if about double that which was targeted. Specifically, the display, for example, is innovative, and stands out due to its ability to work both in the bright sun (reflective) as well as in poorly lit spaces (emissive) – something that goes beyond pretty much anything else that is available on today’s (2017) slate computers or e-readers. In short, some excellent work went into this machine, something that is even more impressive, given the nature of the organization from which it emerged. The industrial design was equally impressive. Undertaken by Yves Behar’s FuseprojectUltimately, however, the machine was a means to an end, not the end itself. Rather than a device, the actual mission of the OLPC project was: … to empower the world's poorest children through education. 
Yet, as described by in their materials, the computer was intended to play a key role in this: With access to this type of tool [the computer], children are engaged in their own education, and learn, share, and create together. They become connected to each other, to the world and to a brighter future. Hence, making a suitable computer suitable to that purpose and the conditions where it would be used, at a price point that would enable broad distribution, was a key part of the project. The Underlying Belief System of the OLPC ProjectSince they are key to the thinking behind the OLPC project, I believe if fair to frame my discussion around the following four questions: Will giving computers to kids in the developing world improve their education? Will having a thus better-educated youth help bring a society out of poverty? Can that educational improvement be accomplished by giving the computers to the kids, with no special training for teachers? Should this be attempted on a global scale without any advance field trials or pilot studies? From the perspective of the OLPC project, the answer to every one of these questions is an unequivocal “yes”. In fact, as we shall see, any suggestion to the contrary is typically answered by condescension and/or mockery. The answers appear to be viewed as self-evident and not worth even questioning. Those who have not subscribed to this doctrine might call such a viewpoint hubris. What staggers me is how the project got so far without the basic assumptions being more broadly questioned, much less such questions being seriously addressed by the proponents. How did seemingly otherwise people commit to the project, through their labour or financial investment, given the apparently naïve and utopian approach that it took? Does the desire to do good cloud judgment that much? Are we that dazzled by a cool technology or big hairy audacious goal? Or by a charismatic personality? To explain my concern, and what this artifact represents to me, let me just touch on the four assumptions on which the project was founded. Will giving computers to kids in the developing world improve education? The literature on this question is, at best, mixed. What is clear is that one cannot make any assumption that such improvements will occur, regardless of whether one is talking about the developing world or suburban USA. For example, in January 2011, The World Bank published the following study: Can Computers Help Students Learn? From Evidence to Policy, January 2011, Number 4, The World Bank. A public-private partnership in Colombia, called Computers for Education, was created in 2002 to increase the availability of computers in public schools for use in education. Since starting, the program has installed more than 73, 000 computers in over 6, 300 public schools in more than 1, 000 municipalities. By 2008, over 2 million students and 83, 000 teachers had taken part. This document reports on a two-year study to determine the impact of the program on student performance. Students in schools that received the computers and teacher training did not do measurably better on tests than students in the control group. Nor was there a positive effect on other measures of learning. Researchers did not find any difference in test scores when they looked at specific components of math and language studies, such as algebra and geometry, and grammar and paraphrase ability in Spanish. 
But report also notes that results of such studies are mixed: Studies on the relationship between using computers in the classroom and improved test scores in developing countries give mixed results: A review of Israel’s Tomorrow-98 program in the mid-1990s, which put computers in schools across the country, did not find any impact on math and Hebrew language scores. But in India, a study of a computer-assisted learning program showed a significant positive impact on math scores. One thing researchers agree on, more work is needed in this field. Before moving on, a search of the literature will show that these results are consistent with those that were available in the literature at the time that the project was started. The point that I am making is not that the OLPC project could not be made to work; rather, that it was wrong to assume that it would do so without spending at least as much time designing the process to bring that about, as was expended designing the computer itself. Risk is fine, and something that can be mitigated. But diving in under the assumption that it would just work is not calculated risk, it is gambling - with other people’s lives, education and money. Will a better educated population help bring a society out of poverty? I am largely going to punt on this question. The fact is, I would be hard pressed to argue against education. But let us grant that improving education in the developing world is a good thing. The appropriate question is: is the approach of the OLPC project a reasonable or responsible way to disburse the limited resources that are available to address the educational challenges of the developing world? At the very least, I would suggest that this is a topic worthy of debate. An a priori assumption that giving computers is the right solution is akin to the, “If you build it they will come” approach seen in the movie, Field of Dreams. The problem here is that this is not a movie. There are real lives and futures that are at stake here – lives of those who cannot afford to see the movie, much less have precious resources spent on projects that are not well thought through. Can that improvement be accomplished by just giving the computers to the kids without training teachers? Remarkably, the OLPC Project’s answer is an explicit, “Yes”. In a TED talk filmed in December 2007, the founder of the OLPC initiative, Nicholas Negroponte states: “When people tell me, you know, who’s going to teach the teachers to teach the kids, I say to myself, “What planet do you come from? ” Okay, there’s not a person in this room [the TED Conference], I don’t care how techy you are, there’s not a person in this room that doesn’t give their laptop or cell phone to a kid to help them debug it. Okay, we all need help, even those of us who are very seasoned. ”Let us leave aside the naïvete of this statement stemming from the lack of distinction between ability to use applications and devices versus the ability to create and shape them. A failure of logic remains in that those unseasoned kids are part of “us”, as in “we all need help”. Where do the kids go for help? To other kids? What if they don’t know? Often they won’t. After all, the question may well have to do with a concept in calculus, rather than how to use the computer. What then? No answer is offered. Rather, those who dare raise the serious and legitimate concerns regarding teacher preparation are mockingly dismissed as coming from another planet! Well, perhaps they are. 
But in that case, there should at least be some debate as to who lives on which planet. Is it the people raising the question or the one dismissing the concern that lives in the real world of responsible thought and action? Can this all be accomplished without any advance field trials? Should one just immediately commit to international deployment of the program? As recently as September 2009, Negroponte took part in a panel discussion where he spoke on this matter. He states: I'd like you to imagine that I told you \"I have a technology that is going to change the quality of life. \" And then I tell you \"Really the right thing to do is to set up a pilot project to test my technology. And then the second thing to do is, once the pilot has been running for some period of time, is to go and measure very carefully the benefits of that technology. \"And then I am to tell you that what we are going to is very scientifically evaluate this technology, with control groups - giving it to some, giving it to others. And this all is very reasonable until I tell you the technology is electricity. And you say \"Wait, you don't have to do that!\"But you don't have to do that with laptops and learning either. And the fact that somebody in the room would say the impact is unclear is to me amazing - unbelievably amazing. There's not a person in this room who hasn't bought a laptop for their child, if they could afford it. And you don't know somebody who hasn't done it, if they can afford it. So there's only one question on the table and that's, “How to afford it? ” That's the only question. There is no other question - it's just the economics. And so, when One Laptop Per Child started, I didn't have the picture quite as clear as that, but we did focus on trying to get the price down. We did focus on those things. Unfortunately, Negroponte demonstrates his lack of understanding of both the history of electricity and education in this example. His historical mistake is this: yes, it was pretty obvious that electricity could bring many benefits to society. But what happened when Edison did exactly what Negroponte advocates? He almost lost his company due to his complete (but mistaken) conviction that DC, rather the AC was the correct technology to pursue. As with electricity, yes, it is rather obvious that education could bring significant benefits to the developing world. But in order to avoid making the same kind of expensive mistake that Edison did, perhaps one might want to do one’s best to make sure that the chosen technology is the AC, rather than DC, of education. A little more research, and a little less hubris might have put the investments in Edison and the OLPC to much better use. But the larger question is this: in what way is it responsible for the wealthy western world to advocate an untested and expensive (in every sense) technological solution on the poorest nations in the world? If history has taught us anything, it has taught us that just because our intentions are good, the same is not necessarily true for consequences of our actions. Later in his presentation, Negroponte states: … our problems are swimming against very naïve views of education. With this, I have to agree. It is just whose views on education are naïve, and how can such views emerge from MIT, no less, much less pass with so little critical scrutiny by the public, the press, participants, and funders? 
In an interview with Paul Marks, published in the New Scientist in December 2008, we see the how the techno-centric aspect of the project plays into the ostensible human centric purpose of the project. Negroponte’s retort regarding some of the initial skepticism that the project provoked was this: “When we first said we could build a laptop for $100 it was viewed as unrealistic and so 'anti-market' and so 'anti' the current laptops which at the time were around $1000 each, \" Negroponte said. \"It was viewed as pure bravado - but look what happened: the netbook market has developed in our wake. \" The project's demands for cheaper components such as keyboards, and processors nudged the industry into finding ways to cut costs, he says. \"What started off as a revolution became a culture. \"Surprise, yes, computers get smaller, faster, and cheaper over the course of time, and yes, one can even grant that the OLPC project may have accelerated that inevitable move. And, I have already stated my admiration and respect for the quality of the technology that was developed. But in the context of the overall objectives of the project, the best that one can say is, “Congratulations on meeting a milestone. ” However, by the same token, one might also legitimately question if starting with the hardware was not an instance of putting the cart before the horse. Yes, it is obviously necessary to have portable computers in the first place, before one can introduce them into the classroom, home, and donate them to children in the developing world. But it is also the case that small portable computers were already in existence and at the time that the project was initiated. While a factor of ten more expensive than the eventual target price, they were both available and adequate to support limited preliminary testing of the underlying premises of the project in an affordable manner. That is, before launching into a major - albeit well-intentioned – hardware development project, it may have been prudent to have tested the underlying premises of its motivation. Here we have to return to the raison d’être of the initiative: … to empower the world's poorest children through educationHence, the extent to which this is achieved from a given investment must be the primary metric of success, as well as the driving force of the project. Yet, that is clearly not what happened. Driven by a blind Edisonian belief in their un-tested premise, the project’s investments were overwhelmingly on the side of technology rather than pedagogy. Perhaps the nature and extent of the naïve (but well-meaning) utopian dream underlying the project is captured in the last part of the interview, above: Negroponte believes that empowering children and their parents with the educational resources offered by computers and the Internet will lead to informed decisions that improve democracy. Indeed, it has led to some gentle ribbing between himself and his brother: John Negroponte - currently deputy secretary of state in the outgoing Bush administration and the first ever director of national intelligence at the National Security Agency. \"I often joke with John that he can bring democracy his way - and I'll bring it mine, \" he says. Apparently providing inexpensive laptops to children in the developing world is not only going to raise educational standards, eradicate poverty, it is also going to bring democracy! 
All that, with no mention of the numerous poor non-democratic countries that have literacy levels equal to or higher than the USA (Cuba might be one reasonable example). The words naïve technological-utopianism come to mind. I began by admitting that I was conflicted in terms of this project. From the purely technological perspective, there is much to admire in the project’s accomplishments. Sadly, that was not the project’s primary objective. What appears to be missing throughout is an inability to distinguish between the technology and the purpose to which is was intended to serve. My concern in this regard is reflected in a paper by Warschauer & Ames(2010). The analysis reveals that provision of individual laptops is a utopian vision for the children in the poorest countries, whose educational and social futures could be more effectively improved if the same investments were instead made on more sustainable and proven interventions. Middle- and high-income countries may have a stronger rationale for providing individual laptops to children, but will still want to eschew OLPC’s technocentric vision. In summary, OLPC represents the latest in a long line of technologically utopian development schemes that have unsuccessfully attempted to solve complex social problems with overly simplistic solutions. There is a delicate relationship between technology and society, culture, ethics, and values. What this case study reflects is the fact that technologies are not neutral. They never are. Hence, technological initiatives must be accompanied by appropriate social, cultural and ethical considerations – especially in projects such as this where the technologies are being introduced into particularly vulnerable societies. That did not happen here, The fact that this project got the support that it did, and has gone as far as it has, given the way it was approached, is why this reminder – in the form of this device – is included in the collection. And if anyone ever wonders why I am so vocal about the need for public discourse around technology, one need look no further than the OLPC project." }, - { - "title": "Blue Orb Inc. OrbiTouch", - "company": "Blue Orb Inc", - "year": 2002, - "primaryKey": [ - "Joystick" - ], - "secondaryKey": [ - "Keyboard" - ], - "originalPrice": 695, - "degreesOfFreedom": 4, - "dimensions": { - "length": 482.6, - "width": 228.6, - "height": 74.2, - "unit": "mm" - }, - "shortDescription": "On the one hand, this device has the overall footprint of a keyboard, and it is used to enter text. And yet, it is two wide, flat, spring-loaded, self-returning joysticks, which are used to enter characters, rather than the keys that we typically employ. To add to the unconventional nature of this device, one enters text via these two joysticks by means of something called radial menus, one for each hand. And, in keeping with many keyboards, such as those with an integrated touch pad, the OrbiTouch also enables mouse like capabilities, such as pointing and selecting, also by means of one of the joysticks.", - "longDescription": "Keyboards, Joysticks and Hierarchic Radial MenusIntroductionWhen you first look at this device, you might guess that it is some kind of keyboard. It even says so on the box and on the device itself. The keyboard-like footprint might reinforce this notion, as might the alphanumeric characters in the grey ring around the circular orb on the right-hand. On the other hand, if this is a keyboard, where are the keys? 
Reading the labels more carefully sheds light on the paradox: there are none. This is a “keyless keyboard. ” Yes, this is a contradiction in terms. But it is just such curiosities that make devices like this potentially interesting. Hence, we shall take a reasonably deep dive to see what might be revealed. Let’s start by trying to understand what the rationale was for landing on this particular design. The orbiTouch was developed by an industrial engineering doctoral student at the University of Central Florida, Peter McAlindon. His goal was to develop a means of text entry that minimized hand and wrist motion. The intent was to reduce the incidence of repetitive stress injury. A fair bit of research was undertaken between initial concept and commercial release. This can be accessed online, and doing so is a worthwhile exercise. Let us now turn our eye to the physical device in order to get a sense of where all of this landed. The Physical DeviceThe orbiTouch is dominated by two large circular “orbs. ” To my eye, their form initially practically screamed out, “I am a rotary control - Turn me!” However, appearances can be deceptive. Rather than dials, the orbs turn out to be a pair of a joysticks of a particular type. Rather than the stick-tilting motion typical of most, these “joysticks” are operated by moving them along the horizontal plane. In this they are a close cousins of the Altra Felix and KA Design Turbo Puck, both also in the collection. However, in contrast with the Felix and Turbo Puck, whose handles are “floating” (if you let go, they remain in the position where you released your grip), the orbs are “self-centering. ” That is, when released, internal springs return the orbs to their neutral central “home” position. In this, they behave much like the Gravis joystick in the collection, for example. At a finer level of detail, the orbs are specific class of joystick: “8-way joy-switches”. The term”8-way” indicates that only movement along the 8 main axes of the compass are sensed. As to the word “switch”, think of each orb as 8 switches, any one of which can be turned on by moving the orb in one of the 8 directions. (Conversely, they are turned off when the orb is released and returns to home position). Unlike an analogue joystick, such switches do not, and cannot, report how far or fast the orb has moved in any particular direction, nor how much pressure might be applied in the process. While limited, joy-switches provide a less complex and lower cost solution that are appropriate in situations where this additional data is not needed. There are several examples of joy-switches in the collection, especially video game controllers. One of the most iconic examples is the Atari CX-40 controller, which is a 4-way joy-switch. To recap, the orbiTouch is a bi-manual device for entering text by means of two orb-shaped planer-moving 8-way self-centering joy-switches. Having swallowed that mouth-full, let us now explore how text is entered using such a transducer. Entering TextIn general, a character or function is input by moving the two orbs. Which character or function depends on the direction (if any) each of the orbs has moved. For example, if both the left and right orb move west (left), the character “a” is entered. On the other hand, if the right orb again moves west, but the left one east (right), then the character input is “e”. How or why this is the case can be explained with the help of some images. 
For easier reading, the figure below shows the labels around the orbs in an exploded view. Notice that for both orbs, there is a label segment for each of its 8 directions. Since the example discussed entering an “a” and an “e”, each of which involved the right orb moving west (left) let’s look at the associated label segment in even more detail. Like all of the label segments for the right orb, this one consists of six areas containing text, each with a distinct background colour: red, yellow, green, orange and blue for the letters A through E, respectively, and black for the region containing “BACKSPACE”. Now look again at previous image and notice that each of these colours matches the label associated with one of the directions of the left orb. Text is entered using a two part process. Moving the right orb to the left/west specifies that you are going to enter one of: a, b, c, d, e, or BACKSPACE. (Like most keyboards, despite the labels on the key-caps being upper case, lower-case characters are entered unless the shift key is depressed. )Moving the left orb in the direction whose label corresponds to the background colour of the desired character causes that character to be entered. Hence, with the right orb held in the left/west position, one can enter the sequence, “abcde”, followed by a Backspace, by sequentially moving the left orb west (red), north-west (yellow), north (green), north-east (orange), east (blue) and south (black). The same technique can then be used to access all the characters and commands found in the right orb’s labels. Special ModesThere is one thing to add at this point: While entering printing characters always requires the use of both orbs, some actions can be performed using the left orb only. This can be inferred by the text that accompanies some of the left orb’s labels. For example, moving the left orb north (green) in quick succession (analogous to a double-click on a mouse), indicates that SHIFT will apply to the next character entered. Likewise, doing the same thing in the south-west (grey) direction applies the Caps Lock mode, i. e. , SHIFT will be applied to all subsequent entries until the mode is cancelled. These one-handed special modes/functions are summarized in the image below. Of these, the only one that I want to discuss at the moment is the ability of the orbiTouch to switch from entering text to controlling the screen cursor. This is done by moving the left orb south (black) twice in quick succession. When this is done, the right orb controls the cursor movement – the cursor moves continuously in the direction that you move the orb. In this, any doubts that you had about me characterizing the orbs as joysticks should disappear, since this cursor control is classic joystick behaviour. One issue of note is that the label describes this as “mouse” not “joystick”, which while understandable, is incorrect. Finally, before moving on to the next topic, note that while the right orb controls the movement of the screen cursor in mouse mode, movement of the left or left/west or right/east is taken as a left and right mouse button press, respectively. Remembering that the premise here is that the hands don’t have to move from the orbiTouch in order switch between typing and pointing tasks. But that doesn’t mean that the overhead in switching between the tasks is removed. One type of overhead is just substituted for another. And, the moded nature of the orbiTouch means that the option of parallel pointing-typing actions are eliminated. 
Rather than criticism, I mention these points to indicate the need to be mindful of the trade-offs and consequences of different design decisions - consequences that the designer should be aware of. Going Meta: What’s Really Going On? I want to approach doing so by stepping back, and approaching the underlying method of “typing” by going “meta”. That is, I want to jump up a lever of abstraction, beyond the physical device (for the moment), and explain what is going on at the conceptual level. The rest of the text is in much rougher form …. What will be revealed, if we do so, is that text is entered by means of the parallel use of two 8-direction radial menus. So what is a radial menu? These are the neglected cousins of the linear menus that populate conventional graphical user interfaces. The difference is that one makes a selection by the direction of movement, rather than the distance (as in the case with linear menus). It turns out that people can learn these quickly if the directions correspond to the 8 main points of the compass. For example, in a program menu, moving up (North) might mean Print, down (South) could mean Save, and moving down to the right (South East), Save As. Like linear menus, these menus can also be hierarchic. So, for example, after moving South East in order to specify Save As, a stroke to the left (West) might mean that it should be saved as a PDF file, whereas it would be saved as a Plain Text file if the secondary connected stroke was to the right (East). The reason for this brief tutorial on radial menus is that they pretty much define at the conceptual level how text is entered using the orbiTouch. The eight directions that you can move the orbs defines the menu item selected. And, by having the actual output depending on the combination of the selection made by each of the two orbs, the device can perhaps be best described as entering text using a two-level hierarchic radial menu, where menu selections are made using two planar moving 8-way joy switches. That is quite a mouth-full, and it has taken all of the text above to bring us to the point where there is a reasonable chance that it makes sense. And we still haven’t gotten into the details! it uses hierarchic (2-level) radial menus, but where the hierarchy is space multiplexed, rather than time multiplexed. That is, rather than doing one menu selection after the other, you do them simultaneously, by using a different hand to articulate the selection from each of the two menus. (While the text on the description is sparse still, look at the training cards, etc. and the photos on the page. )At the level of the mental model, there is no question in my mind (actually, I shouldn’t say that, because I am supposed to be an objective researcher who needs empirical data to inform decisions, but what the hell!) that you could give someone who knew how to use this device two isotonic joysticks, such as used with a video game controller, and they would be able to enter text just as fast as with this device. Furthermore, I am sure that if one had a slate capable of sensing both touch and stylus simultaneously, I am certain that the skill would transfer equally to using a touch radial gesture in the non-dominant hand, and stylus (or touch) radial gesture with the other. At the basic level, it is a 2-level radial menu, but where each level is operated independently and quasi-simultaneously by a different one of the operator’s two hands. 
Level 1: Right HandThis lets the operator select one of eight regionsThe label for each region consists of 6 characters (5 printing and one “special)In selecting one of the regions, one is not selecting any one of the characters of that region; rather, they are just indicating that the character that they want is one of the six in that regionEach of the characters in a region has a different background colour: blue, orange, green, yellow, red and black. Level 2: Left HandThis lets the operator select one of eight regionsEach region is labeled by a single colourAmong the colours that label the eight regions are the same ones used as character background colours in the regions of the right-hand control: blue, orange, green, yellow, red and blackBy the left hand selecting one of these six colours, one indicates which character is to be entered from among the six characters in the region indicated by the right hand – the selected character being the one whose background colour corresponds to the colour selected by the left hand. Hence, there are two 8-way, single level radial menus used. I believe it fair to say that it is, nevertheless, a 2 level radial menu, since both need to be used in order to enter one token. In actual fact, things are more complex, since none of the above covers issues such as all of the special character, punctuation, etc. , that do not appear on the labels of the right hand. To keep things brief, this is why only 6 of the left-hand menu options are used in what is discussed above. The other two options are needed to fill in the gaps. And, even then, the device resorts to something like double-clicks to get special modes and capabilities. For example, double clicking the black (south) region of the left hand turns the right-hand dome into a pointing device, i. e. , a mouse substitute for pointing, etc. I went through the – as it turned out – interesting exercise of translating the two parallel depth-1 radial menus of the orbiTouch UI into two different depth-2, breadth-8 hierarchic radial menus. You can see them in the attached images. The one assumes that the LH “dome” as the first-level selection, and then make the second-level selection with the right-hand dome. The other does the opposite, i. e. , the right-hand dome selection is the first level. It is interesting to compare the two with each other, as well as with both the labeling on the orbiTouch and the Quickstart documentation: The RH level-1 version seems easier to get rudimentary understanding compared to the LH due to clustering of letters and numbers on outer menus. Likewise, for the special characters that are the upper case of the numbersThe physical device is fine for letting you hunt-and-peck, so to speak, for characters, but it is useless for numbers, and most special characters. The documentation provided with the Quick Start (attached is not especially useful in terms of providing heuristics for memorization. While the orbiTouch certainly uses radial menus, it decidedly does not employ marking menus. One of the key things missing is the ability to check and correct before committing to an input, and the lack of ability to backtrack to the start, and therefore abort without entering anything. One thing that I have learned from this exercise is the difference that results due to having self-returning joysticks. Gestures don’t have that attribute. It matters esp w. r. t. the last point. 
What I like about this story, is how looking at something seemingly very different at the right level of abstraction, teaches us/me something new about something I was supposed to be an expert in. That is, that 2-level hierarchic marking menus can be achieved by two simultaneous single-level MMs. This is why I have the collection, and why I love what I do. There is still delight, despite being a 63-year-old geezer grandfather. The orbiTouch Keyless Keyboard was first known as the Keybowl, and the company was formerly known as Keybowl Inc. , and then Blue Orb Inc." - }, { "title": "TASA Model 55 ASCII Keyboard", "company": "TASA (Touch Activated Switch Arrays)", @@ -257,5 +236,26 @@ }, "shortDescription": "The Twiddler is a one-hand chord keyboard with integrated pointing capability, which can control the cursor in a joystick-like manner. This was a favourite device of the early Cyborg wearable-computer community.", "longDescription": "……. . Note: Lyons, et al. abstract: An experienced user of the Twiddler, a one--handed chording keyboard, averages speeds of 60 words per minute with letter--by--letter typing of standard test phrases. This fast typing rate coupled with the Twiddler's 3x4 button design, similar to that of a standard mobile telephone, makes it a potential alternative to multi--tap for text entry on mobile phones. Despite this similarity, there is very little data on the Twiddler's performance and learnability. We present a longitudinal study of novice users' learning rates on the Twiddler. Ten participants typed for 20 sessions using two different methods. Each session is composed of 20 minutes of typing with multi--tap and 20 minutes of one--handed chording on the Twiddler. We found that users initially have a faster average typing rate with multi--tap; however, after four sessions the difference becomes negligible, and by the eighth session participants type faster with chording on the Twiddler. Furthermore, after 20 sessions typing rates for the Twiddler are still increasing." + }, + { + "title": "Blue Orb Inc. OrbiTouch", + "company": "Blue Orb Inc", + "year": 2002, + "primaryKey": [ + "Joystick" + ], + "secondaryKey": [ + "Keyboard" + ], + "originalPrice": 695, + "degreesOfFreedom": 4, + "dimensions": { + "length": 482.6, + "width": 228.6, + "height": 74.2, + "unit": "mm" + }, + "shortDescription": "On the one hand, this device has the overall footprint of a keyboard, and it is used to enter text. And yet, it is two wide, flat, spring-loaded, self-returning joysticks, which are used to enter characters, rather than the keys that we typically employ. To add to the unconventional nature of this device, one enters text via these two joysticks by means of something called radial menus, one for each hand. And, in keeping with many keyboards, such as those with an integrated touch pad, the OrbiTouch also enables mouse like capabilities, such as pointing and selecting, also by means of one of the joysticks.", + "longDescription": "Keyboards, Joysticks and Hierarchic Radial MenusIntroductionWhen you first look at this device, you might guess that it is some kind of keyboard. It even says so on the box and on the device itself. The keyboard-like footprint might reinforce this notion, as might the alphanumeric characters in the grey ring around the circular orb on the right-hand. On the other hand, if this is a keyboard, where are the keys? Reading the labels more carefully sheds light on the paradox: there are none. This is a “keyless keyboard. 
” Yes, this is a contradiction in terms. But it is just such curiosities that make devices like this potentially interesting. Hence, we shall take a reasonably deep dive to see what might be revealed. Let’s start by trying to understand what the rationale was for landing on this particular design. The orbiTouch was developed by an industrial engineering doctoral student at the University of Central Florida, Peter McAlindon. His goal was to develop a means of text entry that minimized hand and wrist motion. The intent was to reduce the incidence of repetitive stress injury. A fair bit of research was undertaken between initial concept and commercial release. This can be accessed online, and doing so is a worthwhile exercise. Let us now turn our eye to the physical device in order to get a sense of where all of this landed. The Physical DeviceThe orbiTouch is dominated by two large circular “orbs. ” To my eye, their form initially practically screamed out, “I am a rotary control - Turn me!” However, appearances can be deceptive. Rather than dials, the orbs turn out to be a pair of a joysticks of a particular type. Rather than the stick-tilting motion typical of most, these “joysticks” are operated by moving them along the horizontal plane. In this they are a close cousins of the Altra Felix and KA Design Turbo Puck, both also in the collection. However, in contrast with the Felix and Turbo Puck, whose handles are “floating” (if you let go, they remain in the position where you released your grip), the orbs are “self-centering. ” That is, when released, internal springs return the orbs to their neutral central “home” position. In this, they behave much like the Gravis joystick in the collection, for example. At a finer level of detail, the orbs are specific class of joystick: “8-way joy-switches”. The term”8-way” indicates that only movement along the 8 main axes of the compass are sensed. As to the word “switch”, think of each orb as 8 switches, any one of which can be turned on by moving the orb in one of the 8 directions. (Conversely, they are turned off when the orb is released and returns to home position). Unlike an analogue joystick, such switches do not, and cannot, report how far or fast the orb has moved in any particular direction, nor how much pressure might be applied in the process. While limited, joy-switches provide a less complex and lower cost solution that are appropriate in situations where this additional data is not needed. There are several examples of joy-switches in the collection, especially video game controllers. One of the most iconic examples is the Atari CX-40 controller, which is a 4-way joy-switch. To recap, the orbiTouch is a bi-manual device for entering text by means of two orb-shaped planer-moving 8-way self-centering joy-switches. Having swallowed that mouth-full, let us now explore how text is entered using such a transducer. Entering TextIn general, a character or function is input by moving the two orbs. Which character or function depends on the direction (if any) each of the orbs has moved. For example, if both the left and right orb move west (left), the character “a” is entered. On the other hand, if the right orb again moves west, but the left one east (right), then the character input is “e”. How or why this is the case can be explained with the help of some images. For easier reading, the figure below shows the labels around the orbs in an exploded view. Notice that for both orbs, there is a label segment for each of its 8 directions. 
Since the example discussed entering an “a” and an “e”, each of which involved the right orb moving west (left) let’s look at the associated label segment in even more detail. Like all of the label segments for the right orb, this one consists of six areas containing text, each with a distinct background colour: red, yellow, green, orange and blue for the letters A through E, respectively, and black for the region containing “BACKSPACE”. Now look again at previous image and notice that each of these colours matches the label associated with one of the directions of the left orb. Text is entered using a two part process. Moving the right orb to the left/west specifies that you are going to enter one of: a, b, c, d, e, or BACKSPACE. (Like most keyboards, despite the labels on the key-caps being upper case, lower-case characters are entered unless the shift key is depressed. )Moving the left orb in the direction whose label corresponds to the background colour of the desired character causes that character to be entered. Hence, with the right orb held in the left/west position, one can enter the sequence, “abcde”, followed by a Backspace, by sequentially moving the left orb west (red), north-west (yellow), north (green), north-east (orange), east (blue) and south (black). The same technique can then be used to access all the characters and commands found in the right orb’s labels. Special ModesThere is one thing to add at this point: While entering printing characters always requires the use of both orbs, some actions can be performed using the left orb only. This can be inferred by the text that accompanies some of the left orb’s labels. For example, moving the left orb north (green) in quick succession (analogous to a double-click on a mouse), indicates that SHIFT will apply to the next character entered. Likewise, doing the same thing in the south-west (grey) direction applies the Caps Lock mode, i. e. , SHIFT will be applied to all subsequent entries until the mode is cancelled. These one-handed special modes/functions are summarized in the image below. Of these, the only one that I want to discuss at the moment is the ability of the orbiTouch to switch from entering text to controlling the screen cursor. This is done by moving the left orb south (black) twice in quick succession. When this is done, the right orb controls the cursor movement – the cursor moves continuously in the direction that you move the orb. In this, any doubts that you had about me characterizing the orbs as joysticks should disappear, since this cursor control is classic joystick behaviour. One issue of note is that the label describes this as “mouse” not “joystick”, which while understandable, is incorrect. Finally, before moving on to the next topic, note that while the right orb controls the movement of the screen cursor in mouse mode, movement of the left or left/west or right/east is taken as a left and right mouse button press, respectively. Remembering that the premise here is that the hands don’t have to move from the orbiTouch in order switch between typing and pointing tasks. But that doesn’t mean that the overhead in switching between the tasks is removed. One type of overhead is just substituted for another. And, the moded nature of the orbiTouch means that the option of parallel pointing-typing actions are eliminated. 
Rather than criticism, I mention these points to indicate the need to be mindful of the trade-offs and consequences of different design decisions - consequences that the designer should be aware of. Going Meta: What’s Really Going On? I want to approach doing so by stepping back, and approaching the underlying method of “typing” by going “meta”. That is, I want to jump up a lever of abstraction, beyond the physical device (for the moment), and explain what is going on at the conceptual level. The rest of the text is in much rougher form …. What will be revealed, if we do so, is that text is entered by means of the parallel use of two 8-direction radial menus. So what is a radial menu? These are the neglected cousins of the linear menus that populate conventional graphical user interfaces. The difference is that one makes a selection by the direction of movement, rather than the distance (as in the case with linear menus). It turns out that people can learn these quickly if the directions correspond to the 8 main points of the compass. For example, in a program menu, moving up (North) might mean Print, down (South) could mean Save, and moving down to the right (South East), Save As. Like linear menus, these menus can also be hierarchic. So, for example, after moving South East in order to specify Save As, a stroke to the left (West) might mean that it should be saved as a PDF file, whereas it would be saved as a Plain Text file if the secondary connected stroke was to the right (East). The reason for this brief tutorial on radial menus is that they pretty much define at the conceptual level how text is entered using the orbiTouch. The eight directions that you can move the orbs defines the menu item selected. And, by having the actual output depending on the combination of the selection made by each of the two orbs, the device can perhaps be best described as entering text using a two-level hierarchic radial menu, where menu selections are made using two planar moving 8-way joy switches. That is quite a mouth-full, and it has taken all of the text above to bring us to the point where there is a reasonable chance that it makes sense. And we still haven’t gotten into the details! it uses hierarchic (2-level) radial menus, but where the hierarchy is space multiplexed, rather than time multiplexed. That is, rather than doing one menu selection after the other, you do them simultaneously, by using a different hand to articulate the selection from each of the two menus. (While the text on the description is sparse still, look at the training cards, etc. and the photos on the page. )At the level of the mental model, there is no question in my mind (actually, I shouldn’t say that, because I am supposed to be an objective researcher who needs empirical data to inform decisions, but what the hell!) that you could give someone who knew how to use this device two isotonic joysticks, such as used with a video game controller, and they would be able to enter text just as fast as with this device. Furthermore, I am sure that if one had a slate capable of sensing both touch and stylus simultaneously, I am certain that the skill would transfer equally to using a touch radial gesture in the non-dominant hand, and stylus (or touch) radial gesture with the other. At the basic level, it is a 2-level radial menu, but where each level is operated independently and quasi-simultaneously by a different one of the operator’s two hands. 
Level 1: Right HandThis lets the operator select one of eight regionsThe label for each region consists of 6 characters (5 printing and one “special)In selecting one of the regions, one is not selecting any one of the characters of that region; rather, they are just indicating that the character that they want is one of the six in that regionEach of the characters in a region has a different background colour: blue, orange, green, yellow, red and black. Level 2: Left HandThis lets the operator select one of eight regionsEach region is labeled by a single colourAmong the colours that label the eight regions are the same ones used as character background colours in the regions of the right-hand control: blue, orange, green, yellow, red and blackBy the left hand selecting one of these six colours, one indicates which character is to be entered from among the six characters in the region indicated by the right hand – the selected character being the one whose background colour corresponds to the colour selected by the left hand. Hence, there are two 8-way, single level radial menus used. I believe it fair to say that it is, nevertheless, a 2 level radial menu, since both need to be used in order to enter one token. In actual fact, things are more complex, since none of the above covers issues such as all of the special character, punctuation, etc. , that do not appear on the labels of the right hand. To keep things brief, this is why only 6 of the left-hand menu options are used in what is discussed above. The other two options are needed to fill in the gaps. And, even then, the device resorts to something like double-clicks to get special modes and capabilities. For example, double clicking the black (south) region of the left hand turns the right-hand dome into a pointing device, i. e. , a mouse substitute for pointing, etc. I went through the – as it turned out – interesting exercise of translating the two parallel depth-1 radial menus of the orbiTouch UI into two different depth-2, breadth-8 hierarchic radial menus. You can see them in the attached images. The one assumes that the LH “dome” as the first-level selection, and then make the second-level selection with the right-hand dome. The other does the opposite, i. e. , the right-hand dome selection is the first level. It is interesting to compare the two with each other, as well as with both the labeling on the orbiTouch and the Quickstart documentation: The RH level-1 version seems easier to get rudimentary understanding compared to the LH due to clustering of letters and numbers on outer menus. Likewise, for the special characters that are the upper case of the numbersThe physical device is fine for letting you hunt-and-peck, so to speak, for characters, but it is useless for numbers, and most special characters. The documentation provided with the Quick Start (attached is not especially useful in terms of providing heuristics for memorization. While the orbiTouch certainly uses radial menus, it decidedly does not employ marking menus. One of the key things missing is the ability to check and correct before committing to an input, and the lack of ability to backtrack to the start, and therefore abort without entering anything. One thing that I have learned from this exercise is the difference that results due to having self-returning joysticks. Gestures don’t have that attribute. It matters esp w. r. t. the last point. 
What I like about this story, is how looking at something seemingly very different at the right level of abstraction, teaches us/me something new about something I was supposed to be an expert in. That is, that 2-level hierarchic marking menus can be achieved by two simultaneous single-level MMs. This is why I have the collection, and why I love what I do. There is still delight, despite being a 63-year-old geezer grandfather. The orbiTouch Keyless Keyboard was first known as the Keybowl, and the company was formerly known as Keybowl Inc. , and then Blue Orb Inc." } ] \ No newline at end of file diff --git a/src/scraping/buxton/json/incomplete.json b/src/scraping/buxton/json/incomplete.json index a9ed39e21..595412e56 100644 --- a/src/scraping/buxton/json/incomplete.json +++ b/src/scraping/buxton/json/incomplete.json @@ -1,8 +1,4 @@ [ - { - "filename": "3Dconnexion_SpaceNavigator.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "3DMag.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", @@ -22,6 +18,10 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, + { + "filename": "3Dconnexion_SpaceNavigator.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, { "filename": "3MErgo.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", @@ -29,25 +29,24 @@ "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, { - "filename": "Abaton.docx", + "filename": "ADB2.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, { - "filename": "Active.docx", + "filename": "AWrock.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, { - "filename": "ADB2.docx", + "filename": "Abaton.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, { - "filename": "adecm.docx", + "filename": "Active.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", @@ -71,10 +70,6 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "Apple_iPhone.docx", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "Apple_Mac_Portable-Katy’s MacBook Air-2.docx", "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", @@ -95,10 +90,8 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, { - "filename": "AWrock.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
+ "filename": "Apple_iPhone.docx", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, { "filename": "BAT.docx", @@ -138,11 +131,6 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "Citizen_LCl_914.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, { "filename": "Citizen_LC_909.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", @@ -153,6 +141,11 @@ "filename": "Citizen_LC_913.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." }, + { + "filename": "Citizen_LCl_914.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, { "filename": "CoolPix.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", @@ -173,15 +166,6 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "eMate.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "Emotiv.docx", "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", @@ -236,6 +220,24 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." }, + { + "filename": "GRiD1550-Katy’s MacBook Air-2.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "GRiD1550-Katy’s MacBook Air.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "GRiD1550.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, { "filename": "Gavilan_SC.docx", "company": "ERR__COMPANY__: outer match wasn't captured.", @@ -255,30 +257,9 @@ "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, { - "filename": "gravis.docx", - "year": "ERR__YEAR__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
- }, - { - "filename": "GRiD1550-Katy’s MacBook Air-2.docx", + "filename": "HTC_Touch.docx", "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "GRiD1550-Katy’s MacBook Air.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "GRiD1550.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." }, { "filename": "Helios-Klimax.docx", @@ -293,11 +274,6 @@ "filename": "Honeywell_T86.docx", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "HTC_Touch.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, { "filename": "IBMTrack.docx", "year": "ERR__YEAR__: outer match was captured.", @@ -339,24 +315,6 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "iGesture.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "iGrip.docx", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "iLiad.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, { "filename": "Joyboard.docx", "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", @@ -388,6 +346,19 @@ "filename": "M1.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured." }, + { + "filename": "MS-1_Stereoscope.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "MWB_Braille_Writer.docx", + "company": "ERR__COMPANY__: outer match wasn't captured.", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, { "filename": "MaltronLH.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", @@ -436,19 +407,6 @@ "filename": "MousePen.docx", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
}, - { - "filename": "MS-1_Stereoscope.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "MWB_Braille_Writer.docx", - "company": "ERR__COMPANY__: outer match wasn't captured.", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, { "filename": "NB75D.docx", "year": "ERR__YEAR__: outer match was captured.", @@ -487,16 +445,16 @@ "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, + { + "filename": "PARCkbd.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." + }, { "filename": "PadMouse.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "PARCkbd.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, { "filename": "Philco_Mystery_Control.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", @@ -507,12 +465,6 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "ProAgio (1).docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "ProAgio.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", @@ -530,13 +482,6 @@ "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "round.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "SafeType_Kbd.docx", "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." @@ -549,6 +494,11 @@ "filename": "SurfMouse.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." }, + { + "filename": "TPARCtab.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, { "filename": "The_Tap.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", @@ -562,8 +512,52 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, { - "filename": "TPARCtab.docx", + "filename": "adecm.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
+ }, + { + "filename": "eMate.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "gravis.docx", + "year": "ERR__YEAR__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "iGesture.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "iGrip.docx", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "iLiad.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "round.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
} ] \ No newline at end of file diff --git a/src/scraping/buxton/node_scraper.ts b/src/scraping/buxton/node_scraper.ts index b27495045..117a0af84 100644 --- a/src/scraping/buxton/node_scraper.ts +++ b/src/scraping/buxton/node_scraper.ts @@ -295,7 +295,7 @@ namespace Doc { export function proxifyGuids(ids: string[]) { return ids.map(id => ({ fieldId: id, - __type: "prefetch_proxy" + __type: "proxy" })); } diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index d7b085a30..a4b4b14a9 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -51,7 +51,7 @@ export default class UtilManager extends ApiManager { console.log("Initial scraper failed for the following reason:"); console.log(red(reason.Error)); console.log("Falling back to python3..."); - command_line('python3 scraper.py', cwd).then(onResolved, onRejected); + return command_line('python3 scraper.py', cwd).then(onResolved, onRejected); }; return command_line('python scraper.py', cwd).then(onResolved, tryPython3); -- cgit v1.2.3-70-g09d2 From 36933b7b647a54aa7bda0600612d34b402d42919 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 8 Feb 2020 12:05:08 -0500 Subject: switching scraper --- package-lock.json | 49 ++++--- src/client/documents/Documents.ts | 7 + src/scraping/buxton/json/buxton.json | 145 --------------------- src/scraping/buxton/json/incomplete.json | 95 -------------- src/scraping/buxton/node_scraper.ts | 90 +------------ src/server/ApiManagers/UtilManager.ts | 7 + .../authentication/models/current_user_utils.ts | 1 + 7 files changed, 53 insertions(+), 341 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/package-lock.json b/package-lock.json index a33d060d2..379cd3337 100644 --- a/package-lock.json +++ b/package-lock.json @@ -647,7 +647,7 @@ }, "@types/passport": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.2.tgz", "integrity": "sha512-Pf39AYKf8q+YoONym3150cEwfUD66dtwHJWvbeOzKxnA0GZZ/vAXhNWv9vMhKyRQBQZiQyWQnhYBEBlKW6G8wg==", "requires": { "@types/express": "*" @@ -5391,7 +5391,8 @@ }, "ansi-regex": { "version": "2.1.1", - "bundled": true + "bundled": true, + "optional": true }, "aproba": { "version": "1.2.0", @@ -5409,11 +5410,13 @@ }, "balanced-match": { "version": "1.0.0", - "bundled": true + "bundled": true, + "optional": true }, "brace-expansion": { "version": "1.1.11", "bundled": true, + "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -5426,15 +5429,18 @@ }, "code-point-at": { "version": "1.1.0", - "bundled": true + "bundled": true, + "optional": true }, "concat-map": { "version": "0.0.1", - "bundled": true + "bundled": true, + "optional": true }, "console-control-strings": { "version": "1.1.0", - "bundled": true + "bundled": true, + "optional": true }, "core-util-is": { "version": "1.0.2", @@ -5537,7 +5543,8 @@ }, "inherits": { "version": "2.0.4", - "bundled": true + "bundled": true, + "optional": true }, "ini": { "version": "1.3.5", @@ -5547,6 +5554,7 @@ "is-fullwidth-code-point": { "version": "1.0.0", "bundled": true, + "optional": true, "requires": { "number-is-nan": "^1.0.0" } @@ -5559,17 +5567,20 @@ "minimatch": { "version": "3.0.4", "bundled": true, + "optional": true, "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { "version": "0.0.8", - "bundled": true + "bundled": true, + "optional": true }, "minipass": { "version": "2.9.0", 
"bundled": true, + "optional": true, "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" @@ -5586,6 +5597,7 @@ "mkdirp": { "version": "0.5.1", "bundled": true, + "optional": true, "requires": { "minimist": "0.0.8" } @@ -5666,7 +5678,8 @@ }, "number-is-nan": { "version": "1.0.1", - "bundled": true + "bundled": true, + "optional": true }, "object-assign": { "version": "4.1.1", @@ -5676,6 +5689,7 @@ "once": { "version": "1.4.0", "bundled": true, + "optional": true, "requires": { "wrappy": "1" } @@ -5751,7 +5765,8 @@ }, "safe-buffer": { "version": "5.1.2", - "bundled": true + "bundled": true, + "optional": true }, "safer-buffer": { "version": "2.1.2", @@ -5781,6 +5796,7 @@ "string-width": { "version": "1.0.2", "bundled": true, + "optional": true, "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -5798,6 +5814,7 @@ "strip-ansi": { "version": "3.0.1", "bundled": true, + "optional": true, "requires": { "ansi-regex": "^2.0.0" } @@ -5836,11 +5853,13 @@ }, "wrappy": { "version": "1.0.2", - "bundled": true + "bundled": true, + "optional": true }, "yallist": { "version": "3.1.1", - "bundled": true + "bundled": true, + "optional": true } } }, @@ -13785,7 +13804,7 @@ }, "readable-stream": { "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", "requires": { "core-util-is": "~1.0.0", @@ -15703,7 +15722,7 @@ }, "strip-ansi": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "requires": { "ansi-regex": "^2.0.0" @@ -17930,7 +17949,7 @@ }, "wrap-ansi": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "resolved": "http://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", "requires": { "string-width": "^1.0.1", diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 64dc0d8b7..d647b34e6 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -53,6 +53,7 @@ import { InkingStroke } from "../views/InkingStroke"; import { InkField } from "../../new_fields/InkField"; import { InkingControl } from "../views/InkingControl"; import { RichTextField } from "../../new_fields/RichTextField"; +import { Networking } from "../Network"; const requestImageSize = require('../util/request-image-size'); const path = require('path'); @@ -340,6 +341,12 @@ export namespace Docs { */ export namespace Create { + export async function Buxton() { + console.log(await Networking.FetchFromServer("/newBuxton")); + } + + Scripting.addGlobal(Buxton); + const delegateKeys = ["x", "y", "layoutKey", "_width", "_height", "_panX", "_panY", "_viewType", "_nativeWidth", "_nativeHeight", "dropAction", "_annotationOn", "_chromeStatus", "_forceActive", "_autoHeight", "_fitWidth", "_LODdisable", "_itemIndex", "_showSidebar", "showTitle"]; diff --git a/src/scraping/buxton/json/buxton.json b/src/scraping/buxton/json/buxton.json index 166f4bd49..8371f2cf2 100644 --- a/src/scraping/buxton/json/buxton.json +++ b/src/scraping/buxton/json/buxton.json @@ -43,54 +43,6 @@ "shortDescription": "The Mini-Touch Keyboard is a surprisingly 
rare device: a laptop-style, small-footprint keyboard with a centrally mounted touch-pad. .", "longDescription": "First released in 2003 with a PS/2 connector (ACK-540PW & ACK-540PB). USB version released in 2006 in either black (ACK-540UB) or white (ACK-540UW). Marketed under different brands, including SolidTek: http: //www. tigerdirect. com/applications/searchtools/item-details. asp? EdpNo=1472243https: //acecaddigital. com/index. php/products/keyboards/mini-keyboards/kb-540 Deltaco: https: //www. digitalimpuls. no/logitech/116652/deltaco-minitastatur-med-touchpad-usb" }, - { - "title": "Braun AG T3 Transistor Radio", - "company": "Braun AG", - "year": 1958, - "primaryKey": [ - "Radio" - ], - "secondaryKey": [ - "Handheld", - "Object", - "Reference" - ], - "originalPrice": 28.57, - "degreesOfFreedom": 2, - "dimensions": { - "length": 152, - "width": 41, - "height": 83, - "unit": "mm" - }, - "shortDescription": "The 1958 Braun T3 transistor radio, designed by Dieter Rams Dieter Rams in conjunction with the Ulm Hochschüle fur Gestaltung (School of Design). An excellent example of the international style of design of the mid-20th century, the T3 radio was the inspiration for the design language of the Apple iPod Classic.", - "longDescription": "The 1958 Braun T3 transistor radio is a classic of the international design style prevalent in the mid-20th century. By its sparse clean lines, it shares characteristics of the style seen in another familiar example, the font Helvetic, which was designed the previous year. The T3 was designed by Dieter Rams, recruited by Braun in 1955, in collaboration with the Ulm Hochschüle fur Gestaltun. . Its design language had a strong influence on that of the original Apple iPod Classic. The connection is made more obvious if one views the radio rotated 90° clockwise, as in one of the accompanying photographs. Here one can easily see the the similarity of proportions, uniformity of colour, angle of corners, location of display (audio versus visual), and the use of a flush rotary wheel controller." - }, - { - "title": "Casio CZ-101 Digital Synthesizer", - "company": "Casio", - "year": 1984, - "primaryKey": [ - "Synthesizer" - ], - "secondaryKey": [ - "Chord", - "Keyboard", - "Object", - "Reference", - "Wheel" - ], - "originalPrice": 499, - "degreesOfFreedom": 1, - "dimensions": { - "length": 20, - "width": 65.7, - "height": 58, - "unit": "mm" - }, - "shortDescription": "One of the first programable polyphonic (8 simultaneous voices) digital synthesizers for less than $500. 00. Used a form of digital synthesis known as Phase Distortion to obtain a rich variety of dynamic timbres. Could be used with batteries or plugged in to power. This one was given to me at the product launch.", - "longDescription": "One of the first programable polyphonic (8 simultaneous voices) digital synthesizers for less than $500. 00. Used a form of digital synthesis known as Phase Distortion to obtain a rich variety of dynamic timbres. Could be used with batteries or plugged in to power. This one was given to me at the product launch. The inclusion of this synthesizer in the collection is as a small reminder of the diversity of keyboard types, and especially, as an example to shed light on chord keyboards. In entering text, for example, chord keyboards are those where more than one key must be simultaneously pressed to enter a single character. Technically, this includes any keyboard with a SHIFT key. 
Interestingly, piano-type like keyboards like that on the Casio-CZ-101 probably don’t conform to this definition of chording, despite its ability to play musical chords. On the other hand, flutes and trumpets definitely do fall within the definition. Why? With piano-like keyboards, each unique note has a single unique key dedicated to it. When one plays a chord, i. e. , simultaneously presses multiple keys, the result is a chord of notes – the note associated with each depressed key sounds. On the other hand, with trumpet valves or flute keys, only one note is produced at a time. It is the combination of keys pressed (coupled with breath) which determines the pitch of that single note. This is far closer to entering text with a chord keyboard, where each chord enters a single unique character." - }, { "title": "Contour Design UniTrap ", "company": "Contour Design", @@ -136,32 +88,6 @@ "shortDescription": "This mouse is one of the first commercially available mice to be sold publicly. It is known as the Swiss mouse, and yes, the roller mechanism was designed by a Swiss watchmaker. Coincidentally, the company that made it, Depraz, is based in Apples, Switzerland. Their success in selling this mouse is what caused Logitech to switch from a software development shop to one of the world’s leading suppliers of mice and other input devices.", "longDescription": "DePraz began manufacturing in 1980, but following design built in 1979. Logitech started selling it in 1982. It was one of the first mass produced mice, one of the first available ball mice, as well as to have an optical shaft encoder – thereby improving linearity. An interesting fact, given its Swiss heritage, is that its designer, André Guignard, was trained as a Swiss watch maker. Unlike most modern mice, the DePraz, or “Swiss” mouse had a quasi-hemispherical shape. Hence, it was held in a so-called “power-grip”, much as one would grip a horizontally held ball – the thumb and small finger applying pressure on each side, with added support from the weight/friction of the palm on the back of the mouse. In this posture, the three middle fingers naturally positioning themselves over the three buttons mounted at the lower edge of the front. Largely freed of grip pressure, by grace of thumb and little finger, the middle fingers had essentially freedom of motion to independently operate the buttons. Each having a dedicated finger, the buttons could be easily pushed independently or in any combination. Like the three valves on a trumpet, this ability to “chord” extended the three physical buttons to have the power of seven. The down-side of this “turtle shell” form factor is that it placed the hand in a posture in which mouse movement relied more of the larger muscle groups of the arm to wrist, rather than wrist to fingers – the latter being the approach taken in most subsequent mice. The original Swiss Mouse was developed at École Polytechnique Fédérale de Lausanne by a project led by Jean-Daniel Nicoud, who was also responsible for the development of its optical shaft encoder. To augment their revenue stream, Logitech, then a software and hardware consulting company for the publishing industry, acquired marketing rights for North America. Mouse revenue quickly overshadowed that from software. In 1983, Logitech acquired DePraz, named the Swiss Mouse the “P4”, and grew to become one of the largest input device manufacturer in the world. One curious coincidence is that they were founded in the town of Apples, Switzerland." 
}, - { - "title": "FingerWorks TouchStream LP", - "company": "FingerWorks", - "year": 2002, - "primaryKey": [ - "Keyboard" - ], - "secondaryKey": [ - "Foldable", - "Gesture", - "Keyboard", - "Multi-touch", - "Reskin", - "Touchpad" - ], - "originalPrice": 339, - "degreesOfFreedom": 2, - "dimensions": { - "length": 180, - "width": 140, - "height": 9, - "unit": "mm" - }, - "shortDescription": "The TouchStream is a keyboard based on a pair of multi-touch pads. These can sense key taps and finger gestures. The “keys” are graphic. They are flush with the pad and have no mechanical movement. There are however, small raised points to help position the hands on the keyboard eyes-free typing, but these still allow the fingers slide easily on the surface when gesturing, such as when emulating a mouse. . The keyboard is independent of the base. It can be folded in half for compact portability. It can also be placed conveniently over a laptop’s keyboard as a replacement which then also enables the gesture enhancements to be used on the road. Although not obvious to the eye, this is the core technology which, after being acquired by Apple, evolved into the iPhone’s multi-touch capability.", - "longDescription": "Named FingerBoard during development, this product was relabeled TouchStream in October 2001 as the release date approached. when finally shipped, was renamed TouchStreamThe very rare original stand for this device was a gift from Sean Gerety, Atlanta, GA." - }, { "title": "One Laptop Per Child (OLPC) XO-1", "company": "One Laptop Per Child (OLPC)", @@ -186,76 +112,5 @@ }, "shortDescription": "The OLPC XO-1 is very innovative device that nevertheless raises serious issues about technology and social responsibility. It is included in the collection primarily as a warning against technological hubris, and the fact that no technologies are neutral from a social-cultural perspective.", "longDescription": "IntroductionI have this computer in my collection as a reminder of the delicate relationship between object and purpose, and how no matter how well one does on the former, it will likely have no impact on making a wanting concept achieve the stated (and even valid) purpose any better. I include it in the collection as a cautionary tale of how the object may help sell a concept, regardless how ill-conceived – even to those who should know better, had they applied the most basic critical thinking. For consumers, investors and designers, its story serves as a cautionary reminder to the importance of cultivating and retaining a critical mind and questioning perspective, regardless of how intrinsically seductive or well-intentioned a technology may be. From the perspective of hardware and software, what the One Laptop Per Child (OLPC) project was able to accomplish is impressive. In general, the team delivered a computer that could be produced at a remarkably low price – even if about double that which was targeted. Specifically, the display, for example, is innovative, and stands out due to its ability to work both in the bright sun (reflective) as well as in poorly lit spaces (emissive) – something that goes beyond pretty much anything else that is available on today’s (2017) slate computers or e-readers. In short, some excellent work went into this machine, something that is even more impressive, given the nature of the organization from which it emerged. The industrial design was equally impressive. 
Undertaken by Yves Behar’s FuseprojectUltimately, however, the machine was a means to an end, not the end itself. Rather than a device, the actual mission of the OLPC project was: … to empower the world's poorest children through education. Yet, as described by in their materials, the computer was intended to play a key role in this: With access to this type of tool [the computer], children are engaged in their own education, and learn, share, and create together. They become connected to each other, to the world and to a brighter future. Hence, making a suitable computer suitable to that purpose and the conditions where it would be used, at a price point that would enable broad distribution, was a key part of the project. The Underlying Belief System of the OLPC ProjectSince they are key to the thinking behind the OLPC project, I believe if fair to frame my discussion around the following four questions: Will giving computers to kids in the developing world improve their education? Will having a thus better-educated youth help bring a society out of poverty? Can that educational improvement be accomplished by giving the computers to the kids, with no special training for teachers? Should this be attempted on a global scale without any advance field trials or pilot studies? From the perspective of the OLPC project, the answer to every one of these questions is an unequivocal “yes”. In fact, as we shall see, any suggestion to the contrary is typically answered by condescension and/or mockery. The answers appear to be viewed as self-evident and not worth even questioning. Those who have not subscribed to this doctrine might call such a viewpoint hubris. What staggers me is how the project got so far without the basic assumptions being more broadly questioned, much less such questions being seriously addressed by the proponents. How did seemingly otherwise people commit to the project, through their labour or financial investment, given the apparently naïve and utopian approach that it took? Does the desire to do good cloud judgment that much? Are we that dazzled by a cool technology or big hairy audacious goal? Or by a charismatic personality? To explain my concern, and what this artifact represents to me, let me just touch on the four assumptions on which the project was founded. Will giving computers to kids in the developing world improve education? The literature on this question is, at best, mixed. What is clear is that one cannot make any assumption that such improvements will occur, regardless of whether one is talking about the developing world or suburban USA. For example, in January 2011, The World Bank published the following study: Can Computers Help Students Learn? From Evidence to Policy, January 2011, Number 4, The World Bank. A public-private partnership in Colombia, called Computers for Education, was created in 2002 to increase the availability of computers in public schools for use in education. Since starting, the program has installed more than 73, 000 computers in over 6, 300 public schools in more than 1, 000 municipalities. By 2008, over 2 million students and 83, 000 teachers had taken part. This document reports on a two-year study to determine the impact of the program on student performance. Students in schools that received the computers and teacher training did not do measurably better on tests than students in the control group. Nor was there a positive effect on other measures of learning. 
Researchers did not find any difference in test scores when they looked at specific components of math and language studies, such as algebra and geometry, and grammar and paraphrase ability in Spanish. But report also notes that results of such studies are mixed: Studies on the relationship between using computers in the classroom and improved test scores in developing countries give mixed results: A review of Israel’s Tomorrow-98 program in the mid-1990s, which put computers in schools across the country, did not find any impact on math and Hebrew language scores. But in India, a study of a computer-assisted learning program showed a significant positive impact on math scores. One thing researchers agree on, more work is needed in this field. Before moving on, a search of the literature will show that these results are consistent with those that were available in the literature at the time that the project was started. The point that I am making is not that the OLPC project could not be made to work; rather, that it was wrong to assume that it would do so without spending at least as much time designing the process to bring that about, as was expended designing the computer itself. Risk is fine, and something that can be mitigated. But diving in under the assumption that it would just work is not calculated risk, it is gambling - with other people’s lives, education and money. Will a better educated population help bring a society out of poverty? I am largely going to punt on this question. The fact is, I would be hard pressed to argue against education. But let us grant that improving education in the developing world is a good thing. The appropriate question is: is the approach of the OLPC project a reasonable or responsible way to disburse the limited resources that are available to address the educational challenges of the developing world? At the very least, I would suggest that this is a topic worthy of debate. An a priori assumption that giving computers is the right solution is akin to the, “If you build it they will come” approach seen in the movie, Field of Dreams. The problem here is that this is not a movie. There are real lives and futures that are at stake here – lives of those who cannot afford to see the movie, much less have precious resources spent on projects that are not well thought through. Can that improvement be accomplished by just giving the computers to the kids without training teachers? Remarkably, the OLPC Project’s answer is an explicit, “Yes”. In a TED talk filmed in December 2007, the founder of the OLPC initiative, Nicholas Negroponte states: “When people tell me, you know, who’s going to teach the teachers to teach the kids, I say to myself, “What planet do you come from? ” Okay, there’s not a person in this room [the TED Conference], I don’t care how techy you are, there’s not a person in this room that doesn’t give their laptop or cell phone to a kid to help them debug it. Okay, we all need help, even those of us who are very seasoned. ”Let us leave aside the naïvete of this statement stemming from the lack of distinction between ability to use applications and devices versus the ability to create and shape them. A failure of logic remains in that those unseasoned kids are part of “us”, as in “we all need help”. Where do the kids go for help? To other kids? What if they don’t know? Often they won’t. After all, the question may well have to do with a concept in calculus, rather than how to use the computer. What then? No answer is offered. 
Rather, those who dare raise the serious and legitimate concerns regarding teacher preparation are mockingly dismissed as coming from another planet! Well, perhaps they are. But in that case, there should at least be some debate as to who lives on which planet. Is it the people raising the question or the one dismissing the concern that lives in the real world of responsible thought and action? Can this all be accomplished without any advance field trials? Should one just immediately commit to international deployment of the program? As recently as September 2009, Negroponte took part in a panel discussion where he spoke on this matter. He states: I'd like you to imagine that I told you \"I have a technology that is going to change the quality of life. \" And then I tell you \"Really the right thing to do is to set up a pilot project to test my technology. And then the second thing to do is, once the pilot has been running for some period of time, is to go and measure very carefully the benefits of that technology. \"And then I am to tell you that what we are going to is very scientifically evaluate this technology, with control groups - giving it to some, giving it to others. And this all is very reasonable until I tell you the technology is electricity. And you say \"Wait, you don't have to do that!\"But you don't have to do that with laptops and learning either. And the fact that somebody in the room would say the impact is unclear is to me amazing - unbelievably amazing. There's not a person in this room who hasn't bought a laptop for their child, if they could afford it. And you don't know somebody who hasn't done it, if they can afford it. So there's only one question on the table and that's, “How to afford it? ” That's the only question. There is no other question - it's just the economics. And so, when One Laptop Per Child started, I didn't have the picture quite as clear as that, but we did focus on trying to get the price down. We did focus on those things. Unfortunately, Negroponte demonstrates his lack of understanding of both the history of electricity and education in this example. His historical mistake is this: yes, it was pretty obvious that electricity could bring many benefits to society. But what happened when Edison did exactly what Negroponte advocates? He almost lost his company due to his complete (but mistaken) conviction that DC, rather the AC was the correct technology to pursue. As with electricity, yes, it is rather obvious that education could bring significant benefits to the developing world. But in order to avoid making the same kind of expensive mistake that Edison did, perhaps one might want to do one’s best to make sure that the chosen technology is the AC, rather than DC, of education. A little more research, and a little less hubris might have put the investments in Edison and the OLPC to much better use. But the larger question is this: in what way is it responsible for the wealthy western world to advocate an untested and expensive (in every sense) technological solution on the poorest nations in the world? If history has taught us anything, it has taught us that just because our intentions are good, the same is not necessarily true for consequences of our actions. Later in his presentation, Negroponte states: … our problems are swimming against very naïve views of education. With this, I have to agree. 
It is just whose views on education are naïve, and how can such views emerge from MIT, no less, much less pass with so little critical scrutiny by the public, the press, participants, and funders? In an interview with Paul Marks, published in the New Scientist in December 2008, we see the how the techno-centric aspect of the project plays into the ostensible human centric purpose of the project. Negroponte’s retort regarding some of the initial skepticism that the project provoked was this: “When we first said we could build a laptop for $100 it was viewed as unrealistic and so 'anti-market' and so 'anti' the current laptops which at the time were around $1000 each, \" Negroponte said. \"It was viewed as pure bravado - but look what happened: the netbook market has developed in our wake. \" The project's demands for cheaper components such as keyboards, and processors nudged the industry into finding ways to cut costs, he says. \"What started off as a revolution became a culture. \"Surprise, yes, computers get smaller, faster, and cheaper over the course of time, and yes, one can even grant that the OLPC project may have accelerated that inevitable move. And, I have already stated my admiration and respect for the quality of the technology that was developed. But in the context of the overall objectives of the project, the best that one can say is, “Congratulations on meeting a milestone. ” However, by the same token, one might also legitimately question if starting with the hardware was not an instance of putting the cart before the horse. Yes, it is obviously necessary to have portable computers in the first place, before one can introduce them into the classroom, home, and donate them to children in the developing world. But it is also the case that small portable computers were already in existence and at the time that the project was initiated. While a factor of ten more expensive than the eventual target price, they were both available and adequate to support limited preliminary testing of the underlying premises of the project in an affordable manner. That is, before launching into a major - albeit well-intentioned – hardware development project, it may have been prudent to have tested the underlying premises of its motivation. Here we have to return to the raison d’être of the initiative: … to empower the world's poorest children through educationHence, the extent to which this is achieved from a given investment must be the primary metric of success, as well as the driving force of the project. Yet, that is clearly not what happened. Driven by a blind Edisonian belief in their un-tested premise, the project’s investments were overwhelmingly on the side of technology rather than pedagogy. Perhaps the nature and extent of the naïve (but well-meaning) utopian dream underlying the project is captured in the last part of the interview, above: Negroponte believes that empowering children and their parents with the educational resources offered by computers and the Internet will lead to informed decisions that improve democracy. Indeed, it has led to some gentle ribbing between himself and his brother: John Negroponte - currently deputy secretary of state in the outgoing Bush administration and the first ever director of national intelligence at the National Security Agency. \"I often joke with John that he can bring democracy his way - and I'll bring it mine, \" he says. 
Apparently providing inexpensive laptops to children in the developing world is not only going to raise educational standards, eradicate poverty, it is also going to bring democracy! All that, with no mention of the numerous poor non-democratic countries that have literacy levels equal to or higher than the USA (Cuba might be one reasonable example). The words naïve technological-utopianism come to mind. I began by admitting that I was conflicted in terms of this project. From the purely technological perspective, there is much to admire in the project’s accomplishments. Sadly, that was not the project’s primary objective. What appears to be missing throughout is an inability to distinguish between the technology and the purpose to which is was intended to serve. My concern in this regard is reflected in a paper by Warschauer & Ames(2010). The analysis reveals that provision of individual laptops is a utopian vision for the children in the poorest countries, whose educational and social futures could be more effectively improved if the same investments were instead made on more sustainable and proven interventions. Middle- and high-income countries may have a stronger rationale for providing individual laptops to children, but will still want to eschew OLPC’s technocentric vision. In summary, OLPC represents the latest in a long line of technologically utopian development schemes that have unsuccessfully attempted to solve complex social problems with overly simplistic solutions. There is a delicate relationship between technology and society, culture, ethics, and values. What this case study reflects is the fact that technologies are not neutral. They never are. Hence, technological initiatives must be accompanied by appropriate social, cultural and ethical considerations – especially in projects such as this where the technologies are being introduced into particularly vulnerable societies. That did not happen here, The fact that this project got the support that it did, and has gone as far as it has, given the way it was approached, is why this reminder – in the form of this device – is included in the collection. And if anyone ever wonders why I am so vocal about the need for public discourse around technology, one need look no further than the OLPC project." - }, - { - "title": "TASA Model 55 ASCII Keyboard", - "company": "TASA (Touch Activated Switch Arrays)", - "year": 1979, - "primaryKey": [ - "Keyboard" - ], - "secondaryKey": [ - "Pad", - "Touch" - ], - "originalPrice": 80, - "degreesOfFreedom": 0, - "dimensions": { - "length": 382.27, - "width": 158.75, - "height": 8.255, - "unit": "mm" - }, - "shortDescription": "This touch-sensitive keyboard is especially suited for super clean environments, such as hospitals, and those which are just the opposite. The reason is that, being completely flat, there are no crack or gaps where dirt or bacteria can accumulate. This same property enables it to be easily cleaned. However, the reason that I got this keyboard because it was silent – there are no mechanical key-clicks. Hence, for example, it enabled me to soundlessly enter data to my digital musical instrument during a concert or while recording.", - "longDescription": "This is a solid-state touch-sensitive keyboard with no moving parts. Because its surface is flat, the only way one knows that it is a QWERTY keyboard is by the graphical representation on its surface. One types by placing one’s fingers on pictures of keys, rather than physical/mechanical keycaps. 
Because of the lack of the tactile feedback associated with conventional keyboards, as expected, typing speed and/or accuracy will be compromised with this keyboard. And yet, this keyboard brings real value in certain situations, and in so doing, it provides a good example of the rule: Everything is best for something and worst for something else. Because the is especially suited for super clean environments, such as hospitals, and those which are just the opposite. The reason is that, being completely flat, there are no crack or gaps where dirt or bacteria can accumulate. This same property enables it to be easily cleaned. However, the reason that I got this keyboard because it was silent – there are no mechanical key-clicks. Hence, for example, it enabled me to soundlessly enter data to my digital musical instrument during a concert or while recording. This is one of a number of capacitive touch-sensing input devices produced in the period around 1981 by Touch Activated Switch Arrays (TASA). The others included a touch-sensitive linear controller, the Ferinstat, which could function as a linear slider/fader, for applications such as audio or process control. These came in two lengths and are included in the collection. There were also the Model 16 Micro Proximity Keyboards, which were 16-button keyboards, arranged in a 4x4 array of touch-sensitive buttons that included a touch-sensitive numerical keypad. They also demonstrated a small, capacitive touch-sensitive touch pad, not unlike what one sees on today’s laptops, for example." - }, - { - "title": "HandyKey (TekGear) Twiddler ", - "company": "HandyKey (TekGear)", - "year": 1991, - "primaryKey": [ - "Chord", - "Keyboard" - ], - "secondaryKey": [ - "Gesture", - "Joystick", - "Keyboard", - "Reality", - "Virtual", - "Vr", - "Wearable" - ], - "originalPrice": 199, - "degreesOfFreedom": 2, - "dimensions": { - "length": 128, - "width": 45, - "height": 50, - "unit": "mm" - }, - "shortDescription": "The Twiddler is a one-hand chord keyboard with integrated pointing capability, which can control the cursor in a joystick-like manner. This was a favourite device of the early Cyborg wearable-computer community.", - "longDescription": "……. . Note: Lyons, et al. abstract: An experienced user of the Twiddler, a one--handed chording keyboard, averages speeds of 60 words per minute with letter--by--letter typing of standard test phrases. This fast typing rate coupled with the Twiddler's 3x4 button design, similar to that of a standard mobile telephone, makes it a potential alternative to multi--tap for text entry on mobile phones. Despite this similarity, there is very little data on the Twiddler's performance and learnability. We present a longitudinal study of novice users' learning rates on the Twiddler. Ten participants typed for 20 sessions using two different methods. Each session is composed of 20 minutes of typing with multi--tap and 20 minutes of one--handed chording on the Twiddler. We found that users initially have a faster average typing rate with multi--tap; however, after four sessions the difference becomes negligible, and by the eighth session participants type faster with chording on the Twiddler. Furthermore, after 20 sessions typing rates for the Twiddler are still increasing." - }, - { - "title": "Blue Orb Inc. 
OrbiTouch", - "company": "Blue Orb Inc", - "year": 2002, - "primaryKey": [ - "Joystick" - ], - "secondaryKey": [ - "Keyboard" - ], - "originalPrice": 695, - "degreesOfFreedom": 4, - "dimensions": { - "length": 482.6, - "width": 228.6, - "height": 74.2, - "unit": "mm" - }, - "shortDescription": "On the one hand, this device has the overall footprint of a keyboard, and it is used to enter text. And yet, it is two wide, flat, spring-loaded, self-returning joysticks, which are used to enter characters, rather than the keys that we typically employ. To add to the unconventional nature of this device, one enters text via these two joysticks by means of something called radial menus, one for each hand. And, in keeping with many keyboards, such as those with an integrated touch pad, the OrbiTouch also enables mouse like capabilities, such as pointing and selecting, also by means of one of the joysticks.", - "longDescription": "Keyboards, Joysticks and Hierarchic Radial MenusIntroductionWhen you first look at this device, you might guess that it is some kind of keyboard. It even says so on the box and on the device itself. The keyboard-like footprint might reinforce this notion, as might the alphanumeric characters in the grey ring around the circular orb on the right-hand. On the other hand, if this is a keyboard, where are the keys? Reading the labels more carefully sheds light on the paradox: there are none. This is a “keyless keyboard. ” Yes, this is a contradiction in terms. But it is just such curiosities that make devices like this potentially interesting. Hence, we shall take a reasonably deep dive to see what might be revealed. Let’s start by trying to understand what the rationale was for landing on this particular design. The orbiTouch was developed by an industrial engineering doctoral student at the University of Central Florida, Peter McAlindon. His goal was to develop a means of text entry that minimized hand and wrist motion. The intent was to reduce the incidence of repetitive stress injury. A fair bit of research was undertaken between initial concept and commercial release. This can be accessed online, and doing so is a worthwhile exercise. Let us now turn our eye to the physical device in order to get a sense of where all of this landed. The Physical DeviceThe orbiTouch is dominated by two large circular “orbs. ” To my eye, their form initially practically screamed out, “I am a rotary control - Turn me!” However, appearances can be deceptive. Rather than dials, the orbs turn out to be a pair of a joysticks of a particular type. Rather than the stick-tilting motion typical of most, these “joysticks” are operated by moving them along the horizontal plane. In this they are a close cousins of the Altra Felix and KA Design Turbo Puck, both also in the collection. However, in contrast with the Felix and Turbo Puck, whose handles are “floating” (if you let go, they remain in the position where you released your grip), the orbs are “self-centering. ” That is, when released, internal springs return the orbs to their neutral central “home” position. In this, they behave much like the Gravis joystick in the collection, for example. At a finer level of detail, the orbs are specific class of joystick: “8-way joy-switches”. The term”8-way” indicates that only movement along the 8 main axes of the compass are sensed. As to the word “switch”, think of each orb as 8 switches, any one of which can be turned on by moving the orb in one of the 8 directions. 
(Conversely, they are turned off when the orb is released and returns to home position). Unlike an analogue joystick, such switches do not, and cannot, report how far or fast the orb has moved in any particular direction, nor how much pressure might be applied in the process. While limited, joy-switches provide a less complex and lower cost solution that are appropriate in situations where this additional data is not needed. There are several examples of joy-switches in the collection, especially video game controllers. One of the most iconic examples is the Atari CX-40 controller, which is a 4-way joy-switch. To recap, the orbiTouch is a bi-manual device for entering text by means of two orb-shaped planer-moving 8-way self-centering joy-switches. Having swallowed that mouth-full, let us now explore how text is entered using such a transducer. Entering TextIn general, a character or function is input by moving the two orbs. Which character or function depends on the direction (if any) each of the orbs has moved. For example, if both the left and right orb move west (left), the character “a” is entered. On the other hand, if the right orb again moves west, but the left one east (right), then the character input is “e”. How or why this is the case can be explained with the help of some images. For easier reading, the figure below shows the labels around the orbs in an exploded view. Notice that for both orbs, there is a label segment for each of its 8 directions. Since the example discussed entering an “a” and an “e”, each of which involved the right orb moving west (left) let’s look at the associated label segment in even more detail. Like all of the label segments for the right orb, this one consists of six areas containing text, each with a distinct background colour: red, yellow, green, orange and blue for the letters A through E, respectively, and black for the region containing “BACKSPACE”. Now look again at previous image and notice that each of these colours matches the label associated with one of the directions of the left orb. Text is entered using a two part process. Moving the right orb to the left/west specifies that you are going to enter one of: a, b, c, d, e, or BACKSPACE. (Like most keyboards, despite the labels on the key-caps being upper case, lower-case characters are entered unless the shift key is depressed. )Moving the left orb in the direction whose label corresponds to the background colour of the desired character causes that character to be entered. Hence, with the right orb held in the left/west position, one can enter the sequence, “abcde”, followed by a Backspace, by sequentially moving the left orb west (red), north-west (yellow), north (green), north-east (orange), east (blue) and south (black). The same technique can then be used to access all the characters and commands found in the right orb’s labels. Special ModesThere is one thing to add at this point: While entering printing characters always requires the use of both orbs, some actions can be performed using the left orb only. This can be inferred by the text that accompanies some of the left orb’s labels. For example, moving the left orb north (green) in quick succession (analogous to a double-click on a mouse), indicates that SHIFT will apply to the next character entered. Likewise, doing the same thing in the south-west (grey) direction applies the Caps Lock mode, i. e. , SHIFT will be applied to all subsequent entries until the mode is cancelled. 
These one-handed special modes/functions are summarized in the image below. Of these, the only one that I want to discuss at the moment is the ability of the orbiTouch to switch from entering text to controlling the screen cursor. This is done by moving the left orb south (black) twice in quick succession. When this is done, the right orb controls the cursor movement – the cursor moves continuously in the direction that you move the orb. In this, any doubts that you had about me characterizing the orbs as joysticks should disappear, since this cursor control is classic joystick behaviour. One issue of note is that the label describes this as “mouse” not “joystick”, which while understandable, is incorrect. Finally, before moving on to the next topic, note that while the right orb controls the movement of the screen cursor in mouse mode, movement of the left or left/west or right/east is taken as a left and right mouse button press, respectively. Remembering that the premise here is that the hands don’t have to move from the orbiTouch in order switch between typing and pointing tasks. But that doesn’t mean that the overhead in switching between the tasks is removed. One type of overhead is just substituted for another. And, the moded nature of the orbiTouch means that the option of parallel pointing-typing actions are eliminated. Rather than criticism, I mention these points to indicate the need to be mindful of the trade-offs and consequences of different design decisions - consequences that the designer should be aware of. Going Meta: What’s Really Going On? I want to approach doing so by stepping back, and approaching the underlying method of “typing” by going “meta”. That is, I want to jump up a lever of abstraction, beyond the physical device (for the moment), and explain what is going on at the conceptual level. The rest of the text is in much rougher form …. What will be revealed, if we do so, is that text is entered by means of the parallel use of two 8-direction radial menus. So what is a radial menu? These are the neglected cousins of the linear menus that populate conventional graphical user interfaces. The difference is that one makes a selection by the direction of movement, rather than the distance (as in the case with linear menus). It turns out that people can learn these quickly if the directions correspond to the 8 main points of the compass. For example, in a program menu, moving up (North) might mean Print, down (South) could mean Save, and moving down to the right (South East), Save As. Like linear menus, these menus can also be hierarchic. So, for example, after moving South East in order to specify Save As, a stroke to the left (West) might mean that it should be saved as a PDF file, whereas it would be saved as a Plain Text file if the secondary connected stroke was to the right (East). The reason for this brief tutorial on radial menus is that they pretty much define at the conceptual level how text is entered using the orbiTouch. The eight directions that you can move the orbs defines the menu item selected. And, by having the actual output depending on the combination of the selection made by each of the two orbs, the device can perhaps be best described as entering text using a two-level hierarchic radial menu, where menu selections are made using two planar moving 8-way joy switches. That is quite a mouth-full, and it has taken all of the text above to bring us to the point where there is a reasonable chance that it makes sense. 
And we still haven’t gotten into the details! it uses hierarchic (2-level) radial menus, but where the hierarchy is space multiplexed, rather than time multiplexed. That is, rather than doing one menu selection after the other, you do them simultaneously, by using a different hand to articulate the selection from each of the two menus. (While the text on the description is sparse still, look at the training cards, etc. and the photos on the page. )At the level of the mental model, there is no question in my mind (actually, I shouldn’t say that, because I am supposed to be an objective researcher who needs empirical data to inform decisions, but what the hell!) that you could give someone who knew how to use this device two isotonic joysticks, such as used with a video game controller, and they would be able to enter text just as fast as with this device. Furthermore, I am sure that if one had a slate capable of sensing both touch and stylus simultaneously, I am certain that the skill would transfer equally to using a touch radial gesture in the non-dominant hand, and stylus (or touch) radial gesture with the other. At the basic level, it is a 2-level radial menu, but where each level is operated independently and quasi-simultaneously by a different one of the operator’s two hands. Level 1: Right HandThis lets the operator select one of eight regionsThe label for each region consists of 6 characters (5 printing and one “special)In selecting one of the regions, one is not selecting any one of the characters of that region; rather, they are just indicating that the character that they want is one of the six in that regionEach of the characters in a region has a different background colour: blue, orange, green, yellow, red and black. Level 2: Left HandThis lets the operator select one of eight regionsEach region is labeled by a single colourAmong the colours that label the eight regions are the same ones used as character background colours in the regions of the right-hand control: blue, orange, green, yellow, red and blackBy the left hand selecting one of these six colours, one indicates which character is to be entered from among the six characters in the region indicated by the right hand – the selected character being the one whose background colour corresponds to the colour selected by the left hand. Hence, there are two 8-way, single level radial menus used. I believe it fair to say that it is, nevertheless, a 2 level radial menu, since both need to be used in order to enter one token. In actual fact, things are more complex, since none of the above covers issues such as all of the special character, punctuation, etc. , that do not appear on the labels of the right hand. To keep things brief, this is why only 6 of the left-hand menu options are used in what is discussed above. The other two options are needed to fill in the gaps. And, even then, the device resorts to something like double-clicks to get special modes and capabilities. For example, double clicking the black (south) region of the left hand turns the right-hand dome into a pointing device, i. e. , a mouse substitute for pointing, etc. I went through the – as it turned out – interesting exercise of translating the two parallel depth-1 radial menus of the orbiTouch UI into two different depth-2, breadth-8 hierarchic radial menus. You can see them in the attached images. The one assumes that the LH “dome” as the first-level selection, and then make the second-level selection with the right-hand dome. 
The other does the opposite, i. e. , the right-hand dome selection is the first level. It is interesting to compare the two with each other, as well as with both the labeling on the orbiTouch and the Quickstart documentation: The RH level-1 version seems easier to get rudimentary understanding compared to the LH due to clustering of letters and numbers on outer menus. Likewise, for the special characters that are the upper case of the numbersThe physical device is fine for letting you hunt-and-peck, so to speak, for characters, but it is useless for numbers, and most special characters. The documentation provided with the Quick Start (attached is not especially useful in terms of providing heuristics for memorization. While the orbiTouch certainly uses radial menus, it decidedly does not employ marking menus. One of the key things missing is the ability to check and correct before committing to an input, and the lack of ability to backtrack to the start, and therefore abort without entering anything. One thing that I have learned from this exercise is the difference that results due to having self-returning joysticks. Gestures don’t have that attribute. It matters esp w. r. t. the last point. What I like about this story, is how looking at something seemingly very different at the right level of abstraction, teaches us/me something new about something I was supposed to be an expert in. That is, that 2-level hierarchic marking menus can be achieved by two simultaneous single-level MMs. This is why I have the collection, and why I love what I do. There is still delight, despite being a 63-year-old geezer grandfather. The orbiTouch Keyless Keyboard was first known as the Keybowl, and the company was formerly known as Keybowl Inc. , and then Blue Orb Inc." } ] \ No newline at end of file diff --git a/src/scraping/buxton/json/incomplete.json b/src/scraping/buxton/json/incomplete.json index 595412e56..4b05a2a86 100644 --- a/src/scraping/buxton/json/incomplete.json +++ b/src/scraping/buxton/json/incomplete.json @@ -56,20 +56,10 @@ "filename": "AlphaSmart_Pro.docx", "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." }, - { - "filename": "Amazon_Kindle_Keyboard.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "Apple_ADB_Mouse.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." }, - { - "filename": "Apple_Adj_Keyboard.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "Apple_Mac_Portable-Katy’s MacBook Air-2.docx", "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", @@ -79,10 +69,6 @@ "filename": "Apple_Mac_Portable-Katy’s MacBook Air.docx", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "Apple_Mac_Portable.docx", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "Apple_Scroll_Mouse.docx", "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", @@ -93,16 +79,6 @@ "filename": "Apple_iPhone.docx", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
}, - { - "filename": "BAT.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Bill_Notes_CyKey.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, { "filename": "Brailler.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", @@ -115,22 +91,12 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "CasioC801.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "CasioTC500.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "Casio_Mini.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "Citizen_LC_909.docx", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", @@ -188,11 +154,6 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." }, - { - "filename": "FingerWorks_Prototype.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "Freeboard.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match was captured.", @@ -200,10 +161,6 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "FrogPad.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, { "filename": "FujitsuPalm.docx", "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", @@ -238,24 +195,10 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." }, - { - "filename": "Gavilan_SC.docx", - "company": "ERR__COMPANY__: outer match wasn't captured.", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "Genius_Ring_Mouse.docx", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "Grandjean_Stenotype.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "HTC_Touch.docx", "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", @@ -329,10 +272,6 @@ "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
}, - { - "filename": "Kindle_3G_lighted_cover.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, { "filename": "Leatherman_Tread.docx", "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", @@ -392,10 +331,6 @@ "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "Microwriter.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "Motorola_DynaTAC.docx", "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", @@ -403,21 +338,6 @@ "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "MousePen.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "NB75D.docx", - "year": "ERR__YEAR__: outer match was captured.", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, { "filename": "NewO.docx", "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", @@ -445,21 +365,12 @@ "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." }, - { - "filename": "PARCkbd.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, { "filename": "PadMouse.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "Philco_Mystery_Control.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, { "filename": "PowerTrack.docx", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", @@ -499,12 +410,6 @@ "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." }, - { - "filename": "The_Tap.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
- }, { "filename": "Thumbelina.docx", "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", diff --git a/src/scraping/buxton/node_scraper.ts b/src/scraping/buxton/node_scraper.ts index 117a0af84..ab6c9dcb2 100644 --- a/src/scraping/buxton/node_scraper.ts +++ b/src/scraping/buxton/node_scraper.ts @@ -1,9 +1,7 @@ import { readdirSync, writeFile, existsSync, mkdirSync } from "fs"; import * as path from "path"; import { red, cyan, yellow, green } from "colors"; -import { Database } from "../../server/database"; import { Opt } from "../../new_fields/Doc"; -import { Utils } from "../../Utils"; const StreamZip = require('node-stream-zip'); export interface DeviceDocument { @@ -104,7 +102,6 @@ function correctSentences(raw: string) { return { transformed: raw }; } -const targetMongoCollection = "newDocuments"; const outDir = path.resolve(__dirname, "json"); const successOut = "buxton.json"; const failOut = "incomplete.json"; @@ -119,7 +116,7 @@ function printEntries(zip: any) { } } -export async function wordToPlainText(pathToDocument: string): Promise { +async function wordToPlainText(pathToDocument: string): Promise { const zip = new StreamZip({ file: pathToDocument, storeEntries: true }); const contents = await new Promise((resolve, reject) => { zip.on('ready', () => { @@ -172,7 +169,7 @@ function capitalize(word: string): string { return word.charAt(0).toUpperCase() + word.slice(1); } -export function analyze(path: string, body: string): AnalysisResult { +function analyze(path: string, body: string): AnalysisResult { const device: any = {}; const segments = path.split("/"); @@ -249,90 +246,11 @@ async function writeOutputFile(relativePath: string, data: any[], total: number, }); } -namespace Doc { - - export async function create(fields: T, viewType?: number) { - const dataDocId = Utils.GenerateGuid(); - const dataDoc = { - _id: dataDocId, - fields: { - ...fields, - isPrototype: true, - author: "Bill Buxton" - }, - __type: "Doc" - }; - const viewDocId = Utils.GenerateGuid(); - const viewDoc = { - _id: viewDocId, - fields: { - proto: protofy(dataDocId), - x: 10, - y: 10, - _width: 900, - _height: 600, - _panX: 0, - _panY: 0, - zIndex: 2, - libraryBrush: false, - _viewType: viewType || 4, - _LODdisable: true - }, - __type: "Doc" - }; - await Database.Instance.insert(viewDoc, targetMongoCollection); - await Database.Instance.insert(dataDoc, targetMongoCollection); - return viewDocId; - } - - export function protofy(id: string) { - return { - fieldId: id, - __type: "proxy" - }; - } - - export function proxifyGuids(ids: string[]) { - return ids.map(id => ({ - fieldId: id, - __type: "proxy" - })); - } - - export function listify(fields: any[]) { - return { - fields: fields, - __type: "list" - }; - } - -} - -async function main() { +export async function main() { if (!existsSync(outDir)) { mkdirSync(outDir); } - - const devices = await parseFiles(); - await Database.tryInitializeConnection(); - - const { create, protofy, proxifyGuids, listify } = Doc; - const parentGuid = await Doc.create({ - proto: protofy("collectionProto"), - title: "The Buxton Collection", - data: listify(proxifyGuids(await Promise.all(devices.map(create)))) - }); - const result = await Database.Instance.updateMany( - { "fields.title": "Collection 1" }, - { $push: { "fields.data.fields": { fieldId: parentGuid, __type: "proxy" } } }, - targetMongoCollection - ); - - console.log(result); - console.log(green(`\nSuccessfully inserted ${devices.length} devices into ${targetMongoCollection}.`)); - - 
Database.disconnect(); - process.exit(0); + return parseFiles(); } main(); \ No newline at end of file diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index a4b4b14a9..dbf274e93 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -4,6 +4,7 @@ import { exec } from 'child_process'; import { command_line } from "../ActionUtilities"; import RouteSubscriber from "../RouteSubscriber"; import { red } from "colors"; +import { main } from "../../scraping/buxton/node_scraper"; export default class UtilManager extends ApiManager { @@ -58,6 +59,12 @@ export default class UtilManager extends ApiManager { }, }); + register({ + method: Method.GET, + subscription: "/newBuxton", + secureHandler: async ({ res }) => res.send(await main()) + }); + register({ method: Method.GET, subscription: "/version", diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 8b760db00..71775bed6 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -58,6 +58,7 @@ export class CurrentUserUtils { { title: "todo item", icon: "check", ignoreClick: true, drag: 'getCopy(this.dragFactory, true)', dragFactory: notes[notes.length - 1] }, { title: "web page", icon: "globe-asia", ignoreClick: true, drag: 'Docs.Create.WebDocument("https://en.wikipedia.org/wiki/Hedgehog", {_width: 300, _height: 300, title: "New Webpage" })' }, { title: "cat image", icon: "cat", ignoreClick: true, drag: 'Docs.Create.ImageDocument("https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", { _width: 200, title: "an image of a cat" })' }, + { title: "buxton", icon: "faObjectGroup", ignoreClick: true, drag: "Docs.Create.Buxton()" }, { title: "record", icon: "microphone", ignoreClick: true, drag: `Docs.Create.AudioDocument("${nullAudio}", { _width: 200, title: "ready to record audio" })` }, { title: "clickable button", icon: "bolt", ignoreClick: true, drag: 'Docs.Create.ButtonDocument({ _width: 150, _height: 50, title: "Button" })' }, { title: "presentation", icon: "tv", click: 'openOnRight(Doc.UserDoc().curPresentation = getCopy(this.dragFactory, true))', drag: `Doc.UserDoc().curPresentation = getCopy(this.dragFactory,true)`, dragFactory: emptyPresentation }, -- cgit v1.2.3-70-g09d2 From 79e1323acd0d0f95d08a09cefce908e35d0e7558 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 8 Feb 2020 17:32:33 -0500 Subject: initial commit, pending png read error bug fix --- .gitignore | 2 +- src/client/documents/Documents.ts | 45 +- .../collectionFreeForm/CollectionFreeFormView.tsx | 2 +- src/scraping/buxton/final/BuxtonImporter.ts | 316 ++++++++++++++ src/scraping/buxton/json/buxton.json | 116 ----- src/scraping/buxton/json/incomplete.json | 468 --------------------- src/scraping/buxton/node_scraper.ts | 256 ----------- src/server/ApiManagers/UtilManager.ts | 38 +- .../authentication/models/current_user_utils.ts | 2 +- 9 files changed, 374 insertions(+), 871 deletions(-) create mode 100644 src/scraping/buxton/final/BuxtonImporter.ts delete mode 100644 src/scraping/buxton/json/buxton.json delete mode 100644 src/scraping/buxton/json/incomplete.json delete mode 100644 src/scraping/buxton/node_scraper.ts (limited to 'src/server/ApiManagers') diff --git a/.gitignore b/.gitignore index b88fed833..fbf8668ca 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,7 @@ ClientUtils.ts solr-8.3.1/server/logs/ 
solr-8.3.1/server/solr/dash/data/tlog/* solr-8.3.1/server/solr/dash/data/index/* -src/scraping/buxton/source/ +src/scraping/buxton/final/source/ src/server/public/files/ src/scraping/acm/package-lock.json src/server/session_manager/logs/**/*.log diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index d647b34e6..f05bb3736 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -54,6 +54,7 @@ import { InkField } from "../../new_fields/InkField"; import { InkingControl } from "../views/InkingControl"; import { RichTextField } from "../../new_fields/RichTextField"; import { Networking } from "../Network"; +import { extname } from "path"; const requestImageSize = require('../util/request-image-size'); const path = require('path'); @@ -341,8 +342,34 @@ export namespace Docs { */ export namespace Create { - export async function Buxton() { - console.log(await Networking.FetchFromServer("/newBuxton")); + export function Buxton() { + const loading = new Doc; + loading.title = "Please wait for the import script..."; + const parent = TreeDocument([loading], { + title: "The Buxton Collection", + _width: 400, + _height: 400 + }); + Networking.FetchFromServer("/buxton").then(response => { + parent.data = new List(); + const devices = JSON.parse(response); + if (!Array.isArray(devices)) { + alert("Improper Buxton import formatting!"); + return; + } + devices.forEach(device => { + const { __images } = device; + delete device.__images; + const { ImageDocument, StackingDocument } = Docs.Create; + if (Array.isArray(__images)) { + const deviceImages = __images.map((url, i) => ImageDocument(url, { title: `image${i}.${extname(url)}` })); + const doc = StackingDocument(deviceImages, { title: device.title }); + Docs.Get.DocumentHierarchyFromJson(device, undefined, doc); + Doc.AddDocToList(parent, "data", doc); + } + }); + }); + return parent; } Scripting.addGlobal(Buxton); @@ -628,7 +655,7 @@ export namespace Docs { * or the result of any JSON.parse() call. * @param title an optional title to give to the highest parent document in the hierarchy */ - export function DocumentHierarchyFromJson(input: any, title?: string): Opt<Doc> { + export function DocumentHierarchyFromJson(input: any, title?: string, appendToTarget?: Doc): Opt<Doc> { if (input === undefined || input === null || ![...primitives, "object"].includes(typeof input)) { return undefined; } @@ -638,7 +665,7 @@ export namespace Docs { } let converted: Doc; if (typeof parsed === "object" && !(parsed instanceof Array)) { - converted = convertObject(parsed, title); + converted = convertObject(parsed, title, appendToTarget); } else { (converted = new Doc).json = toField(parsed); } @@ -653,12 +680,12 @@ export namespace Docs { * @returns the object mapped from JSON to field values, where each mapping * might involve arbitrary recursion (since toField might itself call convertObject) */ - const convertObject = (object: any, title?: string): Doc => { - const target = new Doc(); + const convertObject = (object: any, title?: string, target?: Doc): Doc => { + const resolved = target ??
new Doc; let result: Opt<Field>; - Object.keys(object).map(key => (result = toField(object[key], key)) && (target[key] = result)); - title && !target.title && (target.title = title); - return target; + Object.keys(object).map(key => (result = toField(object[key], key)) && (resolved[key] = result)); + title && !resolved.title && (resolved.title = title); + return resolved; }; /** diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 2518a4a55..66e4ef1b0 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -807,7 +807,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { doFreeformLayout(poolData: Map) { const layoutDocs = this.childLayoutPairs.map(pair => pair.layout); const initResult = this.Document.arrangeInit && this.Document.arrangeInit.script.run({ docs: layoutDocs, collection: this.Document }, console.log); - let state = initResult && initResult.success ? initResult.result.scriptState : undefined; + const state = initResult && initResult.success ? initResult.result.scriptState : undefined; const elements = initResult && initResult.success ? this.viewDefsToJSX(initResult.result.views) : []; this.childLayoutPairs.filter(pair => this.isCurrent(pair.layout)).map((pair, i) => { diff --git a/src/scraping/buxton/final/BuxtonImporter.ts b/src/scraping/buxton/final/BuxtonImporter.ts new file mode 100644 index 000000000..804d65d74 --- /dev/null +++ b/src/scraping/buxton/final/BuxtonImporter.ts @@ -0,0 +1,316 @@ +import { readdirSync, writeFile, mkdirSync, createWriteStream, createReadStream, unlinkSync } from "fs"; +import * as path from "path"; +import { red, cyan, yellow, green } from "colors"; +import { Utils } from "../../../Utils"; +import rimraf = require("rimraf"); +const StreamZip = require('node-stream-zip'); +import * as sharp from 'sharp'; +import { SizeSuffix, DashUploadUtils, InjectSize } from "../../../server/DashUploadUtils"; +import { AcceptibleMedia } from "../../../server/SharedMediaTypes"; + +export interface DeviceDocument { +    title: string; +    shortDescription: string; +    longDescription: string; +    company: string; +    year: number; +    originalPrice: number; +    degreesOfFreedom: number; +    dimensions: string; +    primaryKey: string; +    secondaryKey: string; +} + +interface DocumentContents { +    body: string; +    images: string[]; +} + +interface AnalysisResult { +    device?: DeviceDocument; +    errors?: any; +} + +type Converter<T> = (raw: string) => { transformed?: T, error?: string }; + +interface Processor<T> { +    exp: RegExp; +    matchIndex?: number; +    transformer?: Converter<T>; +} + +namespace Utilities { + +    export function numberValue(raw: string) { +        const transformed = Number(raw); +        if (isNaN(transformed)) { +            return { error: `${transformed} cannot be parsed to a numeric value.` }; +        } +        return { transformed }; +    } + +    export function collectUniqueTokens(raw: string) { +        return { transformed: Array.from(new Set(raw.replace(/,|\s+and\s+/g, " ").split(/\s+/).map(token => token.toLowerCase().trim()))).map(capitalize).sort() }; +    } + +    export function correctSentences(raw: string) { +        raw = raw.replace(/\./g, ". ").replace(/\:/g, ": ").replace(/\,/g, ", ").replace(/\?/g, "? 
").trimRight(); +        raw = raw.replace(/\s{2,}/g, " "); +        return { transformed: raw }; +    } + +    export function tryGetValidCapture(matches: RegExpExecArray | null, matchIndex: number): string | undefined { +        let captured: string; +        if (!matches || !(captured = matches[matchIndex])) { +            return undefined; +        } +        const lower = captured.toLowerCase(); +        if (/to come/.test(lower)) { +            return undefined; +        } +        if (lower.includes("xxx")) { +            return undefined; +        } +        if (!captured.toLowerCase().replace(/[….\s]+/g, "").length) { +            return undefined; +        } +        return captured; +    } + +    export function capitalize(word: string): string { +        const clean = word.trim(); +        if (!clean.length) { +            return word; +        } +        return word.charAt(0).toUpperCase() + word.slice(1); +    } + +} + +const RegexMap = new Map<keyof DeviceDocument, Processor<any>>([ +    ["title", { +        exp: /contact\s+(.*)Short Description:/ +    }], +    ["company", { +        exp: /Company:\s+([^\|]*)\s+\|/, +        transformer: (raw: string) => ({ transformed: raw.replace(/\./g, "") }) +    }], +    ["year", { +        exp: /Year:\s+([^\|]*)\s+\|/, +        transformer: Utilities.numberValue +    }], +    ["primaryKey", { +        exp: /Primary:\s+(.*)(Secondary|Additional):/, +        transformer: Utilities.collectUniqueTokens +    }], +    ["secondaryKey", { +        exp: /(Secondary|Additional):\s+([^\{\}]*)Links/, +        transformer: Utilities.collectUniqueTokens, +        matchIndex: 2 +    }], +    ["originalPrice", { +        exp: /Original Price \(USD\)\:\s+\$([0-9\.]+)/, +        transformer: Utilities.numberValue +    }], +    ["degreesOfFreedom", { +        exp: /Degrees of Freedom:\s+([0-9]+)/, +        transformer: Utilities.numberValue +    }], +    ["dimensions", { +        exp: /Dimensions\s+\(L x W x H\):\s+([0-9\.]+\s+x\s+[0-9\.]+\s+x\s+[0-9\.]+\s\([A-Za-z]+\))/, +        transformer: (raw: string) => { +            const [length, width, group] = raw.split(" x "); +            const [height, unit] = group.split(" "); +            return { +                transformed: { +                    dim_length: Number(length), +                    dim_width: Number(width), +                    dim_height: Number(height), +                    dim_unit: unit.replace(/[\(\)]+/g, "") +                } +            }; +        } +    }], +    ["shortDescription", { +        exp: /Short Description:\s+(.*)Bill Buxton[’']s Notes/, +        transformer: Utilities.correctSentences +    }], +    ["longDescription", { +        exp: /Bill Buxton[’']s Notes(.*)Device Details/, +        transformer: Utilities.correctSentences +    }], +]); + +const outDir = path.resolve(__dirname, "json"); +const imageDir = path.resolve(__dirname, "../../../server/public/files/images/buxton"); +const successOut = "buxton.json"; +const failOut = "incomplete.json"; +const deviceKeys = Array.from(RegexMap.keys()); + +export default async function executeImport() { +    [outDir, imageDir].forEach(dir => { +        rimraf.sync(dir); +        mkdirSync(dir); +    }); +    return parseFiles(); +} + +async function parseFiles(): Promise<DeviceDocument[]> { +    const sourceDirectory = path.resolve(`${__dirname}/source`); + +    const candidates = readdirSync(sourceDirectory).filter(file => file.endsWith(".doc") || file.endsWith(".docx")).map(file => `${sourceDirectory}/${file}`); +    const imported = await Promise.all(candidates.map(async path => ({ path, body: await extractFileContents(path) }))); +    const data = imported.map(({ path, body }) => analyze(path, body)); + +    const masterDevices: DeviceDocument[] = []; +    const masterErrors: any[] = []; + +    data.forEach(({ device, errors }) => { +        if (device) { +            masterDevices.push(device); +        } else { +            masterErrors.push(errors); +        } +    }); +    const total = candidates.length; +    if (masterDevices.length + masterErrors.length !== total) { +        throw new Error(`Encountered a ${masterDevices.length} to ${masterErrors.length} mismatch in device / error split!`); +    } + + 
console.log(); +    await writeOutputFile(successOut, masterDevices, total, true); +    await writeOutputFile(failOut, masterErrors, total, false); +    console.log(); + +    return masterDevices; +} + +async function extractFileContents(pathToDocument: string): Promise<{ body: string, images: string[] }> { +    const zip = new StreamZip({ file: pathToDocument, storeEntries: true }); +    const contents = await new Promise((resolve, reject) => { +        zip.on('ready', () => { +            let body = ""; +            zip.stream("word/document.xml", (error: any, stream: any) => { +                if (error) { +                    reject(error); +                } +                stream.on('data', (chunk: any) => body += chunk.toString()); +                stream.on('end', () => resolve(body)); +            }); +        }); +    }); +    const images = (await writeImages(zip)).map(name => `http://localhost:1050/files/images/buxton/${name}`); +    zip.close(); +    let body = ""; +    const components = contents.toString().split('<w:t'); +    for (const component of components) { +        const tags = component.split('>'); +        const content = tags[1].replace(/<.*$/, ""); +        body += content; +    } +    return { body, images }; +} + +async function writeImages(zip: any): Promise<string[]> { +    const entryNames = Object.values(zip.entries()).map(({ name }) => name); +    const resolved: { mediaPath: string, ext: string }[] = []; +    let initialWritePath: string; +    entryNames.forEach(name => { +        const matches = /^word\/media\/\w+\.(jpeg|jpg|png|gif)/.exec(name); +        matches && resolved.push({ mediaPath: name, ext: matches[1] }); +    }); +    return Promise.all(resolved.map(async ({ mediaPath, ext }) => { +        const outName = `upload_${Utils.GenerateGuid()}.${ext}`; +        const initialWrite = await new Promise((resolve, reject) => { +            zip.stream(mediaPath, (error: any, stream: any) => { +                if (error) { +                    console.error(error); +                    return reject(error); +                } +                initialWritePath = `${imageDir}/${outName}`; +                const writeStream = createWriteStream(initialWritePath); +                stream.on('end', () => resolve(outName)); +                stream.on('error', reject); +                stream.pipe(writeStream); +            }); +        }); +        const resizers = [ +            { resizer: sharp().rotate(), suffix: SizeSuffix.Original }, +            ...Object.values(DashUploadUtils.Sizes).map(size => ({ +                resizer: sharp().resize(size.width, undefined, { withoutEnlargement: true }).rotate(), +                suffix: size.suffix +            })) +        ]; +        const { pngs, jpgs } = AcceptibleMedia; +        if (pngs.includes(ext)) { +            resizers.forEach(element => element.resizer = element.resizer.png()); +        } else if (jpgs.includes(ext)) { +            resizers.forEach(element => element.resizer = element.resizer.jpeg()); +        } +        for (const { resizer, suffix } of resizers) { +            await new Promise(resolve => { +                const filename = InjectSize(outName, suffix); +                console.log(filename); +                createReadStream(initialWritePath).pipe(resizer).pipe(createWriteStream(`${imageDir}/${filename}`)) +                    .on('close', resolve) +                    .on('error', error => { +                        console.log(red(error)); +                        resolve(); +                    }); +            }); +        } +        unlinkSync(initialWritePath); +        return initialWrite; +    })); +} + +function analyze(pathToDocument: string, { body, images }: DocumentContents): AnalysisResult { +    const filename = path.basename(pathToDocument).replace("Bill_Notes_", ""); +    console.log(`Parsing ${filename}...`); + +    const device: any = {}; +    const errors: any = { filename }; + +    for (const key of deviceKeys) { +        const { exp, transformer, matchIndex } = RegexMap.get(key)!; +        const matches = exp.exec(body); + +        let captured = Utilities.tryGetValidCapture(matches, matchIndex ?? 1); +        if (!captured) { +            errors[key] = `ERR__${key.toUpperCase()}__: outer match ${matches === null ? 
"wasn't" : "was"} captured.`; + continue; + } + + captured = captured.replace(/\s{2,}/g, " "); + if (transformer) { + const { error, transformed } = transformer(captured); + if (error) { + errors[key] = `__ERR__${key.toUpperCase()}__TRANSFORM__: ${error}`; + continue; + } + captured = transformed; + } + + device[key] = captured; + } + + const errorKeys = Object.keys(errors); + if (errorKeys.length > 1) { + console.log(red(`\n@ ${cyan(filename.toUpperCase())}...`)); + errorKeys.forEach(key => key !== "filename" && console.log(red(errors[key]))); + return { errors }; + } + + device.__images = images; + + return { device }; +} + +async function writeOutputFile(relativePath: string, data: any[], total: number, success: boolean) { + console.log(yellow(`Encountered ${data.length} ${success ? "valid" : "invalid"} documents out of ${total} candidates. Writing ${relativePath}...`)); + return new Promise((resolve, reject) => { + const destination = path.resolve(outDir, relativePath); + const contents = JSON.stringify(data, undefined, 4); + writeFile(destination, contents, err => err ? reject(err) : resolve()); + }); +} \ No newline at end of file diff --git a/src/scraping/buxton/json/buxton.json b/src/scraping/buxton/json/buxton.json deleted file mode 100644 index 8371f2cf2..000000000 --- a/src/scraping/buxton/json/buxton.json +++ /dev/null @@ -1,116 +0,0 @@ -[ - { - "title": "3Dconnexion CadMan 3D Motion Controller", - "company": "3Dconnexion", - "year": 2003, - "primaryKey": [ - "Joystick" - ], - "secondaryKey": [ - "Isometric", - "Joystick" - ], - "originalPrice": 399, - "degreesOfFreedom": 6, - "dimensions": { - "length": 175, - "width": 122, - "height": 43, - "unit": "mm" - }, - "shortDescription": "The CadMan is a 6 degree of freedom (DOF) joystick controller. It represented a significant step towards making this class of is controller affordable. It was mainly directed at 3D modelling and animation and was a “next generation” of the Magellan controller, which is also in the collection.", - "longDescription": "The CadMan is a 6 degree of freedom (DOF) joystick controller. It represented a significant step towards making this class of is controller more affordable. It was mainly directed at 3D modelling and animation and was a “next generation” of the Magellan/SpaceMouse controller, which is also in the collection. Like the Magellan, this is an isometric rate-control joystick. That is, it rests in a neutral central position, not sending and signal. When a force is applied to it, it emits a signal indicating the direction and strength of that force. This signal can then be mapped to a parameter of a selected object, such as a sphere, and – for example – cause that sphere to rotate for as long as, and as fast as, and in the direction determined by, the duration, force, and direction of the applied force. When released, it springs back to neutral position. Note that the force does not need to be directed along a single DOF. In fact, a core feature of the device is that one can simultaneously and independently apply force that asserts control over more than one DOF, and furthermore, vary those forces dynamically. As an aid to understanding, let me walk through some of the underlying concepts at play here by using a more familiar device: a computer mouse. If you move a mouse in a forward/backward direction, the mouse pointer on the screen moves between the screen’s top and bottom. If you think of the screen as a piece of graph paper, that corresponds to moving along the “Y” axis. 
That is one degree of freedom. On the other hand, you could move the mouse left and right, which causes the mouse to move between the left and right side of the screen. That would correspond to moving along the graph paper’s “X” axis – a second degree of freedom. Yet, you can also move the mouse diagonally. This is an example of independently controlling two degrees of freedom. Now imagine that if you lifted your mouse off your desktop, that your computer could dynamically sense its height as you did so. This would constitute a “flying mouse” (the literal translation of the German word for a “Bat”, which Canadian colleague, Colin Ware, applied to just such a mouse which he built in 1988). If you moved your Bat vertically up and down, perpendicular to the desktop, you would be controlling movement along the “Z” axis - a third degree of freedom. Having already seen that we can move a mouse diagonally, we have established that we need not be constrained to only moving along a single axis. That extends to the movement of our Bat and movement along the “Z” axis. We can control our hand movement in dependently in any or all directions in 3D space. But how does one reconcile the fact that we call the CadMan a “3D controller, and yet also describe it as having 6 degrees of freedom? After all, the example this far demonstrates that our Bat, as described thus far, has freedom on movement in 3 Dimensions. While true, we can extend our example to prove that that freedom to move in 3D is also highly constrained. To demonstrate this, move your hand in 3D space on and above your desktop. However, do so keeping your palm flat, parallel to the desktop with your fingers pointing directly forward. In so doing, you are still moving in 3D. Now, while moving, twist your wrist, while moving the hand, such that your palm is alternatively exposed to the left and right side. This constitutes rotation around the “Y” axis. A fourth DOF. Now add a waving motion to your hand, as if it were a paper airplane diving up and down, while also rocking left and right. But keep your fingers pointing forward. You have now added a fifth DOF, rotation around the “X” axis. Finally, add a twist to your wrist so that your fingers are no longer constrained to pointing forward. This is the sixth degree of freedom, rotation around the “Z” axis. Now don’t be fooled, this exercise could continue. We are not restricted to even six DOF. Imagine doing the above, but where the movement and rotations are measured relative to the Bat’s position and orientation, rather than to the holding/controlling hand, per se. One could imagine the Bat having a scroll wheel, like the one on most mice today. Furthermore, while flying your Bat around in 3D, that wheel could easily be rolled in either forward or backward, and thereby control the size of whatever was being controlled. Hence, with one hand we could assert simultaneous and independent control over 7 DOF in 3D space. This exercise has two intended take-aways. The first is a better working understanding between the notion of Degree of Freedom (DOF) and Dimension in space. Hopefully, the confusion frequently encountered when 3D and 6DOF are used in close context, can now be eliminated. Second, is that, with appropriate sensing, the human hand is capable of exercising control over far more degrees of freedom that six. And if we use the two hands together, the potential number of DOF that one can control goes even further. 
Finally, it is important to add one more take-away – one which both emerges from, and is frequently encountered when discussing, the previous two. That is, do not equate exercising simultaneous control over a high number of DOF with consciously doing the same number of different things all at once. The example that used to be thrown at me when I started talking about coordinated simultaneously bi-manual action went along the lines of, “Psychology tells us that we cannot do multiple things at once, for example, simultaneously tapping your head and rubbing your stomach. ”Well, first, I can tap my head with one hand while rubbing my stomach with the other. But that is not the point. The whole essence of skill – motor-sensory and cognitive – is “chunking” or task integration. When one appears to be doing many different things at once, if they are skilled, they are consciously doing only one thing. Playing a chord on the piano, for example, or skiing down the hill. Likewise, in flying your imaginary BAT in the previous exercise with the scroll wheel, were you doing 7 things at once, or one thing with 7 DOF? And if you had a Bat in each hand, does that mean you are now doing 14 things at once, or are you doing one thing with 14 DOF? Let me provide a different way of answering this question: if you have ever played air guitar, or “conducted” the orchestra that you are listening to on the radio, you are exercising control over more than 14 DOF. And you are doing exactly what I just said, “playing air guitar” or “conducting an orchestra”. One thing – at the conscious level, which is what matters – despite almost any one thing being able to be deconstructed into hundreds of sub-tasks. As I said the essence of skill: aggregation, or chunking. What is most important for both tool designers and users to be mindful of, is the overwhelming influence that our choice and design of tools impacts the degree to which such integration or chunking can take place. The degree to which the tool matches both the skills that we have already acquired through a lifetime of living in the everyday world, and the demands of the intended task, the more seamless that task can be performed, the more “natural” it will feel, and the less learning will be required. In my experience, it brought particular value when used bimanually, in combination with a mouse, where the preferred hand performed conventional pointing, selection and dragging tasks, while the non-preferred hand could manipulate the parameters of the thing being selected. First variation of the since the 2001 formation of 3Dconnextion. The CadMan came in 5 colours: smoke, orange, red, blue and green. See the notes for the LogiCad3D Magellan for more details on this class of device. It is the “parent” of the CadMan, and despite the change in company name, it comes from the same team." - }, - { - "title": "Adesso ACK-540UB USB Mini-Touch Keyboard with Touchpad", - "company": "Adesso", - "year": 2005, - "primaryKey": [ - "Keyboard" - ], - "secondaryKey": [ - "Pad", - "Touch" - ], - "originalPrice": 59.95, - "degreesOfFreedom": 2, - "dimensions": { - "length": 287, - "width": 140, - "height": 35.5, - "unit": "mm" - }, - "shortDescription": "The Mini-Touch Keyboard is a surprisingly rare device: a laptop-style, small-footprint keyboard with a centrally mounted touch-pad. .", - "longDescription": "First released in 2003 with a PS/2 connector (ACK-540PW & ACK-540PB). USB version released in 2006 in either black (ACK-540UB) or white (ACK-540UW). 
Marketed under different brands, including SolidTek: http: //www. tigerdirect. com/applications/searchtools/item-details. asp? EdpNo=1472243https: //acecaddigital. com/index. php/products/keyboards/mini-keyboards/kb-540 Deltaco: https: //www. digitalimpuls. no/logitech/116652/deltaco-minitastatur-med-touchpad-usb" - }, - { - "title": "Contour Design UniTrap ", - "company": "Contour Design", - "year": 1999, - "primaryKey": [ - "Re-skin" - ], - "secondaryKey": [ - "Mouse" - ], - "originalPrice": 14.99, - "degreesOfFreedom": 2, - "dimensions": { - "length": 130.5, - "width": 75.7, - "height": 43, - "unit": "mm" - }, - "shortDescription": "This is a plastic shell within which the round Apple iMac G3 “Hockey Puck” mouse can be fit. While the G3 Mouse worked well mechanically, when gripped its round shape gave few cues as to its orientation. Hence, if you moved your hand up, the screen pointer may well have moved diagonally. By reskinning it with the inexpensive Contour UniTrap, the problem went away without the need to buy a whole new mouse.", - "longDescription": "Also add back pointers from devices re-skinned" - }, - { - "title": "Depraz Swiss Mouse", - "company": "Depraz", - "year": 1980, - "primaryKey": [ - "Mouse" - ], - "secondaryKey": [ - "Ball", - "Chord", - "Keyboard", - "Mouse" - ], - "originalPrice": 295, - "degreesOfFreedom": 2, - "dimensions": { - "length": 50.8, - "width": 76.2, - "height": 114.3, - "unit": "mm" - }, - "shortDescription": "This mouse is one of the first commercially available mice to be sold publicly. It is known as the Swiss mouse, and yes, the roller mechanism was designed by a Swiss watchmaker. Coincidentally, the company that made it, Depraz, is based in Apples, Switzerland. Their success in selling this mouse is what caused Logitech to switch from a software development shop to one of the world’s leading suppliers of mice and other input devices.", - "longDescription": "DePraz began manufacturing in 1980, but following design built in 1979. Logitech started selling it in 1982. It was one of the first mass produced mice, one of the first available ball mice, as well as to have an optical shaft encoder – thereby improving linearity. An interesting fact, given its Swiss heritage, is that its designer, André Guignard, was trained as a Swiss watch maker. Unlike most modern mice, the DePraz, or “Swiss” mouse had a quasi-hemispherical shape. Hence, it was held in a so-called “power-grip”, much as one would grip a horizontally held ball – the thumb and small finger applying pressure on each side, with added support from the weight/friction of the palm on the back of the mouse. In this posture, the three middle fingers naturally positioning themselves over the three buttons mounted at the lower edge of the front. Largely freed of grip pressure, by grace of thumb and little finger, the middle fingers had essentially freedom of motion to independently operate the buttons. Each having a dedicated finger, the buttons could be easily pushed independently or in any combination. Like the three valves on a trumpet, this ability to “chord” extended the three physical buttons to have the power of seven. The down-side of this “turtle shell” form factor is that it placed the hand in a posture in which mouse movement relied more of the larger muscle groups of the arm to wrist, rather than wrist to fingers – the latter being the approach taken in most subsequent mice. 
The original Swiss Mouse was developed at École Polytechnique Fédérale de Lausanne by a project led by Jean-Daniel Nicoud, who was also responsible for the development of its optical shaft encoder. To augment their revenue stream, Logitech, then a software and hardware consulting company for the publishing industry, acquired marketing rights for North America. Mouse revenue quickly overshadowed that from software. In 1983, Logitech acquired DePraz, named the Swiss Mouse the “P4”, and grew to become one of the largest input device manufacturer in the world. One curious coincidence is that they were founded in the town of Apples, Switzerland." - }, - { - "title": "One Laptop Per Child (OLPC) XO-1", - "company": "One Laptop Per Child (OLPC)", - "year": 2007, - "primaryKey": [ - "Computer" - ], - "secondaryKey": [ - "Keyboard", - "Laptop", - "Pad", - "Slate", - "Touch" - ], - "originalPrice": 199, - "degreesOfFreedom": 2, - "dimensions": { - "length": 242, - "width": 228, - "height": 30, - "unit": "mm" - }, - "shortDescription": "The OLPC XO-1 is very innovative device that nevertheless raises serious issues about technology and social responsibility. It is included in the collection primarily as a warning against technological hubris, and the fact that no technologies are neutral from a social-cultural perspective.", - "longDescription": "IntroductionI have this computer in my collection as a reminder of the delicate relationship between object and purpose, and how no matter how well one does on the former, it will likely have no impact on making a wanting concept achieve the stated (and even valid) purpose any better. I include it in the collection as a cautionary tale of how the object may help sell a concept, regardless how ill-conceived – even to those who should know better, had they applied the most basic critical thinking. For consumers, investors and designers, its story serves as a cautionary reminder to the importance of cultivating and retaining a critical mind and questioning perspective, regardless of how intrinsically seductive or well-intentioned a technology may be. From the perspective of hardware and software, what the One Laptop Per Child (OLPC) project was able to accomplish is impressive. In general, the team delivered a computer that could be produced at a remarkably low price – even if about double that which was targeted. Specifically, the display, for example, is innovative, and stands out due to its ability to work both in the bright sun (reflective) as well as in poorly lit spaces (emissive) – something that goes beyond pretty much anything else that is available on today’s (2017) slate computers or e-readers. In short, some excellent work went into this machine, something that is even more impressive, given the nature of the organization from which it emerged. The industrial design was equally impressive. Undertaken by Yves Behar’s FuseprojectUltimately, however, the machine was a means to an end, not the end itself. Rather than a device, the actual mission of the OLPC project was: … to empower the world's poorest children through education. Yet, as described by in their materials, the computer was intended to play a key role in this: With access to this type of tool [the computer], children are engaged in their own education, and learn, share, and create together. They become connected to each other, to the world and to a brighter future. 
Hence, making a suitable computer suitable to that purpose and the conditions where it would be used, at a price point that would enable broad distribution, was a key part of the project. The Underlying Belief System of the OLPC ProjectSince they are key to the thinking behind the OLPC project, I believe if fair to frame my discussion around the following four questions: Will giving computers to kids in the developing world improve their education? Will having a thus better-educated youth help bring a society out of poverty? Can that educational improvement be accomplished by giving the computers to the kids, with no special training for teachers? Should this be attempted on a global scale without any advance field trials or pilot studies? From the perspective of the OLPC project, the answer to every one of these questions is an unequivocal “yes”. In fact, as we shall see, any suggestion to the contrary is typically answered by condescension and/or mockery. The answers appear to be viewed as self-evident and not worth even questioning. Those who have not subscribed to this doctrine might call such a viewpoint hubris. What staggers me is how the project got so far without the basic assumptions being more broadly questioned, much less such questions being seriously addressed by the proponents. How did seemingly otherwise people commit to the project, through their labour or financial investment, given the apparently naïve and utopian approach that it took? Does the desire to do good cloud judgment that much? Are we that dazzled by a cool technology or big hairy audacious goal? Or by a charismatic personality? To explain my concern, and what this artifact represents to me, let me just touch on the four assumptions on which the project was founded. Will giving computers to kids in the developing world improve education? The literature on this question is, at best, mixed. What is clear is that one cannot make any assumption that such improvements will occur, regardless of whether one is talking about the developing world or suburban USA. For example, in January 2011, The World Bank published the following study: Can Computers Help Students Learn? From Evidence to Policy, January 2011, Number 4, The World Bank. A public-private partnership in Colombia, called Computers for Education, was created in 2002 to increase the availability of computers in public schools for use in education. Since starting, the program has installed more than 73, 000 computers in over 6, 300 public schools in more than 1, 000 municipalities. By 2008, over 2 million students and 83, 000 teachers had taken part. This document reports on a two-year study to determine the impact of the program on student performance. Students in schools that received the computers and teacher training did not do measurably better on tests than students in the control group. Nor was there a positive effect on other measures of learning. Researchers did not find any difference in test scores when they looked at specific components of math and language studies, such as algebra and geometry, and grammar and paraphrase ability in Spanish. But report also notes that results of such studies are mixed: Studies on the relationship between using computers in the classroom and improved test scores in developing countries give mixed results: A review of Israel’s Tomorrow-98 program in the mid-1990s, which put computers in schools across the country, did not find any impact on math and Hebrew language scores. 
But in India, a study of a computer-assisted learning program showed a significant positive impact on math scores. One thing researchers agree on, more work is needed in this field. Before moving on, a search of the literature will show that these results are consistent with those that were available in the literature at the time that the project was started. The point that I am making is not that the OLPC project could not be made to work; rather, that it was wrong to assume that it would do so without spending at least as much time designing the process to bring that about, as was expended designing the computer itself. Risk is fine, and something that can be mitigated. But diving in under the assumption that it would just work is not calculated risk, it is gambling - with other people’s lives, education and money. Will a better educated population help bring a society out of poverty? I am largely going to punt on this question. The fact is, I would be hard pressed to argue against education. But let us grant that improving education in the developing world is a good thing. The appropriate question is: is the approach of the OLPC project a reasonable or responsible way to disburse the limited resources that are available to address the educational challenges of the developing world? At the very least, I would suggest that this is a topic worthy of debate. An a priori assumption that giving computers is the right solution is akin to the, “If you build it they will come” approach seen in the movie, Field of Dreams. The problem here is that this is not a movie. There are real lives and futures that are at stake here – lives of those who cannot afford to see the movie, much less have precious resources spent on projects that are not well thought through. Can that improvement be accomplished by just giving the computers to the kids without training teachers? Remarkably, the OLPC Project’s answer is an explicit, “Yes”. In a TED talk filmed in December 2007, the founder of the OLPC initiative, Nicholas Negroponte states: “When people tell me, you know, who’s going to teach the teachers to teach the kids, I say to myself, “What planet do you come from? ” Okay, there’s not a person in this room [the TED Conference], I don’t care how techy you are, there’s not a person in this room that doesn’t give their laptop or cell phone to a kid to help them debug it. Okay, we all need help, even those of us who are very seasoned. ”Let us leave aside the naïvete of this statement stemming from the lack of distinction between ability to use applications and devices versus the ability to create and shape them. A failure of logic remains in that those unseasoned kids are part of “us”, as in “we all need help”. Where do the kids go for help? To other kids? What if they don’t know? Often they won’t. After all, the question may well have to do with a concept in calculus, rather than how to use the computer. What then? No answer is offered. Rather, those who dare raise the serious and legitimate concerns regarding teacher preparation are mockingly dismissed as coming from another planet! Well, perhaps they are. But in that case, there should at least be some debate as to who lives on which planet. Is it the people raising the question or the one dismissing the concern that lives in the real world of responsible thought and action? Can this all be accomplished without any advance field trials? Should one just immediately commit to international deployment of the program? 
As recently as September 2009, Negroponte took part in a panel discussion where he spoke on this matter. He states: I'd like you to imagine that I told you \"I have a technology that is going to change the quality of life. \" And then I tell you \"Really the right thing to do is to set up a pilot project to test my technology. And then the second thing to do is, once the pilot has been running for some period of time, is to go and measure very carefully the benefits of that technology. \"And then I am to tell you that what we are going to is very scientifically evaluate this technology, with control groups - giving it to some, giving it to others. And this all is very reasonable until I tell you the technology is electricity. And you say \"Wait, you don't have to do that!\"But you don't have to do that with laptops and learning either. And the fact that somebody in the room would say the impact is unclear is to me amazing - unbelievably amazing. There's not a person in this room who hasn't bought a laptop for their child, if they could afford it. And you don't know somebody who hasn't done it, if they can afford it. So there's only one question on the table and that's, “How to afford it? ” That's the only question. There is no other question - it's just the economics. And so, when One Laptop Per Child started, I didn't have the picture quite as clear as that, but we did focus on trying to get the price down. We did focus on those things. Unfortunately, Negroponte demonstrates his lack of understanding of both the history of electricity and education in this example. His historical mistake is this: yes, it was pretty obvious that electricity could bring many benefits to society. But what happened when Edison did exactly what Negroponte advocates? He almost lost his company due to his complete (but mistaken) conviction that DC, rather the AC was the correct technology to pursue. As with electricity, yes, it is rather obvious that education could bring significant benefits to the developing world. But in order to avoid making the same kind of expensive mistake that Edison did, perhaps one might want to do one’s best to make sure that the chosen technology is the AC, rather than DC, of education. A little more research, and a little less hubris might have put the investments in Edison and the OLPC to much better use. But the larger question is this: in what way is it responsible for the wealthy western world to advocate an untested and expensive (in every sense) technological solution on the poorest nations in the world? If history has taught us anything, it has taught us that just because our intentions are good, the same is not necessarily true for consequences of our actions. Later in his presentation, Negroponte states: … our problems are swimming against very naïve views of education. With this, I have to agree. It is just whose views on education are naïve, and how can such views emerge from MIT, no less, much less pass with so little critical scrutiny by the public, the press, participants, and funders? In an interview with Paul Marks, published in the New Scientist in December 2008, we see the how the techno-centric aspect of the project plays into the ostensible human centric purpose of the project. 
Negroponte’s retort regarding some of the initial skepticism that the project provoked was this: “When we first said we could build a laptop for $100 it was viewed as unrealistic and so 'anti-market' and so 'anti' the current laptops which at the time were around $1000 each, \" Negroponte said. \"It was viewed as pure bravado - but look what happened: the netbook market has developed in our wake. \" The project's demands for cheaper components such as keyboards, and processors nudged the industry into finding ways to cut costs, he says. \"What started off as a revolution became a culture. \"Surprise, yes, computers get smaller, faster, and cheaper over the course of time, and yes, one can even grant that the OLPC project may have accelerated that inevitable move. And, I have already stated my admiration and respect for the quality of the technology that was developed. But in the context of the overall objectives of the project, the best that one can say is, “Congratulations on meeting a milestone. ” However, by the same token, one might also legitimately question if starting with the hardware was not an instance of putting the cart before the horse. Yes, it is obviously necessary to have portable computers in the first place, before one can introduce them into the classroom, home, and donate them to children in the developing world. But it is also the case that small portable computers were already in existence and at the time that the project was initiated. While a factor of ten more expensive than the eventual target price, they were both available and adequate to support limited preliminary testing of the underlying premises of the project in an affordable manner. That is, before launching into a major - albeit well-intentioned – hardware development project, it may have been prudent to have tested the underlying premises of its motivation. Here we have to return to the raison d’être of the initiative: … to empower the world's poorest children through educationHence, the extent to which this is achieved from a given investment must be the primary metric of success, as well as the driving force of the project. Yet, that is clearly not what happened. Driven by a blind Edisonian belief in their un-tested premise, the project’s investments were overwhelmingly on the side of technology rather than pedagogy. Perhaps the nature and extent of the naïve (but well-meaning) utopian dream underlying the project is captured in the last part of the interview, above: Negroponte believes that empowering children and their parents with the educational resources offered by computers and the Internet will lead to informed decisions that improve democracy. Indeed, it has led to some gentle ribbing between himself and his brother: John Negroponte - currently deputy secretary of state in the outgoing Bush administration and the first ever director of national intelligence at the National Security Agency. \"I often joke with John that he can bring democracy his way - and I'll bring it mine, \" he says. Apparently providing inexpensive laptops to children in the developing world is not only going to raise educational standards, eradicate poverty, it is also going to bring democracy! All that, with no mention of the numerous poor non-democratic countries that have literacy levels equal to or higher than the USA (Cuba might be one reasonable example). The words naïve technological-utopianism come to mind. I began by admitting that I was conflicted in terms of this project. 
From the purely technological perspective, there is much to admire in the project’s accomplishments. Sadly, that was not the project’s primary objective. What appears to be missing throughout is the ability to distinguish between the technology and the purpose it was intended to serve. My concern in this regard is reflected in a paper by Warschauer & Ames (2010). The analysis reveals that provision of individual laptops is a utopian vision for the children in the poorest countries, whose educational and social futures could be more effectively improved if the same investments were instead made on more sustainable and proven interventions. Middle- and high-income countries may have a stronger rationale for providing individual laptops to children, but will still want to eschew OLPC’s technocentric vision. In summary, OLPC represents the latest in a long line of technologically utopian development schemes that have unsuccessfully attempted to solve complex social problems with overly simplistic solutions. There is a delicate relationship between technology and society, culture, ethics, and values. What this case study reflects is the fact that technologies are not neutral. They never are. Hence, technological initiatives must be accompanied by appropriate social, cultural and ethical considerations – especially in projects such as this where the technologies are being introduced into particularly vulnerable societies. That did not happen here. The fact that this project got the support that it did, and has gone as far as it has, given the way it was approached, is why this reminder – in the form of this device – is included in the collection. And if anyone ever wonders why I am so vocal about the need for public discourse around technology, one need look no further than the OLPC project." - } -] \ No newline at end of file diff --git a/src/scraping/buxton/json/incomplete.json b/src/scraping/buxton/json/incomplete.json deleted file mode 100644 index 4b05a2a86..000000000 --- a/src/scraping/buxton/json/incomplete.json +++ /dev/null @@ -1,468 +0,0 @@ -[ - { - "filename": "3DMag.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "3DPlus.docx", - "year": "ERR__YEAR__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "3DSpace.docx", - "year": "ERR__YEAR__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "3Dconnexion_SpaceNavigator.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "3MErgo.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "ADB2.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured."
- }, - { - "filename": "AWrock.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Abaton.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Active.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "AlphaSmart_Pro.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "Apple_ADB_Mouse.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, - { - "filename": "Apple_Mac_Portable-Katy’s MacBook Air-2.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Apple_Mac_Portable-Katy’s MacBook Air.docx", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Apple_Scroll_Mouse.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Apple_iPhone.docx", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Brailler.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Brewster_Stereoscope.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "CasioTC500.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Citizen_LC_909.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Citizen_LC_913.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, - { - "filename": "Citizen_LCl_914.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "CoolPix.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." 
- }, - { - "filename": "Cross.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Dymo_MK-6.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Emotiv.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Explorer.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Falcon.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "Freeboard.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match was captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "FujitsuPalm.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "FujitsuTouch.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "GRiD1550-Katy’s MacBook Air-2.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "GRiD1550-Katy’s MacBook Air.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." 
- }, - { - "filename": "GRiD1550.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "Genius_Ring_Mouse.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "HTC_Touch.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, - { - "filename": "Helios-Klimax.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Honeywell_T86.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "IBMTrack.docx", - "year": "ERR__YEAR__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "IBM_Convertable-Katy’s MacBook Air-2.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "IBM_Convertable-Katy’s MacBook Air.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "IBM_Convertable.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "IBM_PS2_Mouse.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "IBM_Simon.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value." - }, - { - "filename": "IDEO.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Joyboard.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." 
- }, - { - "filename": "Kensington_SB_TB-Mouse.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Leatherman_Tread.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "M1.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured." - }, - { - "filename": "MS-1_Stereoscope.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "MWB_Braille_Writer.docx", - "company": "ERR__COMPANY__: outer match wasn't captured.", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "MaltronLH.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "Marine_Band_Harmonica.docx", - "company": "ERR__COMPANY__: outer match was captured.", - "year": "ERR__YEAR__: outer match was captured.", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Matrox.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "Metaphor_Kbd.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "Metaphor_Mouse.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Motorola_DynaTAC.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "NewO.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
- }, - { - "filename": "Newton120.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "Nikon_Coolpix-100.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." - }, - { - "filename": "Numonics_Mgr_Mouse.docx", - "company": "ERR__COMPANY__: outer match was captured.", - "year": "ERR__YEAR__: outer match was captured.", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "PadMouse.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "PowerTrack.docx", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "ProAgio.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Pulsar_time_Computer.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "Ring.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "SafeType_Kbd.docx", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "Samsung_SPH-A500.docx", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "SurfMouse.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." - }, - { - "filename": "TPARCtab.docx", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "Thumbelina.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." - }, - { - "filename": "adecm.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
- }, - { - "filename": "eMate.docx", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "gravis.docx", - "year": "ERR__YEAR__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "iGesture.docx", - "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", - "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "iGrip.docx", - "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." - }, - { - "filename": "iLiad.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." - }, - { - "filename": "round.docx", - "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", - "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", - "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", - "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
- } -] \ No newline at end of file diff --git a/src/scraping/buxton/node_scraper.ts b/src/scraping/buxton/node_scraper.ts deleted file mode 100644 index ab6c9dcb2..000000000 --- a/src/scraping/buxton/node_scraper.ts +++ /dev/null @@ -1,256 +0,0 @@ -import { readdirSync, writeFile, existsSync, mkdirSync } from "fs"; -import * as path from "path"; -import { red, cyan, yellow, green } from "colors"; -import { Opt } from "../../new_fields/Doc"; -const StreamZip = require('node-stream-zip'); - -export interface DeviceDocument { - title: string; - shortDescription: string; - longDescription: string; - company: string; - year: number; - originalPrice: number; - degreesOfFreedom: number; - dimensions: string; - primaryKey: string; - secondaryKey: string; -} - -interface AnalysisResult { - device?: DeviceDocument; - errors?: any; -} - -type Converter = (raw: string) => { transformed?: T, error?: string }; - -interface Processor { - exp: RegExp; - matchIndex?: number; - transformer?: Converter; -} - -const RegexMap = new Map>([ - ["title", { - exp: /contact\s+(.*)Short Description:/ - }], - ["company", { - exp: /Company:\s+([^\|]*)\s+\|/, - transformer: (raw: string) => ({ transformed: raw.replace(/\./g, "") }) - }], - ["year", { - exp: /Year:\s+([^\|]*)\s+\|/, - transformer: numberValue - }], - ["primaryKey", { - exp: /Primary:\s+(.*)(Secondary|Additional):/, - transformer: collectUniqueTokens - }], - ["secondaryKey", { - exp: /(Secondary|Additional):\s+([^\{\}]*)Links/, - transformer: collectUniqueTokens, - matchIndex: 2 - }], - ["originalPrice", { - exp: /Original Price \(USD\)\:\s+\$([0-9\.]+)/, - transformer: numberValue - }], - ["degreesOfFreedom", { - exp: /Degrees of Freedom:\s+([0-9]+)/, - transformer: numberValue - }], - ["dimensions", { - exp: /Dimensions\s+\(L x W x H\):\s+([0-9\.]+\s+x\s+[0-9\.]+\s+x\s+[0-9\.]+\s\([A-Za-z]+\))/, - transformer: (raw: string) => { - const [length, width, group] = raw.split(" x "); - const [height, unit] = group.split(" "); - return { - transformed: { - length: Number(length), - width: Number(width), - height: Number(height), - unit: unit.replace(/[\(\)]+/g, "") - } - }; - } - }], - ["shortDescription", { - exp: /Short Description:\s+(.*)Bill Buxton[’']s Notes/, - transformer: correctSentences - }], - ["longDescription", { - exp: /Bill Buxton[’']s Notes(.*)Device Details/, - transformer: correctSentences - }], -]); - -function numberValue(raw: string) { - const transformed = Number(raw); - if (isNaN(transformed)) { - return { error: `${transformed} cannot be parsed to a numeric value.` }; - } - return { transformed }; -} - -function collectUniqueTokens(raw: string) { - return { transformed: Array.from(new Set(raw.replace(/,|\s+and\s+/g, " ").split(/\s+/).map(token => token.toLowerCase().trim()))).map(capitalize).sort() }; -} - -function correctSentences(raw: string) { - raw = raw.replace(/\./g, ". ").replace(/\:/g, ": ").replace(/\,/g, ", ").replace(/\?/g, "? ").trimRight(); - raw = raw.replace(/\s{2,}/g, " "); - return { transformed: raw }; -} - -const outDir = path.resolve(__dirname, "json"); -const successOut = "buxton.json"; -const failOut = "incomplete.json"; -const deviceKeys = Array.from(RegexMap.keys()); - -function printEntries(zip: any) { - const { entriesCount } = zip; - console.log(`Recognized ${entriesCount} entr${entriesCount === 1 ? "y" : "ies"}.`); - for (const entry of Object.values(zip.entries())) { - const desc = entry.isDirectory ? 
'directory' : `${entry.size} bytes`; - console.log(`${entry.name}: ${desc}`); - } -} - -async function wordToPlainText(pathToDocument: string): Promise { - const zip = new StreamZip({ file: pathToDocument, storeEntries: true }); - const contents = await new Promise((resolve, reject) => { - zip.on('ready', () => { - let body = ""; - zip.stream("word/document.xml", (error: any, stream: any) => { - if (error) { - reject(error); - } - stream.on('data', (chunk: any) => body += chunk.toString()); - stream.on('end', () => { - resolve(body); - zip.close(); - }); - }); - }); - }); - let body = ""; - const components = contents.toString().split(''); - const content = tags[1].replace(/<.*$/, ""); - body += content; - } - return body; -} - -function tryGetValidCapture(matches: RegExpExecArray | null, matchIndex: number): Opt { - let captured: string; - if (!matches || !(captured = matches[matchIndex])) { - return undefined; - } - const lower = captured.toLowerCase(); - if (/to come/.test(lower)) { - return undefined; - } - if (lower.includes("xxx")) { - return undefined; - } - if (!captured.toLowerCase().replace(/[….\s]+/g, "").length) { - return undefined; - } - return captured; -} - -function capitalize(word: string): string { - const clean = word.trim(); - if (!clean.length) { - return word; - } - return word.charAt(0).toUpperCase() + word.slice(1); -} - -function analyze(path: string, body: string): AnalysisResult { - const device: any = {}; - - const segments = path.split("/"); - const filename = segments[segments.length - 1].replace("Bill_Notes_", ""); - - const errors: any = { filename }; - - for (const key of deviceKeys) { - const { exp, transformer, matchIndex } = RegexMap.get(key)!; - const matches = exp.exec(body); - - let captured = tryGetValidCapture(matches, matchIndex ?? 1); - if (!captured) { - errors[key] = `ERR__${key.toUpperCase()}__: outer match ${matches === null ? 
"wasn't" : "was"} captured.`; - continue; - } - - captured = captured.replace(/\s{2,}/g, " "); - if (transformer) { - const { error, transformed } = transformer(captured); - if (error) { - errors[key] = `__ERR__${key.toUpperCase()}__TRANSFORM__: ${error}`; - continue; - } - captured = transformed; - } - - device[key] = captured; - } - - const errorKeys = Object.keys(errors); - if (errorKeys.length > 1) { - console.log(red(`\n@ ${cyan(filename.toUpperCase())}...`)); - errorKeys.forEach(key => key !== "filename" && console.log(red(errors[key]))); - return { errors }; - } - - return { device }; -} - -async function parseFiles(): Promise { - const sourceDirectory = path.resolve(`${__dirname}/source`); - const candidates = readdirSync(sourceDirectory).filter(file => file.endsWith(".doc") || file.endsWith(".docx")).map(file => `${sourceDirectory}/${file}`); - const imported = await Promise.all(candidates.map(async path => ({ path, body: await wordToPlainText(path) }))); - // const imported = [{ path: candidates[10], body: await extract(candidates[10]) }]; - const data = imported.map(({ path, body }) => analyze(path, body)); - const masterDevices: DeviceDocument[] = []; - const masterErrors: any[] = []; - data.forEach(({ device, errors }) => { - if (device) { - masterDevices.push(device); - } else { - masterErrors.push(errors); - } - }); - const total = candidates.length; - if (masterDevices.length + masterErrors.length !== total) { - throw new Error(`Encountered a ${masterDevices.length} to ${masterErrors.length} mismatch in device / error split!`); - } - console.log(); - await writeOutputFile(successOut, masterDevices, total, true); - await writeOutputFile(failOut, masterErrors, total, false); - console.log(); - - return masterDevices; -} - -async function writeOutputFile(relativePath: string, data: any[], total: number, success: boolean) { - console.log(yellow(`Encountered ${data.length} ${success ? "valid" : "invalid"} documents out of ${total} candidates. Writing ${relativePath}...`)); - return new Promise((resolve, reject) => { - const destination = path.resolve(outDir, relativePath); - const contents = JSON.stringify(data, undefined, 4); - writeFile(destination, contents, err => err ? 
reject(err) : resolve()); - }); -} - -export async function main() { - if (!existsSync(outDir)) { - mkdirSync(outDir); - } - return parseFiles(); -} - -main(); \ No newline at end of file diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index dbf274e93..e590a5b85 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -4,7 +4,7 @@ import { exec } from 'child_process'; import { command_line } from "../ActionUtilities"; import RouteSubscriber from "../RouteSubscriber"; import { red } from "colors"; -import { main } from "../../scraping/buxton/node_scraper"; +import executeImport from "../../scraping/buxton/final/BuxtonImporter"; export default class UtilManager extends ApiManager { @@ -40,29 +40,29 @@ export default class UtilManager extends ApiManager { } }); - register({ - method: Method.GET, - subscription: "/buxton", - secureHandler: async ({ res }) => { - const cwd = './src/scraping/buxton'; + // register({ + // method: Method.GET, + // subscription: "/buxton", + // secureHandler: async ({ res }) => { + // const cwd = './src/scraping/buxton'; - const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; - const onRejected = (err: any) => { console.error(err.message); res.send(err); }; - const tryPython3 = (reason: any) => { - console.log("Initial scraper failed for the following reason:"); - console.log(red(reason.Error)); - console.log("Falling back to python3..."); - return command_line('python3 scraper.py', cwd).then(onResolved, onRejected); - }; + // const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; + // const onRejected = (err: any) => { console.error(err.message); res.send(err); }; + // const tryPython3 = (reason: any) => { + // console.log("Initial scraper failed for the following reason:"); + // console.log(red(reason.Error)); + // console.log("Falling back to python3..."); + // return command_line('python3 scraper.py', cwd).then(onResolved, onRejected); + // }; - return command_line('python scraper.py', cwd).then(onResolved, tryPython3); - }, - }); + // return command_line('python scraper.py', cwd).then(onResolved, tryPython3); + // }, + // }); register({ method: Method.GET, - subscription: "/newBuxton", - secureHandler: async ({ res }) => res.send(await main()) + subscription: "/buxton", + secureHandler: async ({ res }) => res.send(await executeImport()) }); register({ diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 71775bed6..896b88631 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -58,7 +58,7 @@ export class CurrentUserUtils { { title: "todo item", icon: "check", ignoreClick: true, drag: 'getCopy(this.dragFactory, true)', dragFactory: notes[notes.length - 1] }, { title: "web page", icon: "globe-asia", ignoreClick: true, drag: 'Docs.Create.WebDocument("https://en.wikipedia.org/wiki/Hedgehog", {_width: 300, _height: 300, title: "New Webpage" })' }, { title: "cat image", icon: "cat", ignoreClick: true, drag: 'Docs.Create.ImageDocument("https://upload.wikimedia.org/wikipedia/commons/thumb/3/3a/Cat03.jpg/1200px-Cat03.jpg", { _width: 200, title: "an image of a cat" })' }, - { title: "buxton", icon: "faObjectGroup", ignoreClick: true, drag: "Docs.Create.Buxton()" }, + { title: "buxton", icon: "cloud-upload-alt", ignoreClick: true, drag: "Docs.Create.Buxton()" }, { title: 
"record", icon: "microphone", ignoreClick: true, drag: `Docs.Create.AudioDocument("${nullAudio}", { _width: 200, title: "ready to record audio" })` }, { title: "clickable button", icon: "bolt", ignoreClick: true, drag: 'Docs.Create.ButtonDocument({ _width: 150, _height: 50, title: "Button" })' }, { title: "presentation", icon: "tv", click: 'openOnRight(Doc.UserDoc().curPresentation = getCopy(this.dragFactory, true))', drag: `Doc.UserDoc().curPresentation = getCopy(this.dragFactory,true)`, dragFactory: emptyPresentation }, -- cgit v1.2.3-70-g09d2 From 10ad4f200e888f98b53a40d35db758c1f4cd445f Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 8 Feb 2020 19:43:11 -0500 Subject: image upload in importer now functional --- src/scraping/buxton/final/BuxtonImporter.ts | 36 +- src/scraping/buxton/final/json/buxton.json | 409 ++++++++++++++++++ src/scraping/buxton/final/json/incomplete.json | 569 +++++++++++++++++++++++++ src/server/ApiManagers/UtilManager.ts | 20 - 4 files changed, 999 insertions(+), 35 deletions(-) create mode 100644 src/scraping/buxton/final/json/buxton.json create mode 100644 src/scraping/buxton/final/json/incomplete.json (limited to 'src/server/ApiManagers') diff --git a/src/scraping/buxton/final/BuxtonImporter.ts b/src/scraping/buxton/final/BuxtonImporter.ts index 804d65d74..9c98ec5b0 100644 --- a/src/scraping/buxton/final/BuxtonImporter.ts +++ b/src/scraping/buxton/final/BuxtonImporter.ts @@ -1,6 +1,6 @@ import { readdirSync, writeFile, mkdirSync, createWriteStream, createReadStream, unlinkSync } from "fs"; import * as path from "path"; -import { red, cyan, yellow, green } from "colors"; +import { red, cyan, yellow } from "colors"; import { Utils } from "../../../Utils"; import rimraf = require("rimraf"); const StreamZip = require('node-stream-zip'); @@ -199,7 +199,7 @@ async function extractFileContents(pathToDocument: string): Promise<{ body: stri }); }); }); - const images = (await writeImages(zip)).map(name => `http://localhost:1050/files/images/buxton/${name}`); + const images = await writeImages(zip); zip.close(); let body = ""; const components = contents.toString().split(' { const entryNames = Object.values(zip.entries()).map(({ name }) => name); const resolved: { mediaPath: string, ext: string }[] = []; - let initialWritePath: string; entryNames.forEach(name => { - const matches = /^word\/media\/\w+\.(jpeg|jpg|png|gif)/.exec(name); + const matches = /^word\/media\/\w+(\.jpeg|jpg|png|gif)/.exec(name); matches && resolved.push({ mediaPath: name, ext: matches[1] }); }); - return Promise.all(resolved.map(async ({ mediaPath, ext }) => { - const outName = `upload_${Utils.GenerateGuid()}.${ext}`; - const initialWrite = await new Promise((resolve, reject) => { + const outNames: string[] = []; + for (const { mediaPath, ext } of resolved) { + const outName = `upload_${Utils.GenerateGuid()}${ext}`; + const initialWritePath = path.resolve(imageDir, outName); + await new Promise((resolve, reject) => { zip.stream(mediaPath, (error: any, stream: any) => { if (error) { console.error(error); return reject(error); } - initialWritePath = `${imageDir}/${outName}`; const writeStream = createWriteStream(initialWritePath); - stream.on('end', () => resolve(outName)); + stream.on('end', resolve); stream.on('error', reject); stream.pipe(writeStream); }); @@ -242,16 +242,21 @@ async function writeImages(zip: any): Promise { })) ]; const { pngs, jpgs } = AcceptibleMedia; - if (pngs.includes(ext)) { + const lower = ext.toLowerCase(); + if (pngs.includes(lower)) { resizers.forEach(element => 
element.resizer = element.resizer.png()); - } else if (jpgs.includes(ext)) { + } else if (jpgs.includes(lower)) { resizers.forEach(element => element.resizer = element.resizer.jpeg()); + } else { + throw new Error(red(initialWritePath + " " + lower)); } for (const { resizer, suffix } of resizers) { await new Promise(resolve => { const filename = InjectSize(outName, suffix); - console.log(filename); - createReadStream(initialWritePath).pipe(resizer).pipe(createWriteStream(`${imageDir}/${filename}`)) + createReadStream(initialWritePath).pipe(resizer).on('error', error => { + console.log(red(error.message) + filename); + resolve(); + }).pipe(createWriteStream(path.resolve(imageDir, filename))) .on('close', resolve) .on('error', error => { console.log(red(error)); @@ -260,8 +265,9 @@ async function writeImages(zip: any): Promise { }); } unlinkSync(initialWritePath); - return initialWrite; - })); + outNames.push(`http://localhost:1050/files/images/buxton/${outName}`); + } + return outNames; } function analyze(pathToDocument: string, { body, images }: DocumentContents): AnalysisResult { diff --git a/src/scraping/buxton/final/json/buxton.json b/src/scraping/buxton/final/json/buxton.json new file mode 100644 index 000000000..35be6f623 --- /dev/null +++ b/src/scraping/buxton/final/json/buxton.json @@ -0,0 +1,409 @@ +[ + { + "title": "3Dconnexion CadMan 3D Motion Controller", + "company": "3Dconnexion", + "year": 2003, + "primaryKey": [ + "Joystick" + ], + "secondaryKey": [ + "Isometric", + "Joystick" + ], + "originalPrice": 399, + "degreesOfFreedom": 6, + "dimensions": { + "dim_length": 175, + "dim_width": 122, + "dim_height": 43, + "dim_unit": "mm" + }, + "shortDescription": "The CadMan is a 6 degree of freedom (DOF) joystick controller. It represented a significant step towards making this class of is controller affordable. It was mainly directed at 3D modelling and animation and was a “next generation” of the Magellan controller, which is also in the collection.", + "longDescription": "The CadMan is a 6 degree of freedom (DOF) joystick controller. It represented a significant step towards making this class of is controller more affordable. It was mainly directed at 3D modelling and animation and was a “next generation” of the Magellan/SpaceMouse controller, which is also in the collection. Like the Magellan, this is an isometric rate-control joystick. That is, it rests in a neutral central position, not sending and signal. When a force is applied to it, it emits a signal indicating the direction and strength of that force. This signal can then be mapped to a parameter of a selected object, such as a sphere, and – for example – cause that sphere to rotate for as long as, and as fast as, and in the direction determined by, the duration, force, and direction of the applied force. When released, it springs back to neutral position. Note that the force does not need to be directed along a single DOF. In fact, a core feature of the device is that one can simultaneously and independently apply force that asserts control over more than one DOF, and furthermore, vary those forces dynamically. As an aid to understanding, let me walk through some of the underlying concepts at play here by using a more familiar device: a computer mouse. If you move a mouse in a forward/backward direction, the mouse pointer on the screen moves between the screen’s top and bottom. If you think of the screen as a piece of graph paper, that corresponds to moving along the “Y” axis. That is one degree of freedom. 
On the other hand, you could move the mouse left and right, which causes the mouse to move between the left and right side of the screen. That would correspond to moving along the graph paper’s “X” axis – a second degree of freedom. Yet, you can also move the mouse diagonally. This is an example of independently controlling two degrees of freedom. Now imagine that if you lifted your mouse off your desktop, that your computer could dynamically sense its height as you did so. This would constitute a “flying mouse” (the literal translation of the German word for a “Bat”, which Canadian colleague, Colin Ware, applied to just such a mouse which he built in 1988). If you moved your Bat vertically up and down, perpendicular to the desktop, you would be controlling movement along the “Z” axis - a third degree of freedom. Having already seen that we can move a mouse diagonally, we have established that we need not be constrained to only moving along a single axis. That extends to the movement of our Bat and movement along the “Z” axis. We can control our hand movement in dependently in any or all directions in 3D space. But how does one reconcile the fact that we call the CadMan a “3D controller, and yet also describe it as having 6 degrees of freedom? After all, the example this far demonstrates that our Bat, as described thus far, has freedom on movement in 3 Dimensions. While true, we can extend our example to prove that that freedom to move in 3D is also highly constrained. To demonstrate this, move your hand in 3D space on and above your desktop. However, do so keeping your palm flat, parallel to the desktop with your fingers pointing directly forward. In so doing, you are still moving in 3D. Now, while moving, twist your wrist, while moving the hand, such that your palm is alternatively exposed to the left and right side. This constitutes rotation around the “Y” axis. A fourth DOF. Now add a waving motion to your hand, as if it were a paper airplane diving up and down, while also rocking left and right. But keep your fingers pointing forward. You have now added a fifth DOF, rotation around the “X” axis. Finally, add a twist to your wrist so that your fingers are no longer constrained to pointing forward. This is the sixth degree of freedom, rotation around the “Z” axis. Now don’t be fooled, this exercise could continue. We are not restricted to even six DOF. Imagine doing the above, but where the movement and rotations are measured relative to the Bat’s position and orientation, rather than to the holding/controlling hand, per se. One could imagine the Bat having a scroll wheel, like the one on most mice today. Furthermore, while flying your Bat around in 3D, that wheel could easily be rolled in either forward or backward, and thereby control the size of whatever was being controlled. Hence, with one hand we could assert simultaneous and independent control over 7 DOF in 3D space. This exercise has two intended take-aways. The first is a better working understanding between the notion of Degree of Freedom (DOF) and Dimension in space. Hopefully, the confusion frequently encountered when 3D and 6DOF are used in close context, can now be eliminated. Second, is that, with appropriate sensing, the human hand is capable of exercising control over far more degrees of freedom that six. And if we use the two hands together, the potential number of DOF that one can control goes even further. 
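The rate-control behaviour described above (force in, velocity out, on six independent axes) can be summarised in a short sketch. The following TypeScript, the language used elsewhere in this patch, is purely illustrative; none of these names come from Buxton's notes, from 3Dconnexion, or from the Dash codebase, and the gain value is arbitrary.

// Illustrative sketch only: an isometric 6-DOF cap reports forces and torques,
// and rate control integrates them into a pose for as long as they are applied.
interface SixDofSample {
    fx: number; fy: number; fz: number; // force along X, Y, Z (device units)
    tx: number; ty: number; tz: number; // torque about X, Y, Z (device units)
}

interface Pose {
    position: { x: number; y: number; z: number };     // e.g. millimetres
    rotation: { rx: number; ry: number; rz: number };  // e.g. radians
}

// One integration step: each of the six inputs drives its own degree of freedom
// independently, which is why a single gesture can translate and rotate at once.
function applyRateControl(pose: Pose, sample: SixDofSample, dt: number, gain = 0.001): Pose {
    return {
        position: {
            x: pose.position.x + sample.fx * gain * dt,
            y: pose.position.y + sample.fy * gain * dt,
            z: pose.position.z + sample.fz * gain * dt,
        },
        rotation: {
            rx: pose.rotation.rx + sample.tx * gain * dt,
            ry: pose.rotation.ry + sample.ty * gain * dt,
            rz: pose.rotation.rz + sample.tz * gain * dt,
        },
    };
}

let pose: Pose = { position: { x: 0, y: 0, z: 0 }, rotation: { rx: 0, ry: 0, rz: 0 } };
pose = applyRateControl(pose, { fx: 10, fy: 0, fz: 0, tx: 0, ty: 0, tz: 5 }, 16); // push while twisting
pose = applyRateControl(pose, { fx: 0, fy: 0, fz: 0, tx: 0, ty: 0, tz: 0 }, 16);  // released: neutral, nothing moves
console.log(pose);

Because the axes are integrated separately, "doing one thing" with the hand can still drive several degrees of freedom at once, which is the chunking point made in the surrounding notes.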
Finally, it is important to add one more take-away – one which both emerges from, and is frequently encountered when discussing, the previous two. That is, do not equate exercising simultaneous control over a high number of DOF with consciously doing the same number of different things all at once. The example that used to be thrown at me when I started talking about coordinated simultaneously bi-manual action went along the lines of, “Psychology tells us that we cannot do multiple things at once, for example, simultaneously tapping your head and rubbing your stomach. ”Well, first, I can tap my head with one hand while rubbing my stomach with the other. But that is not the point. The whole essence of skill – motor-sensory and cognitive – is “chunking” or task integration. When one appears to be doing many different things at once, if they are skilled, they are consciously doing only one thing. Playing a chord on the piano, for example, or skiing down the hill. Likewise, in flying your imaginary BAT in the previous exercise with the scroll wheel, were you doing 7 things at once, or one thing with 7 DOF? And if you had a Bat in each hand, does that mean you are now doing 14 things at once, or are you doing one thing with 14 DOF? Let me provide a different way of answering this question: if you have ever played air guitar, or “conducted” the orchestra that you are listening to on the radio, you are exercising control over more than 14 DOF. And you are doing exactly what I just said, “playing air guitar” or “conducting an orchestra”. One thing – at the conscious level, which is what matters – despite almost any one thing being able to be deconstructed into hundreds of sub-tasks. As I said the essence of skill: aggregation, or chunking. What is most important for both tool designers and users to be mindful of, is the overwhelming influence that our choice and design of tools impacts the degree to which such integration or chunking can take place. The degree to which the tool matches both the skills that we have already acquired through a lifetime of living in the everyday world, and the demands of the intended task, the more seamless that task can be performed, the more “natural” it will feel, and the less learning will be required. In my experience, it brought particular value when used bimanually, in combination with a mouse, where the preferred hand performed conventional pointing, selection and dragging tasks, while the non-preferred hand could manipulate the parameters of the thing being selected. First variation of the since the 2001 formation of 3Dconnextion. The CadMan came in 5 colours: smoke, orange, red, blue and green. See the notes for the LogiCad3D Magellan for more details on this class of device. It is the “parent” of the CadMan, and despite the change in company name, it comes from the same team.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_65cb89ee-88a5-48d9-ac89-194a0ae41f98.jpeg" + ] + }, + { + "title": "Adesso ACK-540UB USB Mini-Touch Keyboard with Touchpad", + "company": "Adesso", + "year": 2005, + "primaryKey": [ + "Keyboard" + ], + "secondaryKey": [ + "Pad", + "Touch" + ], + "originalPrice": 59.95, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 287, + "dim_width": 140, + "dim_height": 35.5, + "dim_unit": "mm" + }, + "shortDescription": "The Mini-Touch Keyboard is a surprisingly rare device: a laptop-style, small-footprint keyboard with a centrally mounted touch-pad. 
.", + "longDescription": "First released in 2003 with a PS/2 connector (ACK-540PW & ACK-540PB). USB version released in 2006 in either black (ACK-540UB) or white (ACK-540UW). Marketed under different brands, including SolidTek: http: //www. tigerdirect. com/applications/searchtools/item-details. asp? EdpNo=1472243https: //acecaddigital. com/index. php/products/keyboards/mini-keyboards/kb-540 Deltaco: https: //www. digitalimpuls. no/logitech/116652/deltaco-minitastatur-med-touchpad-usb", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_fdcdb560-03d7-4d96-8653-eb7497a5a6be.jpeg", + "http://localhost:1050/files/images/buxton/upload_22af6f24-1bd7-497c-a111-e3536790ab31.jpeg", + "http://localhost:1050/files/images/buxton/upload_d942b775-5c7f-4899-8eae-cb8f956df846.jpeg", + "http://localhost:1050/files/images/buxton/upload_94d78d8e-ff44-42a2-93d3-2c6ddf395783.jpeg", + "http://localhost:1050/files/images/buxton/upload_5e9b5174-e6ba-4182-a8c9-fd1081e635e8.jpeg" + ] + }, + { + "title": "Braun AG T3 Transistor Radio", + "company": "Braun AG", + "year": 1958, + "primaryKey": [ + "Radio" + ], + "secondaryKey": [ + "Handheld", + "Object", + "Reference" + ], + "originalPrice": 28.57, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 152, + "dim_width": 41, + "dim_height": 83, + "dim_unit": "mm" + }, + "shortDescription": "The 1958 Braun T3 transistor radio, designed by Dieter Rams Dieter Rams in conjunction with the Ulm Hochschüle fur Gestaltung (School of Design). An excellent example of the international style of design of the mid-20th century, the T3 radio was the inspiration for the design language of the Apple iPod Classic.", + "longDescription": "The 1958 Braun T3 transistor radio is a classic of the international design style prevalent in the mid-20th century. By its sparse clean lines, it shares characteristics of the style seen in another familiar example, the font Helvetic, which was designed the previous year. The T3 was designed by Dieter Rams, recruited by Braun in 1955, in collaboration with the Ulm Hochschüle fur Gestaltun. . Its design language had a strong influence on that of the original Apple iPod Classic. The connection is made more obvious if one views the radio rotated 90° clockwise, as in one of the accompanying photographs. 
Here one can easily see the the similarity of proportions, uniformity of colour, angle of corners, location of display (audio versus visual), and the use of a flush rotary wheel controller.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_c405d7d8-4e1f-4273-9b25-d2c9fede8ca4.jpeg", + "http://localhost:1050/files/images/buxton/upload_88a638bd-f1b8-4b27-b004-29595e756f99.jpeg", + "http://localhost:1050/files/images/buxton/upload_01bbb598-5ddd-49d3-9844-2bdc4a1ce3c0.jpeg", + "http://localhost:1050/files/images/buxton/upload_4c362bd9-0ad2-463b-b3e8-363eb84f6975.jpeg", + "http://localhost:1050/files/images/buxton/upload_6e5ba351-f26a-4926-9e2b-d1673e990cf2.jpeg", + "http://localhost:1050/files/images/buxton/upload_c073327a-cbcf-40f1-96f8-cd1733e4da97.jpeg", + "http://localhost:1050/files/images/buxton/upload_0133a88b-bb20-44b1-9f5f-3825c928fa4a.jpeg", + "http://localhost:1050/files/images/buxton/upload_609a8e9f-d3cb-4f06-929c-c0fc105b4d15.jpeg", + "http://localhost:1050/files/images/buxton/upload_0b06ad42-6cd3-460d-9f85-019dfc4834e8.jpeg", + "http://localhost:1050/files/images/buxton/upload_339e2c3d-2422-485b-a75f-e2f91409616f.jpeg", + "http://localhost:1050/files/images/buxton/upload_ff5bcbf8-cebd-4e54-a957-133b39e30ed9.jpeg" + ] + }, + { + "title": "Casio CZ-101 Digital Synthesizer", + "company": "Casio", + "year": 1984, + "primaryKey": [ + "Synthesizer" + ], + "secondaryKey": [ + "Chord", + "Keyboard", + "Object", + "Reference", + "Wheel" + ], + "originalPrice": 499, + "degreesOfFreedom": 1, + "dimensions": { + "dim_length": 20, + "dim_width": 65.7, + "dim_height": 58, + "dim_unit": "mm" + }, + "shortDescription": "One of the first programable polyphonic (8 simultaneous voices) digital synthesizers for less than $500. 00. Used a form of digital synthesis known as Phase Distortion to obtain a rich variety of dynamic timbres. Could be used with batteries or plugged in to power. This one was given to me at the product launch.", + "longDescription": "One of the first programable polyphonic (8 simultaneous voices) digital synthesizers for less than $500. 00. Used a form of digital synthesis known as Phase Distortion to obtain a rich variety of dynamic timbres. Could be used with batteries or plugged in to power. This one was given to me at the product launch. The inclusion of this synthesizer in the collection is as a small reminder of the diversity of keyboard types, and especially, as an example to shed light on chord keyboards. In entering text, for example, chord keyboards are those where more than one key must be simultaneously pressed to enter a single character. Technically, this includes any keyboard with a SHIFT key. Interestingly, piano-type like keyboards like that on the Casio-CZ-101 probably don’t conform to this definition of chording, despite its ability to play musical chords. On the other hand, flutes and trumpets definitely do fall within the definition. Why? With piano-like keyboards, each unique note has a single unique key dedicated to it. When one plays a chord, i. e. , simultaneously presses multiple keys, the result is a chord of notes – the note associated with each depressed key sounds. On the other hand, with trumpet valves or flute keys, only one note is produced at a time. It is the combination of keys pressed (coupled with breath) which determines the pitch of that single note. 
This is far closer to entering text with a chord keyboard, where each chord enters a single unique character.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_071a80d0-10e7-4c88-9b98-06c8f00cd651.jpeg", + "http://localhost:1050/files/images/buxton/upload_b30deca6-f704-4a2e-a211-a357b7a8cd7f.jpeg", + "http://localhost:1050/files/images/buxton/upload_7170825e-2362-41d6-8793-9da98c4377fb.jpeg", + "http://localhost:1050/files/images/buxton/upload_c81c1438-bc91-4278-8376-0a6daf152bdc.jpeg", + "http://localhost:1050/files/images/buxton/upload_4b6cd630-d985-4851-9a5b-ae8321684cd1.jpeg", + "http://localhost:1050/files/images/buxton/upload_2de8214f-1b39-425f-bf48-a26914028956.jpeg", + "http://localhost:1050/files/images/buxton/upload_2956e41d-3b23-455d-90cc-5fdffabfb40c.jpeg" + ] + }, + { + "title": "Contour Design UniTrap ", + "company": "Contour Design", + "year": 1999, + "primaryKey": [ + "Re-skin" + ], + "secondaryKey": [ + "Mouse" + ], + "originalPrice": 14.99, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 130.5, + "dim_width": 75.7, + "dim_height": 43, + "dim_unit": "mm" + }, + "shortDescription": "This is a plastic shell within which the round Apple iMac G3 “Hockey Puck” mouse can be fit. While the G3 Mouse worked well mechanically, when gripped its round shape gave few cues as to its orientation. Hence, if you moved your hand up, the screen pointer may well have moved diagonally. By reskinning it with the inexpensive Contour UniTrap, the problem went away without the need to buy a whole new mouse.", + "longDescription": "Also add back pointers from devices re-skinned", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_95ffc881-bd56-4234-9a91-6b354e5a8811.jpeg", + "http://localhost:1050/files/images/buxton/upload_4539a934-9769-4138-8956-9f987693eada.jpeg", + "http://localhost:1050/files/images/buxton/upload_d3a3623d-5084-45c5-baf6-83580e45f8f9.jpeg", + "http://localhost:1050/files/images/buxton/upload_fdba55ec-2b46-433f-ad27-899e6774ed30.jpeg", + "http://localhost:1050/files/images/buxton/upload_53ef39dd-0cd2-4a61-8269-753a85fff568.jpeg", + "http://localhost:1050/files/images/buxton/upload_b57d9249-95ea-4b9b-a7e5-a6aab2dfd474.jpeg", + "http://localhost:1050/files/images/buxton/upload_09ae658a-e8b6-4e37-b83b-af6264d6b926.jpeg", + "http://localhost:1050/files/images/buxton/upload_9a067bf4-0c21-4dce-a149-9f3224c8a8b3.jpeg", + "http://localhost:1050/files/images/buxton/upload_dbacb2b3-eeb4-45bf-b72d-73ac36620a60.jpeg", + "http://localhost:1050/files/images/buxton/upload_241b31c3-1e11-4303-a445-44e500b3126f.jpeg", + "http://localhost:1050/files/images/buxton/upload_193ed17e-8291-4363-8f44-55b2a5f812af.jpeg", + "http://localhost:1050/files/images/buxton/upload_20f73424-c824-4eb0-b555-655dd718db7d.jpeg", + "http://localhost:1050/files/images/buxton/upload_8fc9b3b4-ce66-47b9-b94d-3ed14f8e9751.jpeg", + "http://localhost:1050/files/images/buxton/upload_ffaedf7e-4189-4f70-bd5e-1a08ba2ea8ba.jpeg" + ] + }, + { + "title": "Depraz Swiss Mouse", + "company": "Depraz", + "year": 1980, + "primaryKey": [ + "Mouse" + ], + "secondaryKey": [ + "Ball", + "Chord", + "Keyboard", + "Mouse" + ], + "originalPrice": 295, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 50.8, + "dim_width": 76.2, + "dim_height": 114.3, + "dim_unit": "mm" + }, + "shortDescription": "This mouse is one of the first commercially available mice to be sold publicly. It is known as the Swiss mouse, and yes, the roller mechanism was designed by a Swiss watchmaker. 
Coincidentally, the company that made it, Depraz, is based in Apples, Switzerland. Their success in selling this mouse is what caused Logitech to switch from a software development shop to one of the world’s leading suppliers of mice and other input devices.", + "longDescription": "DePraz began manufacturing in 1980, but following a design built in 1979. Logitech started selling it in 1982. It was one of the first mass-produced mice, one of the first available ball mice, and one of the first to have an optical shaft encoder – thereby improving linearity. An interesting fact, given its Swiss heritage, is that its designer, André Guignard, was trained as a Swiss watchmaker. Unlike most modern mice, the DePraz, or “Swiss”, mouse had a quasi-hemispherical shape. Hence, it was held in a so-called “power-grip”, much as one would grip a horizontally held ball – the thumb and small finger applying pressure on each side, with added support from the weight/friction of the palm on the back of the mouse. In this posture, the three middle fingers naturally position themselves over the three buttons mounted at the lower edge of the front. Largely freed of grip pressure, by grace of thumb and little finger, the middle fingers were essentially free to operate the buttons independently. With each button having a dedicated finger, the buttons could easily be pushed independently or in any combination. Like the three valves on a trumpet, this ability to “chord” extended the three physical buttons to have the power of seven. The downside of this “turtle shell” form factor is that it placed the hand in a posture in which mouse movement relied more on the larger muscle groups from arm to wrist, rather than wrist to fingers – the latter being the approach taken in most subsequent mice. The original Swiss Mouse was developed at École Polytechnique Fédérale de Lausanne in a project led by Jean-Daniel Nicoud, who was also responsible for the development of its optical shaft encoder. To augment their revenue stream, Logitech, then a software and hardware consulting company for the publishing industry, acquired marketing rights for North America. Mouse revenue quickly overshadowed that from software. In 1983, Logitech acquired DePraz, named the Swiss Mouse the “P4”, and grew to become one of the largest input device manufacturers in the world. 
One curious coincidence is that they were founded in the town of Apples, Switzerland.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_a4e54d14-93b4-4e7e-8ca3-404679b3610d.jpeg", + "http://localhost:1050/files/images/buxton/upload_2c399205-2e53-4601-a2f4-8f30ae8645c0.jpeg", + "http://localhost:1050/files/images/buxton/upload_979acbde-8d24-43c7-a070-2911f9fe625a.jpeg", + "http://localhost:1050/files/images/buxton/upload_ed8a92e2-cd5f-4b66-959f-57ecd67043fd.jpeg", + "http://localhost:1050/files/images/buxton/upload_7e2fbd60-1fe3-4ed4-9db8-ff377591abab.jpeg", + "http://localhost:1050/files/images/buxton/upload_568a763c-6d2e-49a9-b1fe-dbbac19ca1c4.jpeg", + "http://localhost:1050/files/images/buxton/upload_7ae37170-c881-407e-9f50-e4b04ed6444b.jpeg", + "http://localhost:1050/files/images/buxton/upload_b7318579-d4d6-4a79-b335-f1836bf6dd78.jpeg", + "http://localhost:1050/files/images/buxton/upload_10c77ad9-a215-465e-b413-25aa7d903924.jpeg", + "http://localhost:1050/files/images/buxton/upload_89acc088-062c-4469-8781-fbceb9e8485a.jpeg", + "http://localhost:1050/files/images/buxton/upload_92fea7aa-b629-450b-bd79-5ff0bbea33cd.jpeg" + ] + }, + { + "title": "FingerWorks TouchStream LP", + "company": "FingerWorks", + "year": 2002, + "primaryKey": [ + "Keyboard" + ], + "secondaryKey": [ + "Foldable", + "Gesture", + "Keyboard", + "Multi-touch", + "Reskin", + "Touchpad" + ], + "originalPrice": 339, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 180, + "dim_width": 140, + "dim_height": 9, + "dim_unit": "mm" + }, + "shortDescription": "The TouchStream is a keyboard based on a pair of multi-touch pads. These can sense key taps and finger gestures. The “keys” are graphic. They are flush with the pad and have no mechanical movement. There are however, small raised points to help position the hands on the keyboard eyes-free typing, but these still allow the fingers slide easily on the surface when gesturing, such as when emulating a mouse. . The keyboard is independent of the base. It can be folded in half for compact portability. It can also be placed conveniently over a laptop’s keyboard as a replacement which then also enables the gesture enhancements to be used on the road. Although not obvious to the eye, this is the core technology which, after being acquired by Apple, evolved into the iPhone’s multi-touch capability.", + "longDescription": "Named FingerBoard during development, this product was relabeled TouchStream in October 2001 as the release date approached. 
when finally shipped, was renamed TouchStreamThe very rare original stand for this device was a gift from Sean Gerety, Atlanta, GA.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_27c393d7-ab12-4684-b5ac-3ff810565c80.jpeg", + "http://localhost:1050/files/images/buxton/upload_4137999f-13bf-4106-83cf-bf68901b74fe.jpeg", + "http://localhost:1050/files/images/buxton/upload_1bea7829-9a36-4446-acc3-5d6e5c3e89da.jpeg", + "http://localhost:1050/files/images/buxton/upload_dc79cd33-92d7-4efa-b15c-8a78b4dc805b.jpeg", + "http://localhost:1050/files/images/buxton/upload_dbd7e531-c4c1-44f8-93eb-54d06cf59e02.jpeg", + "http://localhost:1050/files/images/buxton/upload_7f467bf9-c508-4879-b115-756a9649755c.jpeg", + "http://localhost:1050/files/images/buxton/upload_faac6f26-8082-4f55-b98e-7e7d91f64ea0.jpeg", + "http://localhost:1050/files/images/buxton/upload_1339552a-d9ce-4690-945c-e357f1bf52f4.jpeg", + "http://localhost:1050/files/images/buxton/upload_1b8dff2d-2b84-4748-8b12-5102f9e3fa32.jpeg", + "http://localhost:1050/files/images/buxton/upload_d4e40dcf-a7fe-47c2-a7c8-9f9c74c60047.jpeg", + "http://localhost:1050/files/images/buxton/upload_7cac0f8d-e2af-445f-95d4-b18e28da44c2.jpeg", + "http://localhost:1050/files/images/buxton/upload_b7f1da77-e56a-406d-af43-53fe54ff71ee.jpeg", + "http://localhost:1050/files/images/buxton/upload_6e568712-77d3-4a50-8fdb-9cd997b0b881.jpeg" + ] + }, + { + "title": "One Laptop Per Child (OLPC) XO-1", + "company": "One Laptop Per Child (OLPC)", + "year": 2007, + "primaryKey": [ + "Computer" + ], + "secondaryKey": [ + "Keyboard", + "Laptop", + "Pad", + "Slate", + "Touch" + ], + "originalPrice": 199, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 242, + "dim_width": 228, + "dim_height": 30, + "dim_unit": "mm" + }, + "shortDescription": "The OLPC XO-1 is very innovative device that nevertheless raises serious issues about technology and social responsibility. It is included in the collection primarily as a warning against technological hubris, and the fact that no technologies are neutral from a social-cultural perspective.", + "longDescription": "IntroductionI have this computer in my collection as a reminder of the delicate relationship between object and purpose, and how no matter how well one does on the former, it will likely have no impact on making a wanting concept achieve the stated (and even valid) purpose any better. I include it in the collection as a cautionary tale of how the object may help sell a concept, regardless how ill-conceived – even to those who should know better, had they applied the most basic critical thinking. For consumers, investors and designers, its story serves as a cautionary reminder to the importance of cultivating and retaining a critical mind and questioning perspective, regardless of how intrinsically seductive or well-intentioned a technology may be. From the perspective of hardware and software, what the One Laptop Per Child (OLPC) project was able to accomplish is impressive. In general, the team delivered a computer that could be produced at a remarkably low price – even if about double that which was targeted. Specifically, the display, for example, is innovative, and stands out due to its ability to work both in the bright sun (reflective) as well as in poorly lit spaces (emissive) – something that goes beyond pretty much anything else that is available on today’s (2017) slate computers or e-readers. 
In short, some excellent work went into this machine, something that is even more impressive, given the nature of the organization from which it emerged. The industrial design was equally impressive. Undertaken by Yves Behar’s FuseprojectUltimately, however, the machine was a means to an end, not the end itself. Rather than a device, the actual mission of the OLPC project was: … to empower the world's poorest children through education. Yet, as described by in their materials, the computer was intended to play a key role in this: With access to this type of tool [the computer], children are engaged in their own education, and learn, share, and create together. They become connected to each other, to the world and to a brighter future. Hence, making a suitable computer suitable to that purpose and the conditions where it would be used, at a price point that would enable broad distribution, was a key part of the project. The Underlying Belief System of the OLPC ProjectSince they are key to the thinking behind the OLPC project, I believe if fair to frame my discussion around the following four questions: Will giving computers to kids in the developing world improve their education? Will having a thus better-educated youth help bring a society out of poverty? Can that educational improvement be accomplished by giving the computers to the kids, with no special training for teachers? Should this be attempted on a global scale without any advance field trials or pilot studies? From the perspective of the OLPC project, the answer to every one of these questions is an unequivocal “yes”. In fact, as we shall see, any suggestion to the contrary is typically answered by condescension and/or mockery. The answers appear to be viewed as self-evident and not worth even questioning. Those who have not subscribed to this doctrine might call such a viewpoint hubris. What staggers me is how the project got so far without the basic assumptions being more broadly questioned, much less such questions being seriously addressed by the proponents. How did seemingly otherwise people commit to the project, through their labour or financial investment, given the apparently naïve and utopian approach that it took? Does the desire to do good cloud judgment that much? Are we that dazzled by a cool technology or big hairy audacious goal? Or by a charismatic personality? To explain my concern, and what this artifact represents to me, let me just touch on the four assumptions on which the project was founded. Will giving computers to kids in the developing world improve education? The literature on this question is, at best, mixed. What is clear is that one cannot make any assumption that such improvements will occur, regardless of whether one is talking about the developing world or suburban USA. For example, in January 2011, The World Bank published the following study: Can Computers Help Students Learn? From Evidence to Policy, January 2011, Number 4, The World Bank. A public-private partnership in Colombia, called Computers for Education, was created in 2002 to increase the availability of computers in public schools for use in education. Since starting, the program has installed more than 73, 000 computers in over 6, 300 public schools in more than 1, 000 municipalities. By 2008, over 2 million students and 83, 000 teachers had taken part. This document reports on a two-year study to determine the impact of the program on student performance. 
Students in schools that received the computers and teacher training did not do measurably better on tests than students in the control group. Nor was there a positive effect on other measures of learning. Researchers did not find any difference in test scores when they looked at specific components of math and language studies, such as algebra and geometry, and grammar and paraphrase ability in Spanish. But report also notes that results of such studies are mixed: Studies on the relationship between using computers in the classroom and improved test scores in developing countries give mixed results: A review of Israel’s Tomorrow-98 program in the mid-1990s, which put computers in schools across the country, did not find any impact on math and Hebrew language scores. But in India, a study of a computer-assisted learning program showed a significant positive impact on math scores. One thing researchers agree on, more work is needed in this field. Before moving on, a search of the literature will show that these results are consistent with those that were available in the literature at the time that the project was started. The point that I am making is not that the OLPC project could not be made to work; rather, that it was wrong to assume that it would do so without spending at least as much time designing the process to bring that about, as was expended designing the computer itself. Risk is fine, and something that can be mitigated. But diving in under the assumption that it would just work is not calculated risk, it is gambling - with other people’s lives, education and money. Will a better educated population help bring a society out of poverty? I am largely going to punt on this question. The fact is, I would be hard pressed to argue against education. But let us grant that improving education in the developing world is a good thing. The appropriate question is: is the approach of the OLPC project a reasonable or responsible way to disburse the limited resources that are available to address the educational challenges of the developing world? At the very least, I would suggest that this is a topic worthy of debate. An a priori assumption that giving computers is the right solution is akin to the, “If you build it they will come” approach seen in the movie, Field of Dreams. The problem here is that this is not a movie. There are real lives and futures that are at stake here – lives of those who cannot afford to see the movie, much less have precious resources spent on projects that are not well thought through. Can that improvement be accomplished by just giving the computers to the kids without training teachers? Remarkably, the OLPC Project’s answer is an explicit, “Yes”. In a TED talk filmed in December 2007, the founder of the OLPC initiative, Nicholas Negroponte states: “When people tell me, you know, who’s going to teach the teachers to teach the kids, I say to myself, “What planet do you come from? ” Okay, there’s not a person in this room [the TED Conference], I don’t care how techy you are, there’s not a person in this room that doesn’t give their laptop or cell phone to a kid to help them debug it. Okay, we all need help, even those of us who are very seasoned. ”Let us leave aside the naïvete of this statement stemming from the lack of distinction between ability to use applications and devices versus the ability to create and shape them. A failure of logic remains in that those unseasoned kids are part of “us”, as in “we all need help”. Where do the kids go for help? 
To other kids? What if they don’t know? Often they won’t. After all, the question may well have to do with a concept in calculus, rather than how to use the computer. What then? No answer is offered. Rather, those who dare raise the serious and legitimate concerns regarding teacher preparation are mockingly dismissed as coming from another planet! Well, perhaps they are. But in that case, there should at least be some debate as to who lives on which planet. Is it the people raising the question or the one dismissing the concern that lives in the real world of responsible thought and action? Can this all be accomplished without any advance field trials? Should one just immediately commit to international deployment of the program? As recently as September 2009, Negroponte took part in a panel discussion where he spoke on this matter. He states: I'd like you to imagine that I told you \"I have a technology that is going to change the quality of life. \" And then I tell you \"Really the right thing to do is to set up a pilot project to test my technology. And then the second thing to do is, once the pilot has been running for some period of time, is to go and measure very carefully the benefits of that technology. \"And then I am to tell you that what we are going to is very scientifically evaluate this technology, with control groups - giving it to some, giving it to others. And this all is very reasonable until I tell you the technology is electricity. And you say \"Wait, you don't have to do that!\"But you don't have to do that with laptops and learning either. And the fact that somebody in the room would say the impact is unclear is to me amazing - unbelievably amazing. There's not a person in this room who hasn't bought a laptop for their child, if they could afford it. And you don't know somebody who hasn't done it, if they can afford it. So there's only one question on the table and that's, “How to afford it? ” That's the only question. There is no other question - it's just the economics. And so, when One Laptop Per Child started, I didn't have the picture quite as clear as that, but we did focus on trying to get the price down. We did focus on those things. Unfortunately, Negroponte demonstrates his lack of understanding of both the history of electricity and education in this example. His historical mistake is this: yes, it was pretty obvious that electricity could bring many benefits to society. But what happened when Edison did exactly what Negroponte advocates? He almost lost his company due to his complete (but mistaken) conviction that DC, rather the AC was the correct technology to pursue. As with electricity, yes, it is rather obvious that education could bring significant benefits to the developing world. But in order to avoid making the same kind of expensive mistake that Edison did, perhaps one might want to do one’s best to make sure that the chosen technology is the AC, rather than DC, of education. A little more research, and a little less hubris might have put the investments in Edison and the OLPC to much better use. But the larger question is this: in what way is it responsible for the wealthy western world to advocate an untested and expensive (in every sense) technological solution on the poorest nations in the world? If history has taught us anything, it has taught us that just because our intentions are good, the same is not necessarily true for consequences of our actions. 
Later in his presentation, Negroponte states: … our problems are swimming against very naïve views of education. With this, I have to agree. It is just whose views on education are naïve, and how can such views emerge from MIT, no less, much less pass with so little critical scrutiny by the public, the press, participants, and funders? In an interview with Paul Marks, published in the New Scientist in December 2008, we see the how the techno-centric aspect of the project plays into the ostensible human centric purpose of the project. Negroponte’s retort regarding some of the initial skepticism that the project provoked was this: “When we first said we could build a laptop for $100 it was viewed as unrealistic and so 'anti-market' and so 'anti' the current laptops which at the time were around $1000 each, \" Negroponte said. \"It was viewed as pure bravado - but look what happened: the netbook market has developed in our wake. \" The project's demands for cheaper components such as keyboards, and processors nudged the industry into finding ways to cut costs, he says. \"What started off as a revolution became a culture. \"Surprise, yes, computers get smaller, faster, and cheaper over the course of time, and yes, one can even grant that the OLPC project may have accelerated that inevitable move. And, I have already stated my admiration and respect for the quality of the technology that was developed. But in the context of the overall objectives of the project, the best that one can say is, “Congratulations on meeting a milestone. ” However, by the same token, one might also legitimately question if starting with the hardware was not an instance of putting the cart before the horse. Yes, it is obviously necessary to have portable computers in the first place, before one can introduce them into the classroom, home, and donate them to children in the developing world. But it is also the case that small portable computers were already in existence and at the time that the project was initiated. While a factor of ten more expensive than the eventual target price, they were both available and adequate to support limited preliminary testing of the underlying premises of the project in an affordable manner. That is, before launching into a major - albeit well-intentioned – hardware development project, it may have been prudent to have tested the underlying premises of its motivation. Here we have to return to the raison d’être of the initiative: … to empower the world's poorest children through educationHence, the extent to which this is achieved from a given investment must be the primary metric of success, as well as the driving force of the project. Yet, that is clearly not what happened. Driven by a blind Edisonian belief in their un-tested premise, the project’s investments were overwhelmingly on the side of technology rather than pedagogy. Perhaps the nature and extent of the naïve (but well-meaning) utopian dream underlying the project is captured in the last part of the interview, above: Negroponte believes that empowering children and their parents with the educational resources offered by computers and the Internet will lead to informed decisions that improve democracy. Indeed, it has led to some gentle ribbing between himself and his brother: John Negroponte - currently deputy secretary of state in the outgoing Bush administration and the first ever director of national intelligence at the National Security Agency. 
\"I often joke with John that he can bring democracy his way - and I'll bring it mine, \" he says. Apparently providing inexpensive laptops to children in the developing world is not only going to raise educational standards, eradicate poverty, it is also going to bring democracy! All that, with no mention of the numerous poor non-democratic countries that have literacy levels equal to or higher than the USA (Cuba might be one reasonable example). The words naïve technological-utopianism come to mind. I began by admitting that I was conflicted in terms of this project. From the purely technological perspective, there is much to admire in the project’s accomplishments. Sadly, that was not the project’s primary objective. What appears to be missing throughout is an inability to distinguish between the technology and the purpose to which is was intended to serve. My concern in this regard is reflected in a paper by Warschauer & Ames(2010). The analysis reveals that provision of individual laptops is a utopian vision for the children in the poorest countries, whose educational and social futures could be more effectively improved if the same investments were instead made on more sustainable and proven interventions. Middle- and high-income countries may have a stronger rationale for providing individual laptops to children, but will still want to eschew OLPC’s technocentric vision. In summary, OLPC represents the latest in a long line of technologically utopian development schemes that have unsuccessfully attempted to solve complex social problems with overly simplistic solutions. There is a delicate relationship between technology and society, culture, ethics, and values. What this case study reflects is the fact that technologies are not neutral. They never are. Hence, technological initiatives must be accompanied by appropriate social, cultural and ethical considerations – especially in projects such as this where the technologies are being introduced into particularly vulnerable societies. That did not happen here, The fact that this project got the support that it did, and has gone as far as it has, given the way it was approached, is why this reminder – in the form of this device – is included in the collection. And if anyone ever wonders why I am so vocal about the need for public discourse around technology, one need look no further than the OLPC project.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_df83dd41-0787-4729-b0f8-1feb5e0c999c.jpeg", + "http://localhost:1050/files/images/buxton/upload_7cf46050-92bb-467d-b10b-710ead4598fb.jpeg", + "http://localhost:1050/files/images/buxton/upload_518436fb-59bf-46ec-ad39-8bd6d7aa80a1.jpeg", + "http://localhost:1050/files/images/buxton/upload_14579515-1051-4f38-a816-128f3e5a2e33.jpeg", + "http://localhost:1050/files/images/buxton/upload_ffb0bfe5-936c-4c61-93f2-a9959f1d64e5.jpeg", + "http://localhost:1050/files/images/buxton/upload_6078b1e5-a996-494c-85ba-f8b9d07c97aa.jpeg", + "http://localhost:1050/files/images/buxton/upload_352ed224-547a-49c9-ad3a-2e7826ad20fa.jpeg", + "http://localhost:1050/files/images/buxton/upload_f3ca3ef5-61ca-4c77-9d9c-fed6b91b5f85.jpeg", + "http://localhost:1050/files/images/buxton/upload_e0391087-61a4-415a-84f3-15f38856d29c.jpeg", + "http://localhost:1050/files/images/buxton/upload_8980a470-05bc-4350-a357-ceb049b64b2f.jpeg", + "http://localhost:1050/files/images/buxton/upload_1b63c6c5-eb8e-4ef7-b2d2-55dca9d1a721.jpeg" + ] + }, + { + "title": "Blue Orb Inc. 
OrbiTouch", + "company": "Blue Orb Inc", + "year": 2002, + "primaryKey": [ + "Joystick" + ], + "secondaryKey": [ + "Keyboard" + ], + "originalPrice": 695, + "degreesOfFreedom": 4, + "dimensions": { + "dim_length": 482.6, + "dim_width": 228.6, + "dim_height": 74.2, + "dim_unit": "mm" + }, + "shortDescription": "On the one hand, this device has the overall footprint of a keyboard, and it is used to enter text. And yet, it is two wide, flat, spring-loaded, self-returning joysticks, which are used to enter characters, rather than the keys that we typically employ. To add to the unconventional nature of this device, one enters text via these two joysticks by means of something called radial menus, one for each hand. And, in keeping with many keyboards, such as those with an integrated touch pad, the OrbiTouch also enables mouse like capabilities, such as pointing and selecting, also by means of one of the joysticks.", + "longDescription": "Keyboards, Joysticks and Hierarchic Radial MenusIntroductionWhen you first look at this device, you might guess that it is some kind of keyboard. It even says so on the box and on the device itself. The keyboard-like footprint might reinforce this notion, as might the alphanumeric characters in the grey ring around the circular orb on the right-hand. On the other hand, if this is a keyboard, where are the keys? Reading the labels more carefully sheds light on the paradox: there are none. This is a “keyless keyboard. ” Yes, this is a contradiction in terms. But it is just such curiosities that make devices like this potentially interesting. Hence, we shall take a reasonably deep dive to see what might be revealed. Let’s start by trying to understand what the rationale was for landing on this particular design. The orbiTouch was developed by an industrial engineering doctoral student at the University of Central Florida, Peter McAlindon. His goal was to develop a means of text entry that minimized hand and wrist motion. The intent was to reduce the incidence of repetitive stress injury. A fair bit of research was undertaken between initial concept and commercial release. This can be accessed online, and doing so is a worthwhile exercise. Let us now turn our eye to the physical device in order to get a sense of where all of this landed. The Physical DeviceThe orbiTouch is dominated by two large circular “orbs. ” To my eye, their form initially practically screamed out, “I am a rotary control - Turn me!” However, appearances can be deceptive. Rather than dials, the orbs turn out to be a pair of a joysticks of a particular type. Rather than the stick-tilting motion typical of most, these “joysticks” are operated by moving them along the horizontal plane. In this they are a close cousins of the Altra Felix and KA Design Turbo Puck, both also in the collection. However, in contrast with the Felix and Turbo Puck, whose handles are “floating” (if you let go, they remain in the position where you released your grip), the orbs are “self-centering. ” That is, when released, internal springs return the orbs to their neutral central “home” position. In this, they behave much like the Gravis joystick in the collection, for example. At a finer level of detail, the orbs are specific class of joystick: “8-way joy-switches”. The term”8-way” indicates that only movement along the 8 main axes of the compass are sensed. As to the word “switch”, think of each orb as 8 switches, any one of which can be turned on by moving the orb in one of the 8 directions. 
(Conversely, they are turned off when the orb is released and returns to the home position). Unlike an analogue joystick, such switches do not, and cannot, report how far or fast the orb has moved in any particular direction, nor how much pressure might be applied in the process. While limited, joy-switches provide a less complex and lower-cost solution that is appropriate in situations where this additional data is not needed. There are several examples of joy-switches in the collection, especially video game controllers. One of the most iconic examples is the Atari CX-40 controller, which is a 4-way joy-switch. To recap, the orbiTouch is a bi-manual device for entering text by means of two orb-shaped planar-moving 8-way self-centering joy-switches. Having swallowed that mouthful, let us now explore how text is entered using such a transducer. Entering Text. In general, a character or function is input by moving the two orbs. Which character or function depends on the direction (if any) each of the orbs has moved. For example, if both the left and right orb move west (left), the character “a” is entered. On the other hand, if the right orb again moves west, but the left one east (right), then the character input is “e”. How or why this is the case can be explained with the help of some images. For easier reading, the figure below shows the labels around the orbs in an exploded view. Notice that for both orbs, there is a label segment for each of its 8 directions. Since the example discussed entering an “a” and an “e”, each of which involved the right orb moving west (left), let’s look at the associated label segment in even more detail. Like all of the label segments for the right orb, this one consists of six areas containing text, each with a distinct background colour: red, yellow, green, orange and blue for the letters A through E, respectively, and black for the region containing “BACKSPACE”. Now look again at the previous image and notice that each of these colours matches the label associated with one of the directions of the left orb. Text is entered using a two-part process. Moving the right orb to the left/west specifies that you are going to enter one of: a, b, c, d, e, or BACKSPACE. (Like most keyboards, despite the labels on the key-caps being upper case, lower-case characters are entered unless the shift key is depressed.) Moving the left orb in the direction whose label corresponds to the background colour of the desired character causes that character to be entered. Hence, with the right orb held in the left/west position, one can enter the sequence, “abcde”, followed by a Backspace, by sequentially moving the left orb west (red), north-west (yellow), north (green), north-east (orange), east (blue) and south (black). The same technique can then be used to access all the characters and commands found in the right orb’s labels. Special Modes. There is one thing to add at this point: while entering printing characters always requires the use of both orbs, some actions can be performed using the left orb only. This can be inferred from the text that accompanies some of the left orb’s labels. For example, moving the left orb north (green) twice in quick succession (analogous to a double-click on a mouse) indicates that SHIFT will apply to the next character entered. Likewise, doing the same thing in the south-west (grey) direction applies the Caps Lock mode, i.e., SHIFT will be applied to all subsequent entries until the mode is cancelled. 
These one-handed special modes/functions are summarized in the image below. Of these, the only one that I want to discuss at the moment is the ability of the orbiTouch to switch from entering text to controlling the screen cursor. This is done by moving the left orb south (black) twice in quick succession. When this is done, the right orb controls the cursor movement – the cursor moves continuously in the direction that you move the orb. In this, any doubts that you had about me characterizing the orbs as joysticks should disappear, since this cursor control is classic joystick behaviour. One issue of note is that the label describes this as “mouse” not “joystick”, which, while understandable, is incorrect. Finally, before moving on to the next topic, note that while the right orb controls the movement of the screen cursor in mouse mode, movement of the left orb to the left/west or right/east is taken as a left or right mouse button press, respectively. Remember that the premise here is that the hands don’t have to move from the orbiTouch in order to switch between typing and pointing tasks. But that doesn’t mean that the overhead in switching between the tasks is removed. One type of overhead is just substituted for another. And, the moded nature of the orbiTouch means that the option of parallel pointing-typing actions is eliminated. Rather than as criticism, I mention these points to indicate the need to be mindful of the trade-offs and consequences of different design decisions - consequences that the designer should be aware of. Going Meta: What’s Really Going On? I want to approach this by stepping back and examining the underlying method of “typing” by going “meta”. That is, I want to jump up a level of abstraction, beyond the physical device (for the moment), and explain what is going on at the conceptual level. The rest of the text is in much rougher form …. What will be revealed, if we do so, is that text is entered by means of the parallel use of two 8-direction radial menus. So what is a radial menu? These are the neglected cousins of the linear menus that populate conventional graphical user interfaces. The difference is that one makes a selection by the direction of movement, rather than the distance (as is the case with linear menus). It turns out that people can learn these quickly if the directions correspond to the 8 main points of the compass. For example, in a program menu, moving up (North) might mean Print, down (South) could mean Save, and moving down to the right (South East), Save As. Like linear menus, these menus can also be hierarchic. So, for example, after moving South East in order to specify Save As, a stroke to the left (West) might mean that it should be saved as a PDF file, whereas it would be saved as a Plain Text file if the secondary connected stroke was to the right (East). The reason for this brief tutorial on radial menus is that they pretty much define at the conceptual level how text is entered using the orbiTouch. The eight directions in which you can move the orbs define the menu item selected. And, by having the actual output depend on the combination of the selections made by each of the two orbs, the device can perhaps be best described as entering text using a two-level hierarchic radial menu, where menu selections are made using two planar-moving 8-way joy-switches. That is quite a mouthful, and it has taken all of the text above to bring us to the point where there is a reasonable chance that it makes sense. 
And we still haven’t gotten into the details! It uses hierarchic (2-level) radial menus, but where the hierarchy is space multiplexed, rather than time multiplexed. That is, rather than doing one menu selection after the other, you do them simultaneously, by using a different hand to articulate the selection from each of the two menus. (While the text in the description is still sparse, look at the training cards, etc., and the photos on the page.) At the level of the mental model, there is no question in my mind (actually, I shouldn’t say that, because I am supposed to be an objective researcher who needs empirical data to inform decisions, but what the hell!) that you could give someone who knew how to use this device two isotonic joysticks, such as those used with a video game controller, and they would be able to enter text just as fast as with this device. Furthermore, if one had a slate capable of sensing both touch and stylus simultaneously, I am certain that the skill would transfer equally to using a touch radial gesture in the non-dominant hand, and a stylus (or touch) radial gesture with the other. At the basic level, it is a 2-level radial menu, but where each level is operated independently and quasi-simultaneously by a different one of the operator’s two hands. Level 1: Right Hand. This lets the operator select one of eight regions. The label for each region consists of 6 characters (5 printing and one “special”). In selecting one of the regions, one is not selecting any one of the characters of that region; rather, they are just indicating that the character that they want is one of the six in that region. Each of the characters in a region has a different background colour: blue, orange, green, yellow, red and black. Level 2: Left Hand. This lets the operator select one of eight regions. Each region is labeled by a single colour. Among the colours that label the eight regions are the same ones used as character background colours in the regions of the right-hand control: blue, orange, green, yellow, red and black. By the left hand selecting one of these six colours, one indicates which character is to be entered from among the six characters in the region indicated by the right hand – the selected character being the one whose background colour corresponds to the colour selected by the left hand. Hence, there are two 8-way, single-level radial menus used. I believe it fair to say that it is, nevertheless, a 2-level radial menu, since both need to be used in order to enter one token. In actual fact, things are more complex, since none of the above covers issues such as all of the special characters, punctuation, etc., that do not appear on the labels of the right hand. To keep things brief, this is why only 6 of the left-hand menu options are used in what is discussed above. The other two options are needed to fill in the gaps. And, even then, the device resorts to something like double-clicks to get special modes and capabilities. For example, double-clicking the black (south) region of the left hand turns the right-hand dome into a pointing device, i.e., a mouse substitute for pointing, etc. I went through the – as it turned out – interesting exercise of translating the two parallel depth-1 radial menus of the orbiTouch UI into two different depth-2, breadth-8 hierarchic radial menus. You can see them in the attached images. One uses the LH “dome” as the first-level selection, and then makes the second-level selection with the right-hand dome. 
The other does the opposite, i. e. , the right-hand dome selection is the first level. It is interesting to compare the two with each other, as well as with both the labeling on the orbiTouch and the Quickstart documentation: The RH level-1 version seems easier to get rudimentary understanding compared to the LH due to clustering of letters and numbers on outer menus. Likewise, for the special characters that are the upper case of the numbersThe physical device is fine for letting you hunt-and-peck, so to speak, for characters, but it is useless for numbers, and most special characters. The documentation provided with the Quick Start (attached is not especially useful in terms of providing heuristics for memorization. While the orbiTouch certainly uses radial menus, it decidedly does not employ marking menus. One of the key things missing is the ability to check and correct before committing to an input, and the lack of ability to backtrack to the start, and therefore abort without entering anything. One thing that I have learned from this exercise is the difference that results due to having self-returning joysticks. Gestures don’t have that attribute. It matters esp w. r. t. the last point. What I like about this story, is how looking at something seemingly very different at the right level of abstraction, teaches us/me something new about something I was supposed to be an expert in. That is, that 2-level hierarchic marking menus can be achieved by two simultaneous single-level MMs. This is why I have the collection, and why I love what I do. There is still delight, despite being a 63-year-old geezer grandfather. The orbiTouch Keyless Keyboard was first known as the Keybowl, and the company was formerly known as Keybowl Inc. , and then Blue Orb Inc.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_4810cb96-ee04-4f89-95cc-87baaa37b8d5.jpeg", + "http://localhost:1050/files/images/buxton/upload_8da6444e-d0d4-4975-a3fa-1f1ccae6b53b.jpeg", + "http://localhost:1050/files/images/buxton/upload_24a751d3-5364-4a17-941e-6c2b0a29377c.jpeg", + "http://localhost:1050/files/images/buxton/upload_b3706d22-b08d-4fac-af77-0cb9a5805476.jpeg", + "http://localhost:1050/files/images/buxton/upload_76c47d1b-963e-45e6-85ea-479155235eda.jpeg", + "http://localhost:1050/files/images/buxton/upload_5c85b83f-6b51-4f77-88e0-dbadfff84b43.jpeg", + "http://localhost:1050/files/images/buxton/upload_ed156a9a-d2cb-4d3e-834e-6f444f202ae9.jpeg", + "http://localhost:1050/files/images/buxton/upload_a19eb9d8-e25b-42bc-9828-2b74dfdf11d5.jpeg", + "http://localhost:1050/files/images/buxton/upload_7874c972-37aa-4fe1-854d-30e473d95c05.jpeg", + "http://localhost:1050/files/images/buxton/upload_08f18b47-0f58-45ac-8995-f673286563d3.jpeg", + "http://localhost:1050/files/images/buxton/upload_c4dc755b-7424-4758-8c3e-5e57a01341a1.jpeg", + "http://localhost:1050/files/images/buxton/upload_5e53cb03-6bee-45df-949e-8f5727e55a1b.jpeg", + "http://localhost:1050/files/images/buxton/upload_a2557ac5-1d4e-48c9-8b19-352cae25ec21.jpeg", + "http://localhost:1050/files/images/buxton/upload_eef6a171-ba65-4132-bce3-9066fdc7bc0b.jpeg", + "http://localhost:1050/files/images/buxton/upload_3624cda2-0500-457c-892d-5493955364d7.jpeg", + "http://localhost:1050/files/images/buxton/upload_08c68d86-da4a-4f4f-8daa-56d7def0c2bb.jpeg", + "http://localhost:1050/files/images/buxton/upload_b888548e-fe12-42b5-8afc-df312d6b6f75.jpeg", + "http://localhost:1050/files/images/buxton/upload_53f13990-2fb5-4d7e-be62-0d31833445da.jpeg", + 
"http://localhost:1050/files/images/buxton/upload_193fdbfe-cd1a-46f3-a1e9-08eedb8319de.jpeg", + "http://localhost:1050/files/images/buxton/upload_a1e95b03-dc01-4891-b17f-c8b88b32921f.jpeg", + "http://localhost:1050/files/images/buxton/upload_d7994f7e-58c8-4405-bd95-b1f925fe70b3.jpeg", + "http://localhost:1050/files/images/buxton/upload_5479f0b3-87c7-45fb-b086-b306d2414a44.jpeg", + "http://localhost:1050/files/images/buxton/upload_413df73a-ac67-4068-89be-57d3d122bbf0.jpeg" + ] + }, + { + "title": "TASA Model 55 ASCII Keyboard", + "company": "TASA (Touch Activated Switch Arrays)", + "year": 1979, + "primaryKey": [ + "Keyboard" + ], + "secondaryKey": [ + "Pad", + "Touch" + ], + "originalPrice": 80, + "degreesOfFreedom": 0, + "dimensions": { + "dim_length": 382.27, + "dim_width": 158.75, + "dim_height": 8.255, + "dim_unit": "mm" + }, + "shortDescription": "This touch-sensitive keyboard is especially suited for super clean environments, such as hospitals, and those which are just the opposite. The reason is that, being completely flat, there are no crack or gaps where dirt or bacteria can accumulate. This same property enables it to be easily cleaned. However, the reason that I got this keyboard because it was silent – there are no mechanical key-clicks. Hence, for example, it enabled me to soundlessly enter data to my digital musical instrument during a concert or while recording.", + "longDescription": "This is a solid-state touch-sensitive keyboard with no moving parts. Because its surface is flat, the only way one knows that it is a QWERTY keyboard is by the graphical representation on its surface. One types by placing one’s fingers on pictures of keys, rather than physical/mechanical keycaps. Because of the lack of the tactile feedback associated with conventional keyboards, as expected, typing speed and/or accuracy will be compromised with this keyboard. And yet, this keyboard brings real value in certain situations, and in so doing, it provides a good example of the rule: Everything is best for something and worst for something else. Because the is especially suited for super clean environments, such as hospitals, and those which are just the opposite. The reason is that, being completely flat, there are no crack or gaps where dirt or bacteria can accumulate. This same property enables it to be easily cleaned. However, the reason that I got this keyboard because it was silent – there are no mechanical key-clicks. Hence, for example, it enabled me to soundlessly enter data to my digital musical instrument during a concert or while recording. This is one of a number of capacitive touch-sensing input devices produced in the period around 1981 by Touch Activated Switch Arrays (TASA). The others included a touch-sensitive linear controller, the Ferinstat, which could function as a linear slider/fader, for applications such as audio or process control. These came in two lengths and are included in the collection. There were also the Model 16 Micro Proximity Keyboards, which were 16-button keyboards, arranged in a 4x4 array of touch-sensitive buttons that included a touch-sensitive numerical keypad. 
They also demonstrated a small, capacitive touch-sensitive touch pad, not unlike what one sees on today’s laptops, for example.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_c9fc05c4-868e-4723-b2b8-51ce4c6f3fd6.jpeg", + "http://localhost:1050/files/images/buxton/upload_9d120c30-bc36-46ef-b0b4-f34d9797606c.jpeg", + "http://localhost:1050/files/images/buxton/upload_de375a30-1d3e-4e1f-8324-ffff307c69e4.jpeg", + "http://localhost:1050/files/images/buxton/upload_c323f012-c284-4aa0-bbae-ee9faf9a0b85.jpeg", + "http://localhost:1050/files/images/buxton/upload_2855f368-040a-48e3-9162-a9873cdd4392.jpeg", + "http://localhost:1050/files/images/buxton/upload_54d09848-d253-4283-af11-29dd30f70771.jpeg", + "http://localhost:1050/files/images/buxton/upload_ead68112-d9f9-4ab5-aed0-764e14c254ae.jpeg", + "http://localhost:1050/files/images/buxton/upload_6ae5554e-abb0-48cd-b16a-7e5c713f8125.jpeg", + "http://localhost:1050/files/images/buxton/upload_0493351a-3be6-4f90-94b8-529d31c94cb2.jpeg", + "http://localhost:1050/files/images/buxton/upload_9a96a1dd-5e8c-4311-bf81-5e0398b6d1f5.jpeg", + "http://localhost:1050/files/images/buxton/upload_df2b52eb-b7a7-41ea-8e7a-9247c096ea40.jpeg" + ] + }, + { + "title": "HandyKey (TekGear) Twiddler ", + "company": "HandyKey (TekGear)", + "year": 1991, + "primaryKey": [ + "Chord", + "Keyboard" + ], + "secondaryKey": [ + "Gesture", + "Joystick", + "Keyboard", + "Reality", + "Virtual", + "Vr", + "Wearable" + ], + "originalPrice": 199, + "degreesOfFreedom": 2, + "dimensions": { + "dim_length": 128, + "dim_width": 45, + "dim_height": 50, + "dim_unit": "mm" + }, + "shortDescription": "The Twiddler is a one-hand chord keyboard with integrated pointing capability, which can control the cursor in a joystick-like manner. This was a favourite device of the early Cyborg wearable-computer community.", + "longDescription": "……. . Note: Lyons, et al. abstract: An experienced user of the Twiddler, a one--handed chording keyboard, averages speeds of 60 words per minute with letter--by--letter typing of standard test phrases. This fast typing rate coupled with the Twiddler's 3x4 button design, similar to that of a standard mobile telephone, makes it a potential alternative to multi--tap for text entry on mobile phones. Despite this similarity, there is very little data on the Twiddler's performance and learnability. We present a longitudinal study of novice users' learning rates on the Twiddler. Ten participants typed for 20 sessions using two different methods. Each session is composed of 20 minutes of typing with multi--tap and 20 minutes of one--handed chording on the Twiddler. We found that users initially have a faster average typing rate with multi--tap; however, after four sessions the difference becomes negligible, and by the eighth session participants type faster with chording on the Twiddler. 
Furthermore, after 20 sessions typing rates for the Twiddler are still increasing.", + "__images": [ + "http://localhost:1050/files/images/buxton/upload_87851e1d-d9ae-46f6-b5d1-667247a334c7.jpeg", + "http://localhost:1050/files/images/buxton/upload_86a88c71-2464-4918-afb4-9ecffe9ebbbb.jpeg", + "http://localhost:1050/files/images/buxton/upload_8c06fc22-ff8f-4386-a716-fffc90d7a5f2.jpeg", + "http://localhost:1050/files/images/buxton/upload_6e0f4cb4-3b2d-4ae5-8dcd-873a93f2ff61.jpeg", + "http://localhost:1050/files/images/buxton/upload_546e97ee-6c3b-4146-ba03-7a5f8cec7695.jpeg", + "http://localhost:1050/files/images/buxton/upload_df840af0-48a4-4a08-8893-89545c520b6d.jpeg", + "http://localhost:1050/files/images/buxton/upload_727448e6-2702-4462-bd21-a86836e9ae45.jpeg", + "http://localhost:1050/files/images/buxton/upload_acf24b43-16ba-48cc-b8aa-62d9d4209ca6.jpeg", + "http://localhost:1050/files/images/buxton/upload_a677ad5d-9200-4ac0-883d-da35bfce7c23.jpeg", + "http://localhost:1050/files/images/buxton/upload_2824f09a-95a2-4c16-86cb-41be8fa4ae46.jpeg", + "http://localhost:1050/files/images/buxton/upload_ac7bdaf5-8cf6-44ab-a767-74a8ef172980.jpeg", + "http://localhost:1050/files/images/buxton/upload_987fff8f-c41b-409c-a1dc-a331d8129c55.jpeg", + "http://localhost:1050/files/images/buxton/upload_9b3cc2ce-6851-4144-ab8f-94c9a029fa7c.jpeg", + "http://localhost:1050/files/images/buxton/upload_9c19b8a2-76b7-4498-93f8-9779e623174e.jpeg", + "http://localhost:1050/files/images/buxton/upload_a47629fd-a32f-4cf3-bb5a-1d1b8ea9239d.jpeg", + "http://localhost:1050/files/images/buxton/upload_6c1cc20d-54aa-43bc-a9b3-eaa55e16e77d.jpeg", + "http://localhost:1050/files/images/buxton/upload_6726a5e4-3547-4a92-8f8e-5fadd55fa761.jpeg", + "http://localhost:1050/files/images/buxton/upload_c8db5eb9-dac1-4e70-a66e-de9f90bca112.jpeg", + "http://localhost:1050/files/images/buxton/upload_036304b6-a8b3-4d42-b8c8-eb0925afab25.jpeg" + ] + } +] \ No newline at end of file diff --git a/src/scraping/buxton/final/json/incomplete.json b/src/scraping/buxton/final/json/incomplete.json new file mode 100644 index 000000000..a9ed39e21 --- /dev/null +++ b/src/scraping/buxton/final/json/incomplete.json @@ -0,0 +1,569 @@ +[ + { + "filename": "3Dconnexion_SpaceNavigator.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "3DMag.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "3DPlus.docx", + "year": "ERR__YEAR__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "3DSpace.docx", + "year": "ERR__YEAR__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "3MErgo.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Abaton.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
+ }, + { + "filename": "Active.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "ADB2.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "adecm.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "AlphaSmart_Pro.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Amazon_Kindle_Keyboard.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Apple_ADB_Mouse.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." + }, + { + "filename": "Apple_Adj_Keyboard.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Apple_iPhone.docx", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Apple_Mac_Portable-Katy’s MacBook Air-2.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Apple_Mac_Portable-Katy’s MacBook Air.docx", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Apple_Mac_Portable.docx", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Apple_Scroll_Mouse.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "AWrock.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "BAT.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Bill_Notes_CyKey.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Brailler.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
+ }, + { + "filename": "Brewster_Stereoscope.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "CasioC801.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "CasioTC500.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Casio_Mini.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Citizen_LCl_914.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Citizen_LC_909.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Citizen_LC_913.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." + }, + { + "filename": "CoolPix.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "Cross.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Dymo_MK-6.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "eMate.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Emotiv.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Explorer.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
+ }, + { + "filename": "Falcon.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "FingerWorks_Prototype.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Freeboard.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match was captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "FrogPad.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "FujitsuPalm.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "FujitsuTouch.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "Gavilan_SC.docx", + "company": "ERR__COMPANY__: outer match wasn't captured.", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Genius_Ring_Mouse.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Grandjean_Stenotype.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "gravis.docx", + "year": "ERR__YEAR__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "GRiD1550-Katy’s MacBook Air-2.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "GRiD1550-Katy’s MacBook Air.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." 
+ }, + { + "filename": "GRiD1550.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "Helios-Klimax.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Honeywell_T86.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "HTC_Touch.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." + }, + { + "filename": "IBMTrack.docx", + "year": "ERR__YEAR__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "IBM_Convertable-Katy’s MacBook Air-2.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "IBM_Convertable-Katy’s MacBook Air.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "IBM_Convertable.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "IBM_PS2_Mouse.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "IBM_Simon.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value." + }, + { + "filename": "IDEO.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "iGesture.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "iGrip.docx", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
+ }, + { + "filename": "iLiad.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Joyboard.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "Kensington_SB_TB-Mouse.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Kindle_3G_lighted_cover.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Leatherman_Tread.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "M1.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured." + }, + { + "filename": "MaltronLH.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Marine_Band_Harmonica.docx", + "company": "ERR__COMPANY__: outer match was captured.", + "year": "ERR__YEAR__: outer match was captured.", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Matrox.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Metaphor_Kbd.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "Metaphor_Mouse.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Microwriter.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Motorola_DynaTAC.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." 
+ }, + { + "filename": "MousePen.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "MS-1_Stereoscope.docx", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "MWB_Braille_Writer.docx", + "company": "ERR__COMPANY__: outer match wasn't captured.", + "year": "__ERR__YEAR__TRANSFORM__: NaN cannot be parsed to a numeric value.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "NB75D.docx", + "year": "ERR__YEAR__: outer match was captured.", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "NewO.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Newton120.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Nikon_Coolpix-100.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured." + }, + { + "filename": "Numonics_Mgr_Mouse.docx", + "company": "ERR__COMPANY__: outer match was captured.", + "year": "ERR__YEAR__: outer match was captured.", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "shortDescription": "ERR__SHORTDESCRIPTION__: outer match was captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "PadMouse.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "PARCkbd.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." + }, + { + "filename": "Philco_Mystery_Control.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "PowerTrack.docx", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "ProAgio (1).docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
+ }, + { + "filename": "ProAgio.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "Pulsar_time_Computer.docx", + "primaryKey": "ERR__PRIMARYKEY__: outer match was captured.", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Ring.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "round.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "SafeType_Kbd.docx", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured." + }, + { + "filename": "Samsung_SPH-A500.docx", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "SurfMouse.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured." + }, + { + "filename": "The_Tap.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured.", + "longDescription": "ERR__LONGDESCRIPTION__: outer match was captured." + }, + { + "filename": "Thumbelina.docx", + "secondaryKey": "ERR__SECONDARYKEY__: outer match wasn't captured.", + "degreesOfFreedom": "ERR__DEGREESOFFREEDOM__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." + }, + { + "filename": "TPARCtab.docx", + "originalPrice": "ERR__ORIGINALPRICE__: outer match wasn't captured.", + "dimensions": "ERR__DIMENSIONS__: outer match wasn't captured." 
+ } +] \ No newline at end of file diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index e590a5b85..4cb57a4e7 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -1,7 +1,6 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method } from "../RouteManager"; import { exec } from 'child_process'; -import { command_line } from "../ActionUtilities"; import RouteSubscriber from "../RouteSubscriber"; import { red } from "colors"; import executeImport from "../../scraping/buxton/final/BuxtonImporter"; @@ -40,25 +39,6 @@ export default class UtilManager extends ApiManager { } }); - // register({ - // method: Method.GET, - // subscription: "/buxton", - // secureHandler: async ({ res }) => { - // const cwd = './src/scraping/buxton'; - - // const onResolved = (stdout: string) => { console.log(stdout); res.redirect("/"); }; - // const onRejected = (err: any) => { console.error(err.message); res.send(err); }; - // const tryPython3 = (reason: any) => { - // console.log("Initial scraper failed for the following reason:"); - // console.log(red(reason.Error)); - // console.log("Falling back to python3..."); - // return command_line('python3 scraper.py', cwd).then(onResolved, onRejected); - // }; - - // return command_line('python scraper.py', cwd).then(onResolved, tryPython3); - // }, - // }); - register({ method: Method.GET, subscription: "/buxton", -- cgit v1.2.3-70-g09d2 From e8fcbbf57b2a2f443d9c280ce10558cf9d51c632 Mon Sep 17 00:00:00 2001 From: vellichora Date: Sun, 9 Feb 2020 17:11:24 -0500 Subject: can upload collection from mobile to desktop --- src/client/DocServer.ts | 9 +- src/client/util/DragManager.ts | 2 +- .../collections/CollectionMasonryViewFieldRow.tsx | 1 + .../views/collections/CollectionStackingView.tsx | 2 + .../CollectionStackingViewFieldColumn.tsx | 1 + src/client/views/collections/CollectionSubView.tsx | 2 +- src/client/views/collections/CollectionView.tsx | 2 +- .../views/collections/CollectionViewChromes.tsx | 1 + src/mobile/ImageUpload.tsx | 1 + src/mobile/MobileInkOverlay.tsx | 53 +++++++- src/mobile/MobileInterface.scss | 8 ++ src/mobile/MobileInterface.tsx | 150 ++++++++++++--------- src/server/ApiManagers/UploadManager.ts | 1 + src/server/Message.ts | 6 + src/server/Websocket/Websocket.ts | 7 +- .../authentication/models/current_user_utils.ts | 21 +-- src/server/server_Initialization.ts | 24 ++-- 17 files changed, 195 insertions(+), 96 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/DocServer.ts b/src/client/DocServer.ts index c03764471..e09251855 100644 --- a/src/client/DocServer.ts +++ b/src/client/DocServer.ts @@ -1,5 +1,5 @@ import * as OpenSocket from 'socket.io-client'; -import { MessageStore, YoutubeQueryTypes, GestureContent, MobileInkOverlayContent, UpdateMobileInkOverlayPositionContent } from "./../server/Message"; +import { MessageStore, YoutubeQueryTypes, GestureContent, MobileInkOverlayContent, UpdateMobileInkOverlayPositionContent, MobileDocumentUploadContent } from "./../server/Message"; import { Opt, Doc } from '../new_fields/Doc'; import { Utils, emptyFunction } from '../Utils'; import { SerializationHelper } from './util/SerializationHelper'; @@ -81,6 +81,10 @@ export namespace DocServer { Utils.Emit(_socket, MessageStore.UpdateMobileInkOverlayPosition, content); } + export function dispatchMobileDocumentUpload(content: MobileDocumentUploadContent) { + Utils.Emit(_socket, MessageStore.MobileDocumentUpload, content); + } + } 
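The new `dispatchMobileDocumentUpload` function above is one leg of a three-part relay: the mobile client emits the message, the server rebroadcasts it (see the Websocket.ts hunk further down in this patch), and every other client's `receiveMobileDocumentUpload` listener (registered in the next hunk) resolves the document id and drops it into the active collection. A minimal sketch of that relay, assuming plain socket.io in place of Dash's `Utils.Emit`/`Utils.AddServerHandler` wrappers and a placeholder server URL:

```typescript
// Minimal sketch of the mobile-upload relay, assuming plain socket.io instead of
// Dash's Utils.Emit / Utils.AddServerHandler wrappers. The server URL and the
// "mobileDocumentUpload" event name are placeholders; "receiveMobileDocumentUpload"
// matches the event broadcast in the Websocket.ts hunk later in this patch.
import * as OpenSocket from "socket.io-client";

interface MobileDocumentUploadContent {
    readonly docId: string;
}

const socket = OpenSocket("http://localhost:1050");

// Phone side: announce which document should be materialized on the desktop.
export function dispatchMobileDocumentUpload(content: MobileDocumentUploadContent) {
    socket.emit("mobileDocumentUpload", content);
}

// Desktop side: every other connected client receives the rebroadcast and can
// resolve the id into a document (DocServer.GetRefField in the actual patch).
socket.on("receiveMobileDocumentUpload", ({ docId }: MobileDocumentUploadContent) => {
    console.log(`mobile client requested upload of document ${docId}`);
});

// Server side (sketch of the Websocket.ts handler): rebroadcast to everyone else.
// io.on("connection", socket =>
//     socket.on("mobileDocumentUpload", content =>
//         socket.broadcast.emit("receiveMobileDocumentUpload", content)));
```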
export function init(protocol: string, hostname: string, port: number, identifier: string) { @@ -116,6 +120,9 @@ export namespace DocServer { _socket.addEventListener("receiveUpdateOverlayPosition", (content: UpdateMobileInkOverlayPositionContent) => { MobileInkOverlay.Instance.updatePosition(content); }); + _socket.addEventListener("receiveMobileDocumentUpload", (content: MobileDocumentUploadContent) => { + MobileInkOverlay.Instance.uploadDocument(content); + }); } function errorFunc(): never { diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts index df2f5fe3c..0bb8b531d 100644 --- a/src/client/util/DragManager.ts +++ b/src/client/util/DragManager.ts @@ -179,7 +179,7 @@ export namespace DragManager { ); } element.dataset.canDrop = "true"; - const handler = (e: Event) => dropFunc(e, (e as CustomEvent).detail); + const handler = (e: Event) => { console.log("drop target reveied docs"); dropFunc(e, (e as CustomEvent).detail); }; element.addEventListener("dashOnDrop", handler); return () => { element.removeEventListener("dashOnDrop", handler); diff --git a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx index 80752303c..26e0cc35a 100644 --- a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx +++ b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx @@ -73,6 +73,7 @@ export class CollectionMasonryViewFieldRow extends React.Component { + console.log("masronry row drop"); this._createAliasSelected = false; if (de.complete.docDragData) { (this.props.parent.Document.dropConverter instanceof ScriptField) && diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index 992820fc7..83c90810e 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -240,6 +240,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @undoBatch @action drop = (e: Event, de: DragManager.DropEvent) => { + console.log("DROP STACKIN G2"); const where = [de.x, de.y]; let targInd = -1; let plusOne = 0; @@ -270,6 +271,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @undoBatch @action onDrop = async (e: React.DragEvent): Promise => { + console.log("DROP STACKING"); const where = [e.clientX, e.clientY]; let targInd = -1; this._docXfs.map((cd, i) => { diff --git a/src/client/views/collections/CollectionStackingViewFieldColumn.tsx b/src/client/views/collections/CollectionStackingViewFieldColumn.tsx index 39b4e4e1d..65c4b3195 100644 --- a/src/client/views/collections/CollectionStackingViewFieldColumn.tsx +++ b/src/client/views/collections/CollectionStackingViewFieldColumn.tsx @@ -57,6 +57,7 @@ export class CollectionStackingViewFieldColumn extends React.Component { + console.log("column drop stacking"); this._createAliasSelected = false; if (de.complete.docDragData) { const key = StrCast(this.props.parent.props.Document.sectionFilter); diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index e94f24f2c..b35af2314 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -6,7 +6,7 @@ import { Id } from "../../../new_fields/FieldSymbols"; import { List } from "../../../new_fields/List"; import { listSpec } from "../../../new_fields/Schema"; import { ScriptField } from 
"../../../new_fields/ScriptField"; -import { Cast } from "../../../new_fields/Types"; +import { Cast, StrCast } from "../../../new_fields/Types"; import { CurrentUserUtils } from "../../../server/authentication/models/current_user_utils"; import { Utils } from "../../../Utils"; import { DocServer } from "../../DocServer"; diff --git a/src/client/views/collections/CollectionView.tsx b/src/client/views/collections/CollectionView.tsx index 88023783b..1d399e26f 100644 --- a/src/client/views/collections/CollectionView.tsx +++ b/src/client/views/collections/CollectionView.tsx @@ -138,7 +138,7 @@ export class CollectionView extends Touchable { let index = value.reduce((p, v, i) => (v instanceof Doc && v === doc) ? i : p, -1); index = index !== -1 ? index : value.reduce((p, v, i) => (v instanceof Doc && Doc.AreProtosEqual(v, doc)) ? i : p, -1); - ContextMenu.Instance.clearItems(); + ContextMenu.Instance && ContextMenu.Instance.clearItems(); if (index !== -1) { value.splice(index, 1); return true; diff --git a/src/client/views/collections/CollectionViewChromes.tsx b/src/client/views/collections/CollectionViewChromes.tsx index a870b6043..01dc21f95 100644 --- a/src/client/views/collections/CollectionViewChromes.tsx +++ b/src/client/views/collections/CollectionViewChromes.tsx @@ -231,6 +231,7 @@ export class CollectionViewBaseChrome extends React.Component { + console.log("toggle collapse"); this.props.CollectionView.props.Document.chromeStatus = this.props.CollectionView.props.Document.chromeStatus === "enabled" ? "collapsed" : "enabled"; if (this.props.collapse) { this.props.collapse(this.props.CollectionView.props.Document.chromeStatus !== "enabled"); diff --git a/src/mobile/ImageUpload.tsx b/src/mobile/ImageUpload.tsx index 3304e8e22..10bd78075 100644 --- a/src/mobile/ImageUpload.tsx +++ b/src/mobile/ImageUpload.tsx @@ -47,6 +47,7 @@ class Uploader extends React.Component { const upload = window.location.origin + "/upload"; this.status = "uploading image"; + console.log("uploading image", formData); const res = await fetch(upload, { method: 'POST', body: formData diff --git a/src/mobile/MobileInkOverlay.tsx b/src/mobile/MobileInkOverlay.tsx index 6b65aa436..ed4cca5b9 100644 --- a/src/mobile/MobileInkOverlay.tsx +++ b/src/mobile/MobileInkOverlay.tsx @@ -1,9 +1,13 @@ import React = require('react'); import { observer } from "mobx-react"; -import { MobileInkOverlayContent, GestureContent, UpdateMobileInkOverlayPositionContent } from "../server/Message"; +import { MobileInkOverlayContent, GestureContent, UpdateMobileInkOverlayPositionContent, MobileDocumentUploadContent } from "../server/Message"; import { observable, action } from "mobx"; import { GestureUtils } from "../pen-gestures/GestureUtils"; import "./MobileInkOverlay.scss"; +import { StrCast } from '../new_fields/Types'; +import { DragManager } from "../client/util/DragManager"; +import { DocServer } from '../client/DocServer'; +import { Doc } from '../new_fields/Doc'; @observer @@ -67,7 +71,7 @@ export default class MobileInkOverlay extends React.Component { height: bounds.height * this._scale, }; - const target = document.elementFromPoint(points[0].X, points[0].Y); + const target = document.elementFromPoint(this._x + 10, this._y + 10); target?.dispatchEvent( new CustomEvent("dashOnGesture", { @@ -82,6 +86,43 @@ export default class MobileInkOverlay extends React.Component { ); } + uploadDocument = async (content: MobileDocumentUploadContent) => { + const { docId } = content; + console.log("receive upload document id", docId); + const doc = 
await DocServer.GetRefField(docId); + + if (doc && doc instanceof Doc) { + console.log("parsed upload document into doc", StrCast(doc.proto!.title)); + + const target = document.elementFromPoint(this._x + 10, this._y + 10); + console.log("the target is", target); + + const dragData = new DragManager.DocumentDragData([doc]); + const complete = new DragManager.DragCompleteEvent(false, dragData); + console.log("the drag data is", dragData); + + if (target) { + target.dispatchEvent( + new CustomEvent("dashOnDrop", + { + bubbles: true, + detail: { + x: this._x, + y: this._y, + complete: complete, + altKey: false, + metaKey: false, + ctrlKey: false + } + } + ) + ); + } else { + alert("TARGET IS UNDEFINED"); + } + } + } + @action dragStart = (e: React.PointerEvent) => { document.removeEventListener("pointermove", this.dragging); @@ -132,15 +173,17 @@ export default class MobileInkOverlay extends React.Component { transform: `translate(${this._x}px, ${this._y}px)`, zIndex: 30000, pointerEvents: "none", - borderStyle: this._isDragging ? "solid" : "dashed" - }} + borderStyle: this._isDragging ? "solid" : "dashed", + backgroundColor: "rgba(255, 0, 0, 0.3)" + } + } ref={this._mainCont} >
- + ); } } \ No newline at end of file diff --git a/src/mobile/MobileInterface.scss b/src/mobile/MobileInterface.scss index 8abe5a40d..8b0ebcd53 100644 --- a/src/mobile/MobileInterface.scss +++ b/src/mobile/MobileInterface.scss @@ -1,7 +1,15 @@ .mobileInterface-inkInterfaceButtons { position: absolute; + top: -50px; display: flex; justify-content: space-between; width: 100%; z-index: 9999; + height: 50px; +} + +.mobileInterface-container { + height: calc(100% - 50px); + margin-top: 50px; + position: relative; } \ No newline at end of file diff --git a/src/mobile/MobileInterface.tsx b/src/mobile/MobileInterface.tsx index 665d9a168..a1ef0a5d1 100644 --- a/src/mobile/MobileInterface.tsx +++ b/src/mobile/MobileInterface.tsx @@ -22,6 +22,18 @@ import { SelectionManager } from '../client/util/SelectionManager'; import { DateField } from '../new_fields/DateField'; import { GestureUtils } from '../pen-gestures/GestureUtils'; import { DocServer } from '../client/DocServer'; +import { DocumentDecorations } from '../client/views/DocumentDecorations'; +import { OverlayView } from '../client/views/OverlayView'; +import { DictationOverlay } from '../client/views/DictationOverlay'; +import SharingManager from '../client/util/SharingManager'; +import { PreviewCursor } from '../client/views/PreviewCursor'; +import { ContextMenu } from '../client/views/ContextMenu'; +import { RadialMenu } from '../client/views/nodes/RadialMenu'; +import PDFMenu from '../client/views/pdf/PDFMenu'; +import MarqueeOptionsMenu from '../client/views/collections/collectionFreeForm/MarqueeOptionsMenu'; +import GoogleAuthenticationManager from '../client/apis/GoogleAuthenticationManager'; +import { listSpec } from '../new_fields/Schema'; +import { Id } from '../new_fields/FieldSymbols'; library.add(faLongArrowAltLeft); @@ -31,7 +43,7 @@ export default class MobileInterface extends React.Component { @computed private get userDoc() { return CurrentUserUtils.UserDocument; } @computed private get mainContainer() { return this.userDoc ? 
FieldValue(Cast(this.userDoc.activeMobile, Doc)) : CurrentUserUtils.GuestMobile; } // @observable private currentView: "main" | "ink" | "upload" = "main"; - private mainDoc: Doc = CurrentUserUtils.setupMobileDoc(this.userDoc); + private mainDoc: any = CurrentUserUtils.setupMobileDoc(this.userDoc); @observable private renderView?: () => JSX.Element; // private inkDoc?: Doc; @@ -49,7 +61,6 @@ export default class MobileInterface extends React.Component { library.add(...[faPenNib, faHighlighter, faEraser, faMousePointer]); if (this.userDoc && !this.mainContainer) { - // const doc = CurrentUserUtils.setupMobileDoc(this.userDoc); this.userDoc.activeMobile = this.mainDoc; } } @@ -76,48 +87,22 @@ export default class MobileInterface extends React.Component { }); } - // @action - // switchCurrentView = (view: "main" | "ink" | "upload") => { - // this.currentView = view; - - // if (this.userDoc) { - // switch (view) { - // case "main": { - // // const doc = CurrentUserUtils.setupMobileDoc(this.userDoc); - // this.userDoc.activeMobile = this.mainDoc; - // break; - // } - // case "ink": { - // this.inkDoc = CurrentUserUtils.setupMobileInkingDoc(this.userDoc); - // this.userDoc.activeMobile = this.inkDoc; - // InkingControl.Instance.switchTool(InkTool.Pen); - // this.drawingInk = true; - - // DocServer.Mobile.dispatchOverlayTrigger({ - // enableOverlay: true, - // width: window.innerWidth, - // height: window.innerHeight - // }); - - // break; - // } - // case "upload": { - // this.uploadDoc = CurrentUserUtils.setupMobileUploadDoc(this.userDoc); - // this.userDoc.activeMobile = this.uploadDoc; - - // } - // } - // } - // } + onSwitchUpload = () => { + DocServer.Mobile.dispatchOverlayTrigger({ + enableOverlay: true, + width: 100, + height: 100 + }); + } renderDefaultContent = () => { - console.log("rendering default content"); + console.log("rendering default content", this.mainContainer); if (this.mainContainer) { return { console.log("want to add doc to default content", StrCast(doc.title)); return false; }} addDocTab={returnFalse} pinToPres={emptyFunction} removeDocument={undefined} @@ -192,42 +177,54 @@ export default class MobileInterface extends React.Component { - - window.innerHeight} - PanelWidth={() => window.innerWidth} - focus={emptyFunction} - isSelected={returnFalse} - select={emptyFunction} - active={returnFalse} - ContentScaling={returnOne} - whenActiveChanged={returnFalse} - ScreenToLocalTransform={Transform.Identity} - ruleProvider={undefined} - renderDepth={0} - ContainingCollectionView={undefined} - ContainingCollectionDoc={undefined}> - - + window.innerHeight} + PanelWidth={() => window.innerWidth} + focus={emptyFunction} + isSelected={returnFalse} + select={emptyFunction} + active={returnFalse} + ContentScaling={returnOne} + whenActiveChanged={returnFalse} + ScreenToLocalTransform={Transform.Identity} + ruleProvider={undefined} + renderDepth={0} + ContainingCollectionView={undefined} + ContainingCollectionDoc={undefined}> + ); } } - upload = () => { + upload = async (e: React.MouseEvent) => { + if (this.mainContainer) { + const data = Cast(this.mainContainer.data, listSpec(Doc)); + if (data) { + const uploadDoc = await data[1]; // TODO: ensure this is the collection to upload + console.log("UPLOADING DOCUMENT FROM MOBILE", uploadDoc[Id], StrCast(uploadDoc.proto!.title)); + if (uploadDoc) { + DocServer.Mobile.dispatchMobileDocumentUpload({ + docId: uploadDoc[Id] + }); + } + } + } + e.stopPropagation(); + e.preventDefault(); } renderUploadContent() { if (this.mainContainer) { return 
( -
+
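Underneath these hunks is one simple contract: a Dash drop target is just a DOM element with a "dashOnDrop" CustomEvent listener, so any code that knows a screen coordinate can synthesize a drop with `document.elementFromPoint`. A stripped-down sketch of that contract, with the real `DragManager.DragCompleteEvent` detail replaced by a plain `docId` payload:

```typescript
// Stripped-down sketch of the "dashOnDrop" pattern used in the DragManager and
// MobileInkOverlay hunks above; the payload shape here (just a docId) is a
// stand-in for Dash's DragManager drop-event detail.
interface SyntheticDropDetail {
    x: number;
    y: number;
    docId: string;
}

// Registration side: any element can opt in to receiving synthetic drops.
function makeDropTarget(element: HTMLElement, onDrop: (detail: SyntheticDropDetail) => void) {
    const handler = (e: Event) => onDrop((e as CustomEvent<SyntheticDropDetail>).detail);
    element.addEventListener("dashOnDrop", handler);
    return () => element.removeEventListener("dashOnDrop", handler);
}

// Dispatch side: find whatever element sits under a screen point and let the
// event bubble up to the nearest registered drop target.
function dispatchSyntheticDrop(x: number, y: number, docId: string) {
    const target = document.elementFromPoint(x, y);
    target?.dispatchEvent(new CustomEvent<SyntheticDropDetail>("dashOnDrop", {
        bubbles: true,
        detail: { x, y, docId },
    }));
}
```

Because the event is dispatched with `bubbles: true`, the overlay only needs some element under the point; the event climbs to the nearest registered collection view, which is how `MobileInkOverlay.uploadDocument` hands the resolved document to the drop machinery without a direct reference to the React component that renders it.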
@@ -240,7 +237,7 @@ export default class MobileInterface extends React.Component { Document={this.mainContainer} DataDoc={undefined} LibraryPath={emptyPath} - addDocument={returnFalse} + addDocument={(doc: Doc) => { console.log("want to add doc", StrCast(doc.title)); return false; }} addDocTab={returnFalse} pinToPres={emptyFunction} removeDocument={undefined} @@ -266,13 +263,35 @@ export default class MobileInterface extends React.Component { } } + onDragOver = (e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + } + render() { // const content = this.currentView === "main" ? this.mainContent : // this.currentView === "ink" ? this.inkContent : // this.currentView === "upload" ? this.uploadContent : <>; return ( -
- {this.renderView ? this.renderView() : this.renderDefaultContent()} +
+ {/* + + {this.renderView ? this.renderView() : this.renderDefaultContent()} + */} + + {/* + + */} + + + {this.renderView ? this.renderView() : this.renderDefaultContent()} + + + {/* */} + + {/* + + */}
); } @@ -281,5 +300,6 @@ export default class MobileInterface extends React.Component { Scripting.addGlobal(function switchMobileView(doc: (userDoc: Doc) => Doc, renderView?: () => JSX.Element, onSwitch?: () => void) { return MobileInterface.Instance.switchCurrentView(doc, renderView, onSwitch); }); Scripting.addGlobal(function onSwitchMobileInking() { return MobileInterface.Instance.onSwitchInking(); }); Scripting.addGlobal(function renderMobileInking() { return MobileInterface.Instance.renderInkingContent(); }); +Scripting.addGlobal(function onSwitchMobileUpload() { return MobileInterface.Instance.onSwitchUpload(); }); Scripting.addGlobal(function renderMobileUpload() { return MobileInterface.Instance.renderUploadContent(); }); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 74f45ae62..e76d9b7a2 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -42,6 +42,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: "/upload", secureHandler: async ({ req, res }) => { + console.log("/upload register"); const form = new formidable.IncomingForm(); form.uploadDir = pathToDirectory(Directory.parsed_files); form.keepExtensions = true; diff --git a/src/server/Message.ts b/src/server/Message.ts index 236df3f3c..c23a2f0a8 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -1,5 +1,6 @@ import { Utils } from "../Utils"; import { Point } from "../pen-gestures/ndollar"; +import { Doc } from "../new_fields/Doc"; export class Message { private _name: string; @@ -62,6 +63,10 @@ export interface UpdateMobileInkOverlayPositionContent { readonly dsize?: number; } +export interface MobileDocumentUploadContent { + readonly docId: string; +} + export namespace MessageStore { export const Foo = new Message("Foo"); export const Bar = new Message("Bar"); @@ -75,6 +80,7 @@ export namespace MessageStore { export const GesturePoints = new Message("Gesture Points"); export const MobileInkOverlayTrigger = new Message("Trigger Mobile Ink Overlay"); export const UpdateMobileInkOverlayPosition = new Message("Update Mobile Ink Overlay Position"); + export const MobileDocumentUpload = new Message("Upload Document From Mobile"); export const GetRefField = new Message("Get Ref Field"); export const GetRefFields = new Message("Get Ref Fields"); diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index 77816c897..798bdae67 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -1,5 +1,5 @@ import { Utils } from "../../Utils"; -import { MessageStore, Transferable, Types, Diff, YoutubeQueryInput, YoutubeQueryTypes, GestureContent, MobileInkOverlayContent, UpdateMobileInkOverlayPositionContent } from "../Message"; +import { MessageStore, Transferable, Types, Diff, YoutubeQueryInput, YoutubeQueryTypes, GestureContent, MobileInkOverlayContent, UpdateMobileInkOverlayPositionContent, MobileDocumentUploadContent } from "../Message"; import { Client } from "../Client"; import { Socket } from "socket.io"; import { Database } from "../database"; @@ -57,6 +57,7 @@ export namespace WebSocket { Utils.AddServerHandler(socket, MessageStore.GesturePoints, content => processGesturePoints(socket, content)); Utils.AddServerHandler(socket, MessageStore.MobileInkOverlayTrigger, content => processOverlayTrigger(socket, content)); Utils.AddServerHandler(socket, MessageStore.UpdateMobileInkOverlayPosition, content => 
processUpdateOverlayPosition(socket, content)); + Utils.AddServerHandler(socket, MessageStore.MobileDocumentUpload, content => processMobileDocumentUpload(socket, content)); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); @@ -83,6 +84,10 @@ export namespace WebSocket { socket.broadcast.emit("receiveUpdateOverlayPosition", content); } + function processMobileDocumentUpload(socket: Socket, content: MobileDocumentUploadContent) { + socket.broadcast.emit("receiveMobileDocumentUpload", content); + } + function HandleYoutubeQuery([query, callback]: [YoutubeQueryInput, (result?: any[]) => void]) { const { ProjectCredentials } = GoogleCredentialsLoader; switch (query.type) { diff --git a/src/server/authentication/models/current_user_utils.ts b/src/server/authentication/models/current_user_utils.ts index 817cf40b1..3e5953ac1 100644 --- a/src/server/authentication/models/current_user_utils.ts +++ b/src/server/authentication/models/current_user_utils.ts @@ -103,7 +103,7 @@ export class CurrentUserUtils { { title: "use scrubber", icon: "eraser", click: 'activateScrubber(this.activePen.pen = sameDocs(this.activePen.pen, this) ? undefined : this);', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "green", activePen: doc }, { title: "use drag", icon: "mouse-pointer", click: 'deactivateInk();this.activePen.pen = this;', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "white", activePen: doc }, { title: "draw", icon: "pen-nib", click: 'switchMobileView(setupMobileInkingDoc, renderMobileInking, onSwitchMobileInking);', ischecked: `sameDocs(this.activePen.pen, this)`, backgroundColor: "red", activePen: doc }, - { title: "upload", icon: "upload", click: 'switchMobileView(setupMobileUploadDoc, renderMobileUpload);', backgroundColor: "orange" }, + { title: "upload", icon: "upload", click: 'switchMobileView(setupMobileUploadDoc, renderMobileUpload, onSwitchMobileUpload);', backgroundColor: "orange" }, { title: "upload", icon: "upload", click: 'uploadImageMobile();', backgroundColor: "cyan" }, ]; return docProtoData.filter(d => !buttons || !buttons.includes(d.title)).map(data => Docs.Create.FontIconDocument({ @@ -138,7 +138,9 @@ export class CurrentUserUtils { } static setupMobileDoc(userDoc: Doc) { - return userDoc.activeMoble ?? Docs.Create.MasonryDocument(CurrentUserUtils.setupMobileButtons(userDoc), { + const webDoc = Docs.Create.WebDocument("https://wikipedia.com", { title: "Mobile Upload Web", chromeStatus: "enabled" }); + + return userDoc.activeMoble ?? 
Docs.Create.MasonryDocument([webDoc, ...CurrentUserUtils.setupMobileButtons(userDoc)], { columnWidth: 100, ignoreClick: true, lockedPosition: true, chromeStatus: "disabled", title: "buttons", autoHeight: true, yMargin: 5 }); } @@ -148,13 +150,14 @@ export class CurrentUserUtils { } static setupMobileUploadDoc(userDoc: Doc) { - console.log("setup mobile upload", window.innerWidth, window.innerHeight); - const webDoc = Docs.Create.WebDocument("https://wikipedia.com", { title: "Mobile Upload Web", chromeStatus: "enabled" }); - const uploadDoc = Docs.Create.StackingDocument([], { title: "Mobile Upload", backgroundColor: "pink" }); - return Docs.Create.StackingDocument([webDoc, uploadDoc], { - title: "Mobile Upload", backgroundColor: "white", - columnWidth: window.innerWidth, ignoreClick: true, lockedPosition: true, chromeStatus: "disabled", autoHeight: true, yMargin: 5, - width: window.innerWidth, height: window.innerHeight + const webDoc = Docs.Create.WebDocument("https://yahoo.com", { title: "Upload Images From the Web", chromeStatus: "enabled" }); + const uploadDoc = Docs.Create.StackingDocument([], { title: "Mobile Upload Collection", backgroundColor: "pink" }); + console.log("window size", window.innerWidth, window.innerHeight); + // return Docs.Create.StackingDocument([webDoc, uploadDoc], { + // columnWidth: window.innerWidth, //ignoreClick: true, lockedPosition: true, chromeStatus: "disabled", title: "Mobile Upload", autoHeight: true, yMargin: 5 + // }); + return Docs.Create.StackingDocument([webDoc, uploadDoc], {//...CurrentUserUtils.setupMobileButtons(userDoc)], { + columnWidth: 100, lockedPosition: true, chromeStatus: "disabled", title: "Upload", autoHeight: true, yMargin: 30 }); } diff --git a/src/server/server_Initialization.ts b/src/server/server_Initialization.ts index cbe070293..5f1ecc733 100644 --- a/src/server/server_Initialization.ts +++ b/src/server/server_Initialization.ts @@ -42,18 +42,18 @@ export default async function InitializeServer(routeSetter: RouteSetter) { } }; app.use(cors(corsOptions)); - app.use("*", ({ user, originalUrl }, res, next) => { - if (user && !originalUrl.includes("Heartbeat")) { - const userEmail = (user as any).email; - if (userEmail) { - timeMap[userEmail] = Date.now(); - } - } - if (!user && originalUrl === "/") { - return res.redirect("/login"); - } - next(); - }); + // app.use("*", ({ user, originalUrl }, res, next) => { + // if (user && !originalUrl.includes("Heartbeat")) { + // const userEmail = (user as any).email; + // if (userEmail) { + // timeMap[userEmail] = Date.now(); + // } + // } + // if (!user && originalUrl === "/") { + // return res.redirect("/login"); + // } + // next(); + // }); app.use(wdm(compiler, { publicPath: config.output.publicPath })); app.use(whm(compiler)); -- cgit v1.2.3-70-g09d2 From bac95953f7f09c9356022034c0fe5d610ab7abe3 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Feb 2020 07:05:46 -0500 Subject: fixed remote cursors: --- .../CollectionFreeFormRemoteCursors.tsx | 85 ++++++++++------------ src/server/ApiManagers/UserManager.ts | 2 +- 2 files changed, 39 insertions(+), 48 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx index bb9ae4326..92fa2781c 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx +++ 
b/src/client/views/collections/collectionFreeForm/CollectionFreeFormRemoteCursors.tsx @@ -8,74 +8,65 @@ import { CollectionViewProps } from "../CollectionSubView"; import "./CollectionFreeFormView.scss"; import React = require("react"); import v5 = require("uuid/v5"); +import { computed } from "mobx"; +import { FieldResult } from "../../../../new_fields/Doc"; +import { List } from "../../../../new_fields/List"; @observer export class CollectionFreeFormRemoteCursors extends React.Component { - protected getCursors(): CursorField[] { + @computed protected get cursors(): CursorField[] { const doc = this.props.Document; - const id = CurrentUserUtils.id; - if (!id) { + let cursors: FieldResult>; + const { id } = CurrentUserUtils; + if (!id || !(cursors = Cast(doc.cursors, listSpec(CursorField)))) { return []; } - - const cursors = Cast(doc.cursors, listSpec(CursorField)); - const now = mobxUtils.now(); - // const now = Date.now(); - return (cursors || []).filter(cursor => cursor.data.metadata.id !== id && (now - cursor.data.metadata.timestamp) < 1000); + return (cursors || []).filter(({ data: { metadata } }) => metadata.id !== id && (now - metadata.timestamp) < 1000); } - private crosshairs?: HTMLCanvasElement; - drawCrosshairs = (backgroundColor: string) => { - if (this.crosshairs) { - const ctx = this.crosshairs.getContext('2d'); - if (ctx) { - ctx.fillStyle = backgroundColor; - ctx.fillRect(0, 0, 20, 20); - - ctx.fillStyle = "black"; - ctx.lineWidth = 0.5; - - ctx.beginPath(); + @computed get renderedCursors() { + return this.cursors.map(({ data: { metadata, position: { x, y } } }) => { + return ( +
+ { + if (el) { + const ctx = el.getContext('2d'); + if (ctx) { + ctx.fillStyle = "#" + v5(metadata.id, v5.URL).substring(0, 6).toUpperCase() + "22"; + ctx.fillRect(0, 0, 20, 20); - ctx.moveTo(10, 0); - ctx.lineTo(10, 8); + ctx.fillStyle = "black"; + ctx.lineWidth = 0.5; - ctx.moveTo(10, 20); - ctx.lineTo(10, 12); + ctx.beginPath(); - ctx.moveTo(0, 10); - ctx.lineTo(8, 10); + ctx.moveTo(10, 0); + ctx.lineTo(10, 8); - ctx.moveTo(20, 10); - ctx.lineTo(12, 10); + ctx.moveTo(10, 20); + ctx.lineTo(10, 12); - ctx.stroke(); + ctx.moveTo(0, 10); + ctx.lineTo(8, 10); - // ctx.font = "10px Arial"; - // ctx.fillText(Doc.CurrentUserEmail[0].toUpperCase(), 10, 10); - } - } - } + ctx.moveTo(20, 10); + ctx.lineTo(12, 10); - get sharedCursors() { - return this.getCursors().map(c => { - const m = c.data.metadata; - const l = c.data.position; - this.drawCrosshairs("#" + v5(m.id, v5.URL).substring(0, 6).toUpperCase() + "22"); - return ( -
- { if (el) this.crosshairs = el; }} + ctx.stroke(); + } + } + }} width={20} height={20} />

- {m.identifier[0].toUpperCase()} + {metadata.identifier[0].toUpperCase()}

); @@ -83,6 +74,6 @@ export class CollectionFreeFormRemoteCursors extends React.Component res.send(JSON.stringify(user)), + secureHandler: ({ res, user: { _id, email } }) => res.send(JSON.stringify({ id: _id, email })), publicHandler: ({ res }) => res.send(JSON.stringify({ id: "__guest__", email: "" })) }); -- cgit v1.2.3-70-g09d2 From dd6937f8eaeb9abd45061a572fcb75586b5f5ef7 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Feb 2020 12:11:59 -0500 Subject: google doc fixes --- src/new_fields/RichTextUtils.ts | 9 +++------ src/scraping/buxton/final/BuxtonImporter.ts | 2 +- src/server/ApiManagers/GooglePhotosManager.ts | 11 +++++------ 3 files changed, 9 insertions(+), 13 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/new_fields/RichTextUtils.ts b/src/new_fields/RichTextUtils.ts index c50f8cc48..2fedac53e 100644 --- a/src/new_fields/RichTextUtils.ts +++ b/src/new_fields/RichTextUtils.ts @@ -123,9 +123,7 @@ export namespace RichTextUtils { const objects = Object.keys(inlineObjects).map(objectId => inlineObjects[objectId]); const mediaItems: MediaItem[] = objects.map(object => { const embeddedObject = object.inlineObjectProperties!.embeddedObject!; - const baseUrl = embeddedObject.imageProperties!.contentUri!; - const filename = `upload_${Utils.GenerateGuid()}.png`; - return { baseUrl, filename }; + return { baseUrl: embeddedObject.imageProperties!.contentUri! }; }); const uploads = await Networking.PostToServer("/googlePhotosMediaDownload", { mediaItems }); @@ -136,11 +134,11 @@ export namespace RichTextUtils { for (let i = 0; i < objects.length; i++) { const object = objects[i]; - const { fileNames } = uploads[i]; + const { clientAccessPath } = uploads[i]; const embeddedObject = object.inlineObjectProperties!.embeddedObject!; const size = embeddedObject.size!; const width = size.width!.magnitude!; - const url = Utils.fileUrl(fileNames.clean); + const url = Utils.prepend(clientAccessPath); inlineObjectMap.set(object.objectId!, { title: embeddedObject.title || `Imported Image from ${document.title}`, @@ -156,7 +154,6 @@ export namespace RichTextUtils { interface MediaItem { baseUrl: string; - filename: string; } export const Import = async (documentId: GoogleApiClientUtils.Docs.DocumentId, textNote: Doc): Promise> => { diff --git a/src/scraping/buxton/final/BuxtonImporter.ts b/src/scraping/buxton/final/BuxtonImporter.ts index 66d1e761a..6e3c66690 100644 --- a/src/scraping/buxton/final/BuxtonImporter.ts +++ b/src/scraping/buxton/final/BuxtonImporter.ts @@ -285,7 +285,7 @@ async function writeImages(zip: any): Promise { continue; } - const ext = `.${type}`; + const ext = `.${type}`.toLowerCase(); const generatedFileName = `upload_${Utils.GenerateGuid()}${ext}`; await DashUploadUtils.outputResizedImages(streamImage, imageDir, generatedFileName, ext); diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 1727cc5a6..157f6bdca 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -21,7 +21,6 @@ interface GooglePhotosUploadFailure { } interface MediaItem { baseUrl: string; - filename: string; } interface NewMediaItem { description: string; @@ -83,12 +82,12 @@ export default class GooglePhotosManager extends ApiManager { method: Method.POST, subscription: "/googlePhotosMediaDownload", secureHandler: async ({ req, res }) => { - const contents: { mediaItems: MediaItem[] } = req.body; + const { mediaItems } = req.body as { mediaItems: MediaItem[] }; let 
failed = 0; - if (contents) { + if (mediaItems) { const completed: Opt[] = []; - for (const item of contents.mediaItems) { - const results = await DashUploadUtils.InspectImage(item.baseUrl); + for (const { baseUrl } of mediaItems) { + const results = await DashUploadUtils.InspectImage(baseUrl); if (results instanceof Error) { failed++; continue; @@ -96,7 +95,7 @@ export default class GooglePhotosManager extends ApiManager { const { contentSize, ...attributes } = results; const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize); if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, item.filename, prefix).catch(error => _error(res, downloadError, error)); + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix).catch(error => _error(res, downloadError, error)); if (upload) { completed.push(upload); await Database.Auxiliary.LogUpload(upload); -- cgit v1.2.3-70-g09d2 From ed947b320de772d63e7b462e78910db11c0a8fd3 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Feb 2020 12:57:34 -0500 Subject: goole docs fixed except for uploading embedded images --- src/client/apis/google_docs/GoogleApiClientUtils.ts | 2 +- src/client/util/RichTextSchema.tsx | 2 +- src/new_fields/RichTextUtils.ts | 6 ++++-- src/server/ApiManagers/GooglePhotosManager.ts | 2 +- src/server/DashUploadUtils.ts | 4 ++-- 5 files changed, 9 insertions(+), 7 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/apis/google_docs/GoogleApiClientUtils.ts b/src/client/apis/google_docs/GoogleApiClientUtils.ts index d2a79f189..0d44ee8e0 100644 --- a/src/client/apis/google_docs/GoogleApiClientUtils.ts +++ b/src/client/apis/google_docs/GoogleApiClientUtils.ts @@ -248,7 +248,7 @@ export namespace GoogleApiClientUtils { return undefined; } requests.push(...options.content.requests); - const replies: any = await update({ documentId: documentId, requests }); + const replies: any = await update({ documentId, requests }); if ("errors" in replies) { console.log("Write operation failed:"); console.log(replies.errors.map((error: any) => error.message)); diff --git a/src/client/util/RichTextSchema.tsx b/src/client/util/RichTextSchema.tsx index 269a045a0..f12b3632c 100644 --- a/src/client/util/RichTextSchema.tsx +++ b/src/client/util/RichTextSchema.tsx @@ -883,7 +883,7 @@ export class DashFieldView { e.stopPropagation(); if ((e.key === "a" && e.ctrlKey) || (e.key === "a" && e.metaKey)) { if (window.getSelection) { - var range = document.createRange(); + const range = document.createRange(); range.selectNodeContents(self._fieldSpan); window.getSelection()!.removeAllRanges(); window.getSelection()!.addRange(range); diff --git a/src/new_fields/RichTextUtils.ts b/src/new_fields/RichTextUtils.ts index 2fedac53e..016bcc4ca 100644 --- a/src/new_fields/RichTextUtils.ts +++ b/src/new_fields/RichTextUtils.ts @@ -1,5 +1,5 @@ import { EditorState, Transaction, TextSelection } from "prosemirror-state"; -import { Node, Fragment, Mark, MarkType } from "prosemirror-model"; +import { Node, Fragment, Mark } from "prosemirror-model"; import { RichTextField } from "./RichTextField"; import { docs_v1 } from "googleapis"; import { GoogleApiClientUtils } from "../client/apis/google_docs/GoogleApiClientUtils"; @@ -17,6 +17,7 @@ import { Id } from "./FieldSymbols"; import { DocumentView } from "../client/views/nodes/DocumentView"; import { AssertionError } from "assert"; import { Networking } from "../client/Network"; +import { 
extname } from "path"; export namespace RichTextUtils { @@ -138,7 +139,8 @@ export namespace RichTextUtils { const embeddedObject = object.inlineObjectProperties!.embeddedObject!; const size = embeddedObject.size!; const width = size.width!.magnitude!; - const url = Utils.prepend(clientAccessPath); + const ext = extname(clientAccessPath); + const url = Utils.prepend(clientAccessPath.replace(ext, "_m" + ext)); inlineObjectMap.set(object.objectId!, { title: embeddedObject.title || `Imported Image from ${document.title}`, diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 157f6bdca..3236d1ee2 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -95,7 +95,7 @@ export default class GooglePhotosManager extends ApiManager { const { contentSize, ...attributes } = results; const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize); if (!found) { - const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix).catch(error => _error(res, downloadError, error)); + const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error)); if (upload) { completed.push(upload); await Database.Auxiliary.LogUpload(upload); diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 4e6bbaa3f..83a0064e8 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -223,7 +223,7 @@ export namespace DashUploadUtils { }); } - export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = ""): Promise => { + export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise => { const { requestable, source, ...remaining } = metadata; const extension = `.${remaining.contentType.split("/")[1].toLowerCase()}`; const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}${extension}`; @@ -237,7 +237,7 @@ export namespace DashUploadUtils { for (const suffix of Object.keys(writtenFiles)) { information.serverAccessPaths[suffix] = serverPathToFile(Directory.images, writtenFiles[suffix]); } - if (isLocal().test(source)) { + if (isLocal().test(source) && cleanUp) { unlinkSync(source); } return information; -- cgit v1.2.3-70-g09d2 From ec5c878fb4c5f7e03fdd214c0841cf2ebf983e8c Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Feb 2020 15:48:06 -0500 Subject: refactored image upload data response format, google docs working for specific interactions --- .../util/Import & Export/DirectoryImportBox.tsx | 4 +- src/client/util/RichTextSchema.tsx | 1 + src/client/views/collections/CollectionSubView.tsx | 8 +- src/client/views/nodes/DocumentView.tsx | 4 +- src/client/views/nodes/ImageBox.tsx | 4 +- src/new_fields/RichTextUtils.ts | 30 ++--- src/scraping/buxton/final/json/buxton.json | 130 ++++++++++----------- src/server/ApiManagers/DownloadManager.ts | 2 +- src/server/DashUploadUtils.ts | 37 ++++-- src/server/apis/google/GooglePhotosUploadUtils.ts | 3 +- 10 files changed, 121 insertions(+), 102 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index 071015193..d04f56e57 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & 
Export/DirectoryImportBox.tsx @@ -120,8 +120,8 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.completed += batch.length); }); - await Promise.all(uploads.map(async ({ name, type, clientAccessPath, exifData }) => { - const path = Utils.prepend(clientAccessPath); + await Promise.all(uploads.map(async ({ name, type, accessPaths, exifData }) => { + const path = Utils.prepend(accessPaths.agnostic.client); const document = await Docs.Get.DocumentFromType(type, path, { _width: 300, title: name }); const { data, error } = exifData; if (document) { diff --git a/src/client/util/RichTextSchema.tsx b/src/client/util/RichTextSchema.tsx index f12b3632c..3cf0561dc 100644 --- a/src/client/util/RichTextSchema.tsx +++ b/src/client/util/RichTextSchema.tsx @@ -133,6 +133,7 @@ export const nodes: { [index: string]: NodeSpec } = { inline: true, attrs: { src: {}, + agnostic: { default: null }, width: { default: 100 }, alt: { default: null }, title: { default: null }, diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index 293a8491a..20941493f 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -232,8 +232,8 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { const split = img.split("src=\"")[1].split("\"")[0]; let source = split; if (split.startsWith("data:image") && split.includes("base64")) { - const [{ clientAccessPath }] = await Networking.PostToServer("/uploadRemoteImage", { sources: [split] }); - source = Utils.prepend(clientAccessPath); + const [{ accessPaths }] = await Networking.PostToServer("/uploadRemoteImage", { sources: [split] }); + source = Utils.prepend(accessPaths.agnostic.client); } const doc = Docs.Create.ImageDocument(source, { ...options, _width: 300 }); ImageUtils.ExtractExif(doc); @@ -312,9 +312,9 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { const dropFileName = file ? file.name : "-empty-"; promises.push(Networking.PostFormDataToServer("/uploadFormData", formData).then(results => { results.map(action((result: any) => { - const { clientAccessPath, nativeWidth, nativeHeight, contentSize } = result; + const { accessPaths, nativeWidth, nativeHeight, contentSize } = result; const full = { ...options, _width: 300, title: dropFileName }; - const pathname = Utils.prepend(clientAccessPath); + const pathname = Utils.prepend(accessPaths.agnostic.client); Docs.Get.DocumentFromType(type, pathname, full).then(doc => { if (doc) { const proto = Doc.GetProto(doc); diff --git a/src/client/views/nodes/DocumentView.tsx b/src/client/views/nodes/DocumentView.tsx index e4ab3e746..5c5a87cb9 100644 --- a/src/client/views/nodes/DocumentView.tsx +++ b/src/client/views/nodes/DocumentView.tsx @@ -277,7 +277,7 @@ export class DocumentView extends DocComponent(Docu } preventDefault && e.preventDefault(); } - }) + }); buttonClick = async (altKey: boolean, ctrlKey: boolean) => { const linkDocs = DocListCast(this.props.Document.links); @@ -636,7 +636,7 @@ export class DocumentView extends DocComponent(Docu if (StrCast(tempDoc.title) === layout) { foundLayout = tempDoc; } - }) + }); DocumentView. 
makeCustomViewClicked(this.props.Document, this.props.DataDoc, Docs.Create.StackingDocument, layout, foundLayout); } else { diff --git a/src/client/views/nodes/ImageBox.tsx b/src/client/views/nodes/ImageBox.tsx index db9800de4..c0e102195 100644 --- a/src/client/views/nodes/ImageBox.tsx +++ b/src/client/views/nodes/ImageBox.tsx @@ -321,12 +321,12 @@ export class ImageBox extends DocAnnotatableComponent this.uploadIcon = loading); - const [{ clientAccessPath }] = await Networking.PostToServer("/uploadRemoteImage", { sources: [primary] }); + const [{ accessPaths }] = await Networking.PostToServer("/uploadRemoteImage", { sources: [primary] }); dataDoc.originalUrl = primary; let succeeded = true; let data: ImageField | undefined; try { - data = new ImageField(Utils.prepend(clientAccessPath)); + data = new ImageField(Utils.prepend(accessPaths.agnostic.client)); } catch { succeeded = false; } diff --git a/src/new_fields/RichTextUtils.ts b/src/new_fields/RichTextUtils.ts index 016bcc4ca..7c1fc39d8 100644 --- a/src/new_fields/RichTextUtils.ts +++ b/src/new_fields/RichTextUtils.ts @@ -114,6 +114,7 @@ export namespace RichTextUtils { width: number; title: string; url: string; + agnostic: string; } const parseInlineObjects = async (document: docs_v1.Schema$Document): Promise> => { @@ -135,17 +136,17 @@ export namespace RichTextUtils { for (let i = 0; i < objects.length; i++) { const object = objects[i]; - const { clientAccessPath } = uploads[i]; + const { accessPaths } = uploads[i]; + const { agnostic, _m } = accessPaths; const embeddedObject = object.inlineObjectProperties!.embeddedObject!; const size = embeddedObject.size!; const width = size.width!.magnitude!; - const ext = extname(clientAccessPath); - const url = Utils.prepend(clientAccessPath.replace(ext, "_m" + ext)); inlineObjectMap.set(object.objectId!, { title: embeddedObject.title || `Imported Image from ${document.title}`, width, - url + url: Utils.prepend(_m.client), + agnostic: Utils.prepend(agnostic.client) }); } } @@ -267,19 +268,19 @@ export namespace RichTextUtils { }; const imageNode = (schema: any, image: ImageTemplate, textNote: Doc) => { - const { url: src, width } = image; + const { url: src, width, agnostic } = image; let docid: string; - const guid = Utils.GenerateDeterministicGuid(src); + const guid = Utils.GenerateDeterministicGuid(agnostic); const backingDocId = StrCast(textNote[guid]); if (!backingDocId) { - const backingDoc = Docs.Create.ImageDocument(src, { _width: 300, _height: 300 }); + const backingDoc = Docs.Create.ImageDocument(agnostic, { _width: 300, _height: 300 }); DocumentView.makeCustomViewClicked(backingDoc, undefined, Docs.Create.FreeformDocument); docid = backingDoc[Id]; textNote[guid] = docid; } else { docid = backingDocId; } - return schema.node("image", { src, width, docid, float: null, location: "onRight" }); + return schema.node("image", { src, agnostic, width, docid, float: null, location: "onRight" }); }; const textNode = (schema: any, run: docs_v1.Schema$TextRun) => { @@ -435,7 +436,7 @@ export namespace RichTextUtils { const width = attrs.width; requests.push(await EncodeImage({ startIndex: position + nodeSize - 1, - uri: attrs.src, + uri: attrs.agnostic, width: Number(typeof width === "string" ? 
width.replace("px", "") : width) })); } @@ -498,15 +499,18 @@ export namespace RichTextUtils { }; }; - const EncodeImage = async (information: ImageInformation) => { - const source = [Docs.Create.ImageDocument(information.uri)]; + const EncodeImage = async ({ uri, width, startIndex }: ImageInformation) => { + if (!uri) { + return {}; + } + const source = [Docs.Create.ImageDocument(uri)]; const baseUrls = await GooglePhotos.Transactions.UploadThenFetch(source); if (baseUrls) { return { insertInlineImage: { uri: baseUrls[0], - objectSize: { width: { magnitude: information.width, unit: "PT" } }, - location: { index: information.startIndex } + objectSize: { width: { magnitude: width, unit: "PT" } }, + location: { index: startIndex } } }; } diff --git a/src/scraping/buxton/final/json/buxton.json b/src/scraping/buxton/final/json/buxton.json index 5c2e2c90c..16b28916b 100644 --- a/src/scraping/buxton/final/json/buxton.json +++ b/src/scraping/buxton/final/json/buxton.json @@ -18,9 +18,9 @@ "3DCad_Brochure.jpg" ], "__images": [ - "/files/images/buxton/upload_19ce4ebd-f207-4724-b8bf-610c8e1cf322.png", - "/files/images/buxton/upload_4dde8de0-f1ec-4ffb-b7cc-57b8c4ae6c94.png", - "/files/images/buxton/upload_825e8ed8-063c-428b-917d-eb62ea218f05.png" + "/files/images/buxton/upload_793981b0-d7da-45d1-a643-596cc3834166.png", + "/files/images/buxton/upload_6259e28a-055e-4b02-b547-e72c5e6fa3c0.png", + "/files/images/buxton/upload_4f4fb80f-467a-41e7-9bf5-1b1f377b2ec4.png" ], "title": "3Dconnexion CadMan 3D Motion Controller", "company": "3Dconnexion", @@ -63,11 +63,11 @@ "SpaceMouse_Plus_Info_Sheet.jpg" ], "__images": [ - "/files/images/buxton/upload_f84cb54e-9bdb-4fbc-9503-6cfa8ddd65be.jpg", - "/files/images/buxton/upload_506b5361-a9fd-4440-a19b-5b4af461878a.png", - "/files/images/buxton/upload_60380383-3603-4624-98cf-50a0d4afe4c9.png", - "/files/images/buxton/upload_2fad6fb2-f1e1-4fe3-915b-33586f05fcad.png", - "/files/images/buxton/upload_effcd815-49eb-4cd4-82cf-2519e58eb729.png" + "/files/images/buxton/upload_4a6cbfa8-964b-41d3-9f24-7d1aed22cd79.jpg", + "/files/images/buxton/upload_8c33d572-07da-4599-944c-eb2370b16a63.png", + "/files/images/buxton/upload_9afc5cca-6208-4944-a0f6-1c3fcd41fdcf.png", + "/files/images/buxton/upload_74e79798-9c9e-4c54-b42f-5813f218bc63.png", + "/files/images/buxton/upload_2bea6876-dc4e-4681-a1d3-433caec163c1.png" ], "title": "3Dconnexion Magellan/SpaceMouse Plus", "company": "3Dconnexion", @@ -114,13 +114,13 @@ "SpaceBall_5000_Data_Sheet.jpg" ], "__images": [ - "/files/images/buxton/upload_81972990-8cad-4299-926a-fe5d49711a80.jpg", - "/files/images/buxton/upload_04103620-72e4-431f-b34f-61d0f158277a.png", - "/files/images/buxton/upload_9bfe7c5a-1c5e-46e3-8f8d-4e0eec3e38e3.png", - "/files/images/buxton/upload_77da85f5-6575-4a8d-acb6-52f2df3307b0.png", - "/files/images/buxton/upload_1c041a33-b258-42d1-b9f2-d610a4838fb3.jpg", - "/files/images/buxton/upload_841f0d98-27d7-4298-a71d-c5cba72a262a.jpg", - "/files/images/buxton/upload_0e0eaf99-f8bb-4f02-ad46-651cd67c13b1.jpg" + "/files/images/buxton/upload_94e39580-5cca-46e9-ae49-ab6cd2fe3cad.jpg", + "/files/images/buxton/upload_eacee8a5-9849-4401-9c38-aa46fc0f517b.png", + "/files/images/buxton/upload_9b0b1f35-ac7c-4160-8806-8d243223872b.png", + "/files/images/buxton/upload_cf0c6e39-9ae6-48ce-90bd-d9f335503439.png", + "/files/images/buxton/upload_b4285b9f-26cd-4de5-8e37-c4e563c3e7f6.jpg", + "/files/images/buxton/upload_5b6ff31b-81b2-4a74-8591-27730c24225b.jpg", + "/files/images/buxton/upload_64dcea06-3b35-4979-bf40-5dfa5d4668b1.jpg" ], 
"title": "3Dconnexion Spaceball 5000", "company": "3Dconnexion", @@ -171,13 +171,13 @@ "SpaceNavigator_Press_Release.jpg" ], "__images": [ - "/files/images/buxton/upload_b44ec511-334e-46c0-a315-63d90fac8117.jpg", - "/files/images/buxton/upload_734ae360-faaf-4c78-89b4-a3f120fe233e.png", - "/files/images/buxton/upload_d20bbf6a-7310-407a-8b85-c607e9cf7f73.png", - "/files/images/buxton/upload_e9236600-dd5f-4805-92ca-805d5a301aa1.jpg", - "/files/images/buxton/upload_c85c977d-ef99-44be-b55c-8e8dde124e83.png", - "/files/images/buxton/upload_a6034d66-7de6-4d4b-bd64-ea95cc00ae8a.png", - "/files/images/buxton/upload_07415a65-964a-4936-aa6e-79861e8ebcbf.png" + "/files/images/buxton/upload_73511beb-f2bf-4f28-a1ea-64ef52a00426.jpg", + "/files/images/buxton/upload_4e088fb4-e5ff-4a1b-ba88-5f4575c86fe3.png", + "/files/images/buxton/upload_876a9921-3eb1-4c35-99e3-b82924fb88c4.png", + "/files/images/buxton/upload_d23c2886-c2b9-4558-9bfb-966aca7be20e.jpg", + "/files/images/buxton/upload_aef9171b-2e19-4fcb-b2dc-2281e89d498b.png", + "/files/images/buxton/upload_bbdaf252-1689-4680-8b19-ec5e79088e44.png", + "/files/images/buxton/upload_4cf49440-cf6b-484d-8391-3a2b0fae2b7e.png" ], "title": "3Dconnexion SpaceNavigator ", "company": "3Dconnexion", @@ -222,12 +222,12 @@ "3M_2006_Catalogue_p18.jpg" ], "__images": [ - "/files/images/buxton/upload_52b83c2a-5fe4-42aa-a488-5667b0a5beae.jpg", - "/files/images/buxton/upload_e05d8710-d82b-4f3b-8940-3745db1c855c.jpg", - "/files/images/buxton/upload_557c6fc7-04a3-4698-acde-4e53ceb6793c.jpg", - "/files/images/buxton/upload_d9723beb-bfe3-4556-b17d-aedf2db59777.jpg", - "/files/images/buxton/upload_5cb15815-3cab-406f-bf95-f9410997b4f4.jpg", - "/files/images/buxton/upload_a7fc91e0-7672-4f81-a8f3-66072997bd44.jpg" + "/files/images/buxton/upload_02df5121-8eff-452c-a52b-a44238afc06d.jpg", + "/files/images/buxton/upload_169aa4ee-4c34-43a2-8302-48bb3b7a01b6.jpg", + "/files/images/buxton/upload_a4d11c27-e4fa-4912-96da-3d971ec49ca0.jpg", + "/files/images/buxton/upload_7e9e1862-72e7-4f0d-8451-36de8c1bbe9c.jpg", + "/files/images/buxton/upload_76fa705f-2470-439c-acf6-03482d7a9570.jpg", + "/files/images/buxton/upload_7b6dd2c9-b88f-4958-a4d0-94bb42172e26.jpg" ], "title": "3M EM500 Ergonomic Mouse", "company": "3M", @@ -268,13 +268,13 @@ "Abaton_ProPoint_Brochure.jpg" ], "__images": [ - "/files/images/buxton/upload_f6f038a9-3583-4d21-9a43-f762e1df0b52.jpg", - "/files/images/buxton/upload_ad5f2bbd-66be-4dde-afe4-60e353a63f74.jpg", - "/files/images/buxton/upload_2e384260-5d2b-47a4-9508-028462a5a1e7.jpg", - "/files/images/buxton/upload_4dfcd3af-f06d-4c3e-8dff-7c43acd79a16.jpg", - "/files/images/buxton/upload_19eea9af-abec-4617-8b8d-fcb6637420de.jpg", - "/files/images/buxton/upload_89702b8a-fcb5-426d-b5af-c58e62d1c8ff.jpg", - "/files/images/buxton/upload_44419996-994e-471c-8dc7-cda22b00ac24.jpg" + "/files/images/buxton/upload_37169968-d329-40c5-9483-6bc198edc43c.jpg", + "/files/images/buxton/upload_64731ea4-a3aa-43ec-a56d-d4f6ecba7cf0.jpg", + "/files/images/buxton/upload_c41feac8-3b98-4d85-b9c7-79b3482a0695.jpg", + "/files/images/buxton/upload_5f67807b-f61b-4816-b93f-6d88c95b2418.jpg", + "/files/images/buxton/upload_a3e3cab8-48ab-43c2-bed9-5cc0ce68928d.jpg", + "/files/images/buxton/upload_4c3ac379-6703-4c87-a4a0-ca450a1151d2.jpg", + "/files/images/buxton/upload_8df14dc0-d75d-41f0-9e70-a221598d3df3.jpg" ], "title": "Abaton ProPoint Optical Trackball", "company": "Abaton", @@ -305,8 +305,8 @@ "Active_Book_Brochure_p1.jpg" ], "__images": [ - 
"/files/images/buxton/upload_2159da7b-9ebd-41ed-8b60-9eb36a6c6685.jpg", - "/files/images/buxton/upload_6d470914-fc41-4151-858b-f945807e62a2.png" + "/files/images/buxton/upload_9c8aa6a2-570f-4915-b840-504c8531c341.jpg", + "/files/images/buxton/upload_fbb7ad12-2a5d-4f76-89a9-ec30d7260031.png" ], "title": "Active Book Company Active Book Prototype", "company": "Active Book Company", @@ -342,11 +342,11 @@ "Adesso_ACK-540PW_June 21_2003.jpg" ], "__images": [ - "/files/images/buxton/upload_189469a3-1133-4a4f-8720-19c04f29684e.jpg", - "/files/images/buxton/upload_73f5d066-f9b5-41ce-9ce3-e27253ef51e7.jpg", - "/files/images/buxton/upload_498e5cbd-d65e-4b01-bed2-bcb2723ffee5.jpg", - "/files/images/buxton/upload_97ee9d03-07e0-448c-89f6-6ac61948de68.jpg", - "/files/images/buxton/upload_661e7387-dcd7-4ca1-8493-2e94f8803ef7.jpg" + "/files/images/buxton/upload_967656ff-44d7-4693-bcb0-dc0eab204413.jpg", + "/files/images/buxton/upload_9ffd4ccb-8ca5-4926-8fa2-38277b2ccfb6.jpg", + "/files/images/buxton/upload_22399981-f8c0-46e0-953c-89f7eb043ad2.jpg", + "/files/images/buxton/upload_15ad02a9-123f-4e7e-94c5-d959076520f0.jpg", + "/files/images/buxton/upload_8905097a-3568-4d37-9ad3-126a0e429f12.jpg" ], "title": "Adesso ACK-540PB PS/2 Mini PS/2 Touchpad Keyboard", "company": "Adesso", @@ -405,17 +405,17 @@ "Adesso_KP_Mouse_11_Product.JPG" ], "__images": [ - "/files/images/buxton/upload_0a127e30-9014-4300-bade-07945fba940f.jpg", - "/files/images/buxton/upload_ca20611c-9061-4e56-9d72-4c4a3c798d3f.jpg", - "/files/images/buxton/upload_12b46021-eeef-47f0-9e55-ca9e9b76f9e4.jpg", - "/files/images/buxton/upload_5e86ad07-77af-4d3e-a82e-eaa9cafdbbe4.jpg", - "/files/images/buxton/upload_4be64833-53e9-4216-b7ec-d521e0b87933.jpg", - "/files/images/buxton/upload_7cb731b5-5c52-47e0-a33b-c113530478f2.jpg", - "/files/images/buxton/upload_b20407b3-3578-4fa5-8f16-e0bf729e4daa.jpg", - "/files/images/buxton/upload_bee2fcb8-702a-46aa-9ec3-eb9b50ef1ba7.jpg", - "/files/images/buxton/upload_0ee0f6d5-34fc-41f3-9c8e-cf462dbcc40b.jpg", - "/files/images/buxton/upload_8f442582-b901-4a38-8df6-5a9b3195df9f.jpg", - "/files/images/buxton/upload_3f96763c-c3af-46a7-8831-3206c8bc460e.jpg" + "/files/images/buxton/upload_5bc99192-0935-4f6d-8d6c-a2218d55b359.jpg", + "/files/images/buxton/upload_52a6212e-c79d-4ec6-86a0-a3ca4ea54387.jpg", + "/files/images/buxton/upload_997acb64-0e08-4af2-b0e3-0b86a18bace5.jpg", + "/files/images/buxton/upload_00586980-0a08-43e1-9721-c2f5dcd1079e.jpg", + "/files/images/buxton/upload_3f695b11-dc1d-4d54-abec-eacfac80bdc7.jpg", + "/files/images/buxton/upload_161f1420-9594-4dca-ba62-1ca32e9a2028.jpg", + "/files/images/buxton/upload_a8b4ea6e-f1f8-47db-a796-72f566e60e6f.jpg", + "/files/images/buxton/upload_b6365e4e-4656-4bbf-aec6-feb920be6df9.jpg", + "/files/images/buxton/upload_1a0e9061-39e1-4dec-8add-5cc1abd110b5.jpg", + "/files/images/buxton/upload_d9bba544-f2b1-4a15-81e5-0d10e60d7881.jpg", + "/files/images/buxton/upload_56d486df-d7bb-44b0-80ad-c41c63590bae.jpg" ], "title": "Adesso 2-in-1 Optical Keypad Calculator Mouse AKP-170", "company": "Adesso Inc", @@ -477,18 +477,18 @@ "NB75D_Mouse_Manual.jpg" ], "__images": [ - "/files/images/buxton/upload_dae8f950-38bf-4fdf-8db0-153e3cae9cae.jpg", - "/files/images/buxton/upload_3e054d83-d01f-4d3d-aad2-fa8ee1d22a97.jpg", - "/files/images/buxton/upload_e9c2ddb0-0684-4bb4-a88c-5ee8074e456e.jpg", - "/files/images/buxton/upload_9c829a69-7d45-4d25-b906-98153aa03001.jpg", - "/files/images/buxton/upload_4faa425c-a100-4ce4-869c-a6c6705779ee.jpg", - 
"/files/images/buxton/upload_ed20ac31-69f6-45b8-b8e0-b8ad829ba002.jpg", - "/files/images/buxton/upload_f3d3d8f5-01cf-4cd1-9d1c-0a11a8c3e239.jpg", - "/files/images/buxton/upload_af1fa3b6-4537-416a-b626-34bed1154f35.jpg", - "/files/images/buxton/upload_5127f377-b697-4ad7-9fb3-fd6f82017c3c.jpg", - "/files/images/buxton/upload_061ae6a2-e083-492a-bc8a-826c4be00790.jpg", - "/files/images/buxton/upload_32e91966-7f1b-4cd7-858b-9cdb63affd13.jpg", - "/files/images/buxton/upload_ccc9de21-b047-4f9c-89c6-08fb3d206c7a.jpg" + "/files/images/buxton/upload_2db5c520-b370-4f03-8481-908e42428466.jpg", + "/files/images/buxton/upload_b28643eb-57a0-4602-b7cc-c41d553628b9.jpg", + "/files/images/buxton/upload_2478a89a-4eea-4e4c-af83-ecd292f44f3d.jpg", + "/files/images/buxton/upload_113d7356-f550-4491-849d-f69087c73918.jpg", + "/files/images/buxton/upload_44c40c44-ade6-499d-951c-4b436131e66e.jpg", + "/files/images/buxton/upload_d91f9756-09bc-447f-8f04-dc78344a5f11.jpg", + "/files/images/buxton/upload_b8c51011-5f93-43f3-85aa-b611a3b3cf93.jpg", + "/files/images/buxton/upload_483544a3-c3f7-4c8f-a236-def10d32f4f0.jpg", + "/files/images/buxton/upload_ea048294-3f57-42c3-b5b4-27062a5755dc.jpg", + "/files/images/buxton/upload_8eb1b9b1-9ca3-49c8-9bd4-7302a9c1f07e.jpg", + "/files/images/buxton/upload_029aa1d6-056d-4cf0-8eaf-b23b3d0e4317.jpg", + "/files/images/buxton/upload_3acc0955-da14-4a53-ac58-2e494e04ca78.jpg" ], "title": "A4 Tech BatteryFREE Wireless Optical Mouse Model NB-75D", "company": "A4Tech", diff --git a/src/server/ApiManagers/DownloadManager.ts b/src/server/ApiManagers/DownloadManager.ts index fad5e6789..01d2dfcad 100644 --- a/src/server/ApiManagers/DownloadManager.ts +++ b/src/server/ApiManagers/DownloadManager.ts @@ -254,7 +254,7 @@ async function writeHierarchyRecursive(file: Archiver.Archiver, hierarchy: Hiera // and dropped in the browser and thus hosted remotely) so we upload it // to our server and point the zip file to it, so it can bundle up the bytes const information = await DashUploadUtils.UploadImage(result); - path = information instanceof Error ? "" : information.serverAccessPaths[SizeSuffix.Original]; + path = information instanceof Error ? "" : information.accessPaths[SizeSuffix.Original].server; } // write the file specified by the path to the directory in the // zip file given by the prefix. 
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 83a0064e8..0f1758c26 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -61,13 +61,16 @@ export namespace DashUploadUtils { const type = "content-type"; export interface ImageUploadInformation { - clientAccessPath: string; - serverAccessPaths: { [key: string]: string }; + accessPaths: AccessPathInfo; exifData: EnrichedExifData; contentSize?: number; contentType?: string; } + export interface AccessPathInfo { + [suffix: string]: { client: string, server: string }; + } + const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia; export async function upload(file: File): Promise { @@ -94,7 +97,7 @@ export namespace DashUploadUtils { } console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`)); - return { clientAccessPath: undefined }; + return { accessPaths: undefined }; } async function UploadPdf(absolutePath: string) { @@ -213,29 +216,40 @@ export namespace DashUploadUtils { }; }; - export async function MoveParsedFile(absolutePath: string, destination: Directory): Promise<{ clientAccessPath: Opt }> { - return new Promise<{ clientAccessPath: Opt }>(resolve => { + export async function MoveParsedFile(absolutePath: string, destination: Directory): Promise> { + return new Promise>(resolve => { const filename = basename(absolutePath); const destinationPath = serverPathToFile(destination, filename); rename(absolutePath, destinationPath, error => { - resolve({ clientAccessPath: error ? undefined : clientPathToFile(destination, filename) }); + resolve(error ? undefined : { + agnostic: getAccessPaths(destination, filename) + }); }); }); } + function getAccessPaths(directory: Directory, fileName: string) { + return { + client: clientPathToFile(directory, fileName), + server: serverPathToFile(directory, fileName) + }; + } + export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise => { const { requestable, source, ...remaining } = metadata; const extension = `.${remaining.contentType.split("/")[1].toLowerCase()}`; const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}${extension}`; + const { images } = Directory; const information: ImageUploadInformation = { - clientAccessPath: clientPathToFile(Directory.images, resolved), - serverAccessPaths: {}, + accessPaths: { + agnostic: getAccessPaths(images, resolved) + }, ...remaining }; const outputPath = pathToDirectory(Directory.images); const writtenFiles = await outputResizedImages(() => request(requestable), outputPath, resolved, extension); for (const suffix of Object.keys(writtenFiles)) { - information.serverAccessPaths[suffix] = serverPathToFile(Directory.images, writtenFiles[suffix]); + information.accessPaths[suffix] = getAccessPaths(images, writtenFiles[suffix]); } if (isLocal().test(source) && cleanUp) { unlinkSync(source); @@ -270,10 +284,9 @@ export namespace DashUploadUtils { export async function outputResizedImages(readStreamSource: () => ReadStreamLike | Promise, outputPath: string, fileName: string, ext: string) { const writtenFiles: { [suffix: string]: string } = {}; for (const { resizer, suffix } of resizers(ext)) { - const resizedPath = path.resolve(outputPath, InjectSize(fileName, suffix)); - writtenFiles[suffix] = resizedPath; + const resolved = writtenFiles[suffix] = InjectSize(fileName, suffix); await new Promise(async (resolve, reject) => { - const writeStream = createWriteStream(resizedPath); + 
const writeStream = createWriteStream(path.resolve(outputPath, resolved)); let readStream: ReadStreamLike; const source = readStreamSource(); if (source instanceof Promise) { diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts index 8ae63caa3..d305eed0a 100644 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ b/src/server/apis/google/GooglePhotosUploadUtils.ts @@ -84,6 +84,7 @@ export namespace GooglePhotosUploadUtils { if (!DashUploadUtils.validateExtension(url)) { return undefined; } + const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data const parameters = { method: 'POST', uri: prepend('uploads'), @@ -92,7 +93,7 @@ export namespace GooglePhotosUploadUtils { 'X-Goog-Upload-File-Name': filename || path.basename(url), 'X-Goog-Upload-Protocol': 'raw' }, - body: await request(url, { encoding: null }) // returns a readable stream with the unencoded binary image data + body }; return new Promise((resolve, reject) => request(parameters, (error, _response, body) => { if (error) { -- cgit v1.2.3-70-g09d2 From ddd7696c15fa26b99650dd09a2738885063f7f61 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Feb 2020 16:52:28 -0500 Subject: added secure backdoor delete route --- src/server/ApiManagers/SessionManager.ts | 9 +++++++++ src/server/DashSession/DashSessionAgent.ts | 1 + 2 files changed, 10 insertions(+) (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/SessionManager.ts b/src/server/ApiManagers/SessionManager.ts index f1629b8f0..cd8d19778 100644 --- a/src/server/ApiManagers/SessionManager.ts +++ b/src/server/ApiManagers/SessionManager.ts @@ -53,6 +53,15 @@ export default class SessionManager extends ApiManager { }) }); + register({ + method: Method.GET, + subscription: this.secureSubscriber("delete"), + secureHandler: this.authorizedAction(({ res }) => { + sessionAgent.serverWorker.emit("delete"); + res.redirect("/home"); + }) + }); + } } \ No newline at end of file diff --git a/src/server/DashSession/DashSessionAgent.ts b/src/server/DashSession/DashSessionAgent.ts index 85bfe14de..1ed98cdbe 100644 --- a/src/server/DashSession/DashSessionAgent.ts +++ b/src/server/DashSession/DashSessionAgent.ts @@ -37,6 +37,7 @@ export class DashSessionAgent extends AppliedSessionAgent { monitor.addReplCommand("debug", [/\S+\@\S+/], async ([to]) => this.dispatchZippedDebugBackup(to)); monitor.on("backup", this.backup); monitor.on("debug", async ({ to }) => this.dispatchZippedDebugBackup(to)); + monitor.on("delete", WebSocket.deleteFields); monitor.coreHooks.onCrashDetected(this.dispatchCrashReport); return sessionKey; } -- cgit v1.2.3-70-g09d2 From 84fd5c047fa33bf529c19a1bda03eaeab47b6489 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Tue, 11 Feb 2020 16:55:23 -0500 Subject: backdoor async --- src/server/ApiManagers/SessionManager.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/SessionManager.ts b/src/server/ApiManagers/SessionManager.ts index cd8d19778..bcaa6598f 100644 --- a/src/server/ApiManagers/SessionManager.ts +++ b/src/server/ApiManagers/SessionManager.ts @@ -56,9 +56,9 @@ export default class SessionManager extends ApiManager { register({ method: Method.GET, subscription: this.secureSubscriber("delete"), - secureHandler: this.authorizedAction(({ res }) => { - sessionAgent.serverWorker.emit("delete"); - res.redirect("/home"); + secureHandler: 
this.authorizedAction(async ({ res }) => { + const { error } = await sessionAgent.serverWorker.emit("delete"); + res.send(error ? error.message : "Your request was successful: the server successfully deleted the database. Return to /home."); }) }); -- cgit v1.2.3-70-g09d2 From 307e011a5fbe0433b75cd3d00c0d4d50d578fea0 Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Wed, 12 Feb 2020 03:35:02 -0500 Subject: fixed bugs and intentional behaviors of importer, namely destroying the size stream and making DOF and price optional fields, respectively --- src/client/documents/Documents.ts | 4 +++- src/scraping/buxton/final/BuxtonImporter.ts | 22 ++++++++++++---------- src/server/ApiManagers/UtilManager.ts | 5 ++++- 3 files changed, 19 insertions(+), 12 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index 49d1820f5..b00a1a91d 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -373,10 +373,12 @@ export namespace Docs { const { ImageDocument, StackingDocument } = Docs.Create; if (Array.isArray(__images)) { const constructed = __images.map(relative => Utils.prepend(relative)); + const hero = constructed[0]; + constructed.splice(0, 1); const deviceImages = constructed.map((url, i) => ImageDocument(url, { title: `image${i}.${extname(url)}` })); const doc = StackingDocument(deviceImages, { title: device.title, _LODdisable: true }); const deviceProto = Doc.GetProto(doc); - deviceProto.hero = new ImageField(constructed[0]); + deviceProto.hero = new ImageField(hero); Docs.Get.DocumentHierarchyFromJson(device, undefined, deviceProto); Doc.AddDocToList(parentProto, "data", doc); } diff --git a/src/scraping/buxton/final/BuxtonImporter.ts b/src/scraping/buxton/final/BuxtonImporter.ts index 3e067bc6a..3d7421e90 100644 --- a/src/scraping/buxton/final/BuxtonImporter.ts +++ b/src/scraping/buxton/final/BuxtonImporter.ts @@ -84,7 +84,6 @@ namespace Utilities { if (error) { reject(error); } - console.log(stream); stream.on('data', (chunk: any) => body += chunk.toString()); stream.on('end', () => resolve(body)); }); @@ -121,17 +120,20 @@ const RegexMap = new Map>([ transformer: raw => ({ transformed: Utilities.collectUniqueTokens(raw).transformed[0] }), }], ["originalPrice", { - exp: /Original Price \(USD\)\:\s+(\$[0-9]+\.[0-9]+|NFS)/, + exp: /Original Price \(USD\)\:\s+(\$[0-9\,]+\.[0-9]+|NFS)/, transformer: (raw: string) => { + raw = raw.replace(/\,/g, ""); if (raw === "NFS") { return { transformed: -1 }; } return Utilities.numberValue(raw.slice(1)); - } + }, + required: false }], ["degreesOfFreedom", { exp: /Degrees of Freedom:\s+([0-9]+)/, - transformer: Utilities.numberValue + transformer: Utilities.numberValue, + required: false }], ["dimensions", { exp: /Dimensions\s+\(L x W x H\):\s+([0-9\.]+\s+x\s+[0-9\.]+\s+x\s+[0-9\.]+\s\([A-Za-z]+\))/, @@ -226,9 +228,7 @@ const hyperlinkXPath = '//*[name()="Relationship" and contains(@Type, "hyperlink async function extractFileContents(pathToDocument: string): Promise { console.log('Extracting text...'); const zip = new StreamZip({ file: pathToDocument, storeEntries: true }); - console.log(zip); await new Promise(resolve => zip.on('ready', resolve)); - console.log("Zip ready!"); // extract the body of the document and, specifically, its captions const document = await Utilities.readAndParseXml(zip, "word/document.xml"); @@ -276,20 +276,22 @@ async function writeImages(zip: any): Promise { const 
imageUrls: string[] = []; for (const mediaPath of imageEntries) { - console.log(`Considering ${mediaPath}`); const streamImage = () => new Promise((resolve, reject) => { zip.stream(mediaPath, (error: any, stream: any) => error ? reject(error) : resolve(stream)); }); const { width, height, type } = await new Promise(async resolve => { - const sizeStream = createImageSizeStream().on('size', resolve); - (await streamImage()).pipe(sizeStream); + const sizeStream = createImageSizeStream().on('size', (dimensions: Dimensions) => { + readStream.destroy(); + resolve(dimensions) + }); + const readStream = await streamImage(); + readStream.pipe(sizeStream); }); if (Math.abs(width - height) < 10) { continue; } - console.log(`Streaming!`); const ext = `.${type}`.toLowerCase(); const generatedFileName = `upload_${Utils.GenerateGuid()}${ext}`; diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index 4cb57a4e7..5aac8261e 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -42,7 +42,10 @@ export default class UtilManager extends ApiManager { register({ method: Method.GET, subscription: "/buxton", - secureHandler: async ({ res }) => res.send(await executeImport()) + secureHandler: async ({ req, res }) => { + req.setTimeout(300000); + res.send(await executeImport()); + } }); register({ -- cgit v1.2.3-70-g09d2 From 2daa348eb760dfd322a271bf3f9f69ebb713a91c Mon Sep 17 00:00:00 2001 From: Sam Wilkins <35748010+samwilkins333@users.noreply.github.com> Date: Wed, 12 Feb 2020 04:36:31 -0500 Subject: switched importer to websocket for continual updates --- src/client/documents/Documents.ts | 52 +++++++++++++++-------------- src/scraping/buxton/final/BuxtonImporter.ts | 52 ++++++++++++++++++----------- src/server/ApiManagers/UtilManager.ts | 9 ----- src/server/Message.ts | 5 ++- src/server/Websocket/Websocket.ts | 7 ++++ 5 files changed, 70 insertions(+), 55 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index b00a1a91d..cc18dc0a6 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -55,6 +55,7 @@ import { InkingControl } from "../views/InkingControl"; import { RichTextField } from "../../new_fields/RichTextField"; import { Networking } from "../Network"; import { extname } from "path"; +import { MessageStore } from "../../server/Message"; const requestImageSize = require('../util/request-image-size'); const path = require('path'); @@ -346,6 +347,7 @@ export namespace Docs { export namespace Create { export function Buxton() { + let responded = false; const loading = new Doc; loading.title = "Please wait for the import script..."; const parent = TreeDocument([loading], { @@ -354,36 +356,36 @@ export namespace Docs { _height: 400, _LODdisable: true }); - Networking.FetchFromServer("/buxton").then(response => { - const devices = JSON.parse(response); - if (!Array.isArray(devices)) { - if ("error" in devices) { - loading.title = devices.error; - } else { - console.log(devices); - alert("The importer returned an unexpected import format. 
Check the console."); - } - return; + const parentProto = Doc.GetProto(parent); + const { _socket } = DocServer; + Utils.AddServerHandler(_socket, MessageStore.BuxtonDocumentResult, ({ device, errors }) => { + if (!responded) { + responded = true; + parentProto.data = new List(); } - const parentProto = Doc.GetProto(parent); - parentProto.data = new List(); - devices.forEach(device => { + if (device) { const { __images } = device; delete device.__images; const { ImageDocument, StackingDocument } = Docs.Create; - if (Array.isArray(__images)) { - const constructed = __images.map(relative => Utils.prepend(relative)); - const hero = constructed[0]; - constructed.splice(0, 1); - const deviceImages = constructed.map((url, i) => ImageDocument(url, { title: `image${i}.${extname(url)}` })); - const doc = StackingDocument(deviceImages, { title: device.title, _LODdisable: true }); - const deviceProto = Doc.GetProto(doc); - deviceProto.hero = new ImageField(hero); - Docs.Get.DocumentHierarchyFromJson(device, undefined, deviceProto); - Doc.AddDocToList(parentProto, "data", doc); - } - }); + const constructed = __images.map(relative => Utils.prepend(relative)); + const deviceImages = constructed.map((url, i) => ImageDocument(url, { title: `image${i}.${extname(url)}` })); + const doc = StackingDocument(deviceImages, { title: device.title, _LODdisable: true }); + const deviceProto = Doc.GetProto(doc); + deviceProto.hero = new ImageField(constructed[0]); + Docs.Get.DocumentHierarchyFromJson(device, undefined, deviceProto); + Doc.AddDocToList(parentProto, "data", doc); + } else if (errors) { + console.log(errors); + } else { + alert("A Buxton document import was completely empty (??)"); + } + }); + Utils.AddServerHandler(_socket, MessageStore.BuxtonImportComplete, ({ deviceCount, errorCount }) => { + _socket.off(MessageStore.BuxtonDocumentResult.Message); + _socket.off(MessageStore.BuxtonImportComplete.Message); + alert(`Successfully imported ${deviceCount} device${deviceCount === 1 ? "" : "s"}, with ${errorCount} error${errorCount === 1 ? 
"" : "s"}.`); }); + Utils.Emit(_socket, MessageStore.BeginBuxtonImport, ""); return parent; } diff --git a/src/scraping/buxton/final/BuxtonImporter.ts b/src/scraping/buxton/final/BuxtonImporter.ts index 3d7421e90..d9d48d68c 100644 --- a/src/scraping/buxton/final/BuxtonImporter.ts +++ b/src/scraping/buxton/final/BuxtonImporter.ts @@ -8,7 +8,6 @@ const StreamZip = require('node-stream-zip'); const createImageSizeStream = require("image-size-stream"); import { parseXml } from "libxmljs"; import { strictEqual } from "assert"; -import { BatchedArray, TimeUnit } from "array-batcher"; interface DocumentContents { body: string; @@ -24,21 +23,33 @@ export interface DeviceDocument { longDescription: string; company: string; year: number; - originalPrice: number; - degreesOfFreedom: number; + originalPrice?: number; + degreesOfFreedom?: number; dimensions?: string; primaryKey: string; secondaryKey: string; attribute: string; + __images: string[]; + hyperlinks: string[]; + captions: string[]; + embeddedFileNames: string[]; } -interface AnalysisResult { +export interface AnalysisResult { device?: DeviceDocument; - errors?: any; + errors?: { [key: string]: string }; } type Transformer = (raw: string) => { transformed?: T, error?: string }; +export interface ImportResults { + deviceCount: number, + errorCount: number +} + +type ResultCallback = (result: AnalysisResult) => void; +type TerminatorCallback = (result: ImportResults) => void; + interface Processor { exp: RegExp; matchIndex?: number; @@ -168,7 +179,7 @@ const successOut = "buxton.json"; const failOut = "incomplete.json"; const deviceKeys = Array.from(RegexMap.keys()); -export default async function executeImport() { +export default async function executeImport(emitter: ResultCallback, terminator: TerminatorCallback) { try { const contents = readdirSync(sourceDir); const wordDocuments = contents.filter(file => /.*\.docx?$/.test(file)).map(file => `${sourceDir}/${file}`); @@ -176,7 +187,7 @@ export default async function executeImport() { rimraf.sync(dir); mkdirSync(dir); }); - return parseFiles(wordDocuments); + return parseFiles(wordDocuments, emitter, terminator); } catch (e) { const message = [ "Unable to find a source directory.", @@ -188,23 +199,22 @@ export default async function executeImport() { } } -async function parseFiles(wordDocuments: string[]): Promise { - const imported = await BatchedArray.from(wordDocuments, { batchSize: 8 }).batchedMapPatientInterval<{ fileName: string, contents: DocumentContents }>({ magnitude: 10, unit: TimeUnit.Seconds }, async (batch, collector) => { - for (const filePath of batch) { - const fileName = path.basename(filePath).replace("Bill_Notes_", ""); - console.log(cyan(`\nExtracting contents from ${fileName}...`)); - collector.push({ fileName, contents: await extractFileContents(filePath) }); - } - }); - console.log(yellow("\nAnalyzing the extracted document text...\n")); - const results = imported.map(({ fileName, contents }) => analyze(fileName, contents)); +async function parseFiles(wordDocuments: string[], emitter: ResultCallback, terminator: TerminatorCallback): Promise { + const results: AnalysisResult[] = []; + for (const filePath of wordDocuments) { + const fileName = path.basename(filePath).replace("Bill_Notes_", ""); + console.log(cyan(`\nExtracting contents from ${fileName}...`)); + const result = analyze(fileName, await extractFileContents(filePath)); + emitter(result); + results.push(result); + } const masterDevices: DeviceDocument[] = []; - const masterErrors: any[] = []; + const 
masterErrors: { [key: string]: string }[] = []; results.forEach(({ device, errors }) => { if (device) { masterDevices.push(device); - } else { + } else if (errors) { masterErrors.push(errors); } }); @@ -219,6 +229,8 @@ async function parseFiles(wordDocuments: string[]): Promise { await writeOutputFile(failOut, masterErrors, total, false); console.log(); + terminator({ deviceCount: masterDevices.length, errorCount: masterErrors.length }); + return masterDevices; } @@ -311,7 +323,7 @@ function analyze(fileName: string, contents: DocumentContents): AnalysisResult { embeddedFileNames, __images: imageUrls }; - const errors: any = { fileName }; + const errors: { [key: string]: string } = { fileName }; for (const key of deviceKeys) { const { exp, transformer, matchIndex, required } = RegexMap.get(key)!; diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts index 5aac8261e..8adc3da81 100644 --- a/src/server/ApiManagers/UtilManager.ts +++ b/src/server/ApiManagers/UtilManager.ts @@ -39,15 +39,6 @@ export default class UtilManager extends ApiManager { } }); - register({ - method: Method.GET, - subscription: "/buxton", - secureHandler: async ({ req, res }) => { - req.setTimeout(300000); - res.send(await executeImport()); - } - }); - register({ method: Method.GET, subscription: "/version", diff --git a/src/server/Message.ts b/src/server/Message.ts index 79b6fa1e0..2a03e2311 100644 --- a/src/server/Message.ts +++ b/src/server/Message.ts @@ -1,4 +1,5 @@ import { Utils } from "../Utils"; +import { AnalysisResult, ImportResults } from "../scraping/buxton/final/BuxtonImporter"; export class Message { private _name: string; @@ -56,6 +57,9 @@ export namespace MessageStore { export const GetDocument = new Message("Get Document"); export const DeleteAll = new Message("Delete All"); export const ConnectionTerminated = new Message("Connection Terminated"); + export const BeginBuxtonImport = new Message("Begin Buxton Import"); + export const BuxtonDocumentResult = new Message("Buxton Document Result"); + export const BuxtonImportComplete = new Message("Buxton Import Complete"); export const GetRefField = new Message("Get Ref Field"); export const GetRefFields = new Message("Get Ref Fields"); @@ -65,5 +69,4 @@ export namespace MessageStore { export const DeleteField = new Message("Delete field"); export const DeleteFields = new Message("Delete fields"); - } diff --git a/src/server/Websocket/Websocket.ts b/src/server/Websocket/Websocket.ts index ba7ca8f35..724221be1 100644 --- a/src/server/Websocket/Websocket.ts +++ b/src/server/Websocket/Websocket.ts @@ -12,6 +12,7 @@ import { timeMap } from "../ApiManagers/UserManager"; import { green } from "colors"; import { networkInterfaces, type } from "os"; import { object } from "serializr"; +import executeImport from "../../scraping/buxton/final/BuxtonImporter"; export namespace WebSocket { @@ -106,6 +107,12 @@ export namespace WebSocket { Utils.AddServerHandler(socket, MessageStore.DeleteFields, ids => DeleteFields(socket, ids)); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefField, GetRefField); Utils.AddServerHandlerCallback(socket, MessageStore.GetRefFields, GetRefFields); + Utils.AddServerHandler(socket, MessageStore.BeginBuxtonImport, () => { + executeImport( + deviceOrError => Utils.Emit(socket, MessageStore.BuxtonDocumentResult, deviceOrError), + results => Utils.Emit(socket, MessageStore.BuxtonImportComplete, results) + ); + }); disconnect = () => { socket.broadcast.emit("connection_terminated", Date.now()); -- cgit 
v1.2.3-70-g09d2 From 040f86c99465071301daf43481ec7c54fb593234 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 15 Feb 2020 17:19:48 -0500 Subject: commented google photos manager --- .../apis/google_docs/GooglePhotosClientUtils.ts | 4 +- src/new_fields/RichTextUtils.ts | 2 +- src/server/ApiManagers/GooglePhotosManager.ts | 132 +++++++++++++++------ 3 files changed, 102 insertions(+), 36 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/apis/google_docs/GooglePhotosClientUtils.ts b/src/client/apis/google_docs/GooglePhotosClientUtils.ts index 7e5d5fe1b..f8723f02d 100644 --- a/src/client/apis/google_docs/GooglePhotosClientUtils.ts +++ b/src/client/apis/google_docs/GooglePhotosClientUtils.ts @@ -306,7 +306,7 @@ export namespace GooglePhotos { }; export const WriteMediaItemsToServer = async (body: { mediaItems: any[] }): Promise => { - const uploads = await Networking.PostToServer("/googlePhotosMediaDownload", body); + const uploads = await Networking.PostToServer("/googlePhotosMediaGet", body); return uploads; }; @@ -344,7 +344,7 @@ export namespace GooglePhotos { media.push({ url, description }); } if (media.length) { - const results = await Networking.PostToServer("/googlePhotosMediaUpload", { media, album }); + const results = await Networking.PostToServer("/googlePhotosMediaPost", { media, album }); return results; } }; diff --git a/src/new_fields/RichTextUtils.ts b/src/new_fields/RichTextUtils.ts index 7c1fc39d8..1d90c984d 100644 --- a/src/new_fields/RichTextUtils.ts +++ b/src/new_fields/RichTextUtils.ts @@ -128,7 +128,7 @@ export namespace RichTextUtils { return { baseUrl: embeddedObject.imageProperties!.contentUri! }; }); - const uploads = await Networking.PostToServer("/googlePhotosMediaDownload", { mediaItems }); + const uploads = await Networking.PostToServer("/googlePhotosMediaGet", { mediaItems }); if (uploads.length !== mediaItems.length) { throw new AssertionError({ expected: mediaItems.length, actual: uploads.length, message: "Error with internally uploading inlineObjects!" 
}); diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 3236d1ee2..04b724f4b 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -7,28 +7,33 @@ import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils" import { Opt } from "../../new_fields/Doc"; import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils"; import { Database } from "../database"; +import { red } from "colors"; +const prefix = "google_photos_"; +const remoteUploadError = "None of the preliminary uploads to Google's servers was successful."; const authenticationError = "Unable to authenticate Google credentials before uploading to Google Photos!"; const mediaError = "Unable to convert all uploaded bytes to media items!"; -const UploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; +const localUploadError = (count: number) => `Unable to upload ${count} images to Dash's server`; const requestError = "Unable to execute download: the body's media items were malformed."; const downloadError = "Encountered an error while executing downloads."; + interface GooglePhotosUploadFailure { batch: number; index: number; url: string; reason: string; } + interface MediaItem { baseUrl: string; } + interface NewMediaItem { description: string; simpleMediaItem: { uploadToken: string; }; } -const prefix = "google_photos_"; /** * This manager handles the creation of routes for google photos functionality. @@ -37,27 +42,47 @@ export default class GooglePhotosManager extends ApiManager { protected initialize(register: Registration): void { + /** + * This route receives a list of urls that point to images stored + * on Dash's file system, and, in a two step process, uploads them to Google's servers and + * returns the information Google generates about the associated uploaded remote images. + */ register({ method: Method.POST, - subscription: "/googlePhotosMediaUpload", + subscription: "/googlePhotosMediaPost", secureHandler: async ({ user, req, res }) => { const { media } = req.body; + + // first we need to ensure that we know the google account to which these photos will be uploaded const token = await GoogleApiServerUtils.retrieveAccessToken(user.id); if (!token) { return _error(res, authenticationError); } + + // next, having one large list or even synchronously looping over things trips a threshold + // set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in + // batches of 25, where the next batch is sent 100 millieconds after we receive a response from Google's servers. 
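// A hand-rolled sketch of the pacing described in the comment above, shown only to make the
// batching idea concrete: the actual code below delegates to BatchedArray.batchedMapPatientInterval
// from array-batcher. The helper name mapInThrottledBatches and its sleep-based implementation are
// assumptions for illustration, not the library's real internals.
const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

async function mapInThrottledBatches<I, O>(
    items: I[],
    batchSize: number,
    pauseMs: number,
    handler: (batch: I[], collector: O[], completedBatches: number) => Promise<void>
): Promise<O[]> {
    const collector: O[] = [];
    for (let start = 0, completedBatches = 0; start < items.length; start += batchSize, completedBatches++) {
        // wait for the response to this batch, then pause before dispatching the next one
        await handler(items.slice(start, start + batchSize), collector, completedBatches);
        await sleep(pauseMs);
    }
    return collector;
}
// e.g. mapInThrottledBatches(media, 25, 100, async (batch, collector) => { /* upload one batch */ });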
+        /**
+         * This route receives a list of urls that point to images
+         * stored on Google's servers and (following a *rough* heuristic)
+         * uploads each image to Dash's server if it hasn't already been uploaded.
+         * Unfortunately, since Google has so many of these images on its servers,
+         * these user content urls expire every 6 hours. So we can't store the url of a locally uploaded
+         * Google image and compare the candidate url to it to figure out if we already have it,
+         * since the same bytes on their server might now be associated with a new, random url.
+         * So, we do the next best thing and try to use an intrinsic attribute of those bytes as
+         * an identifier: the precise content size. This works in small cases, but has the obvious flaw of failing to upload
+         * an image locally if we already have uploaded another Google user content image with the exact same content size.
+         */
        register({
            method: Method.POST,
-           subscription: "/googlePhotosMediaDownload",
+           subscription: "/googlePhotosMediaGet",
            secureHandler: async ({ req, res }) => {
                const { mediaItems } = req.body as { mediaItems: MediaItem[] };
+               if (!mediaItems) {
+                   // non-starter, since the input was in an invalid format
+                   _invalid(res, requestError);
+                   return;
+               }
                let failed = 0;
-               if (mediaItems) {
-                   const completed: Opt[] = [];
-                   for (const { baseUrl } of mediaItems) {
-                       const results = await DashUploadUtils.InspectImage(baseUrl);
-                       if (results instanceof Error) {
-                           failed++;
-                           continue;
-                       }
-                       const { contentSize, ...attributes } = results;
-                       const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize);
-                       if (!found) {
-                           const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
-                           if (upload) {
-                               completed.push(upload);
-                               await Database.Auxiliary.LogUpload(upload);
-                           } else {
-                               failed++;
-                           }
+               const completed: Opt[] = [];
+               for (const { baseUrl } of mediaItems) {
+                   // start by getting the content size of the remote image
+                   const results = await DashUploadUtils.InspectImage(baseUrl);
+                   if (results instanceof Error) {
+                       // if something went wrong here, we can't hope to upload it, so just move on to the next
+                       failed++;
+                       continue;
+                   }
+                   const { contentSize, ...attributes } = results;
+                   // check to see if we have uploaded a Google user content image *specifically via this route* already
+                   // that has this exact content size
+                   const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize);
+                   if (!found) {
+                       // if we haven't, then upload it locally to Dash's server
+                       const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error));
+                       if (upload) {
+                           completed.push(upload);
+                           // inform the heuristic that we've encountered an image with this content size,
+                           // to be later checked against in future uploads
+                           await Database.Auxiliary.LogUpload(upload);
                        } else {
-                           completed.push(found);
+                           // make note of a failure to upload locally
+                           failed++;
                        }
+                   } else {
+                       // if we have, the variable 'found' is handily the upload information of the
+                       // existing image, so we add it to the list as if we had just uploaded it now without actually
+                       // making a duplicate write
+                       completed.push(found);
                    }
-                   if (failed) {
-                       return _error(res, UploadError(failed));
-                   }
-                   return _success(res, completed);
                }
-               _invalid(res, requestError);
+               // if there are any failures, report a general failure to the client
+               if (failed) {
+                   return _error(res, localUploadError(failed));
+               }
+               // otherwise, return the image upload information list corresponding to the newly (or previously)
+               // uploaded images
+               _success(res, completed);
            }
        });
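The dedup heuristic in the route above amounts to a lookup keyed on exact byte count: inspect the remote image, check whether an image with that content size has already been uploaded through this route, and only upload (and log) it when no match is found. A minimal sketch of just that flow, where inspect, queryBySize, uploadLocally, and logUpload are hypothetical stand-ins for the DashUploadUtils and Database.Auxiliary calls used above:

interface UploadInfo {
    accessPaths: { [suffix: string]: { client: string, server: string } };
    contentSize?: number;
}

// Hypothetical stand-ins for DashUploadUtils.InspectImage, DashUploadUtils.UploadInspectedImage,
// Database.Auxiliary.QueryUploadHistory and Database.Auxiliary.LogUpload -- assumptions, not the repo's signatures.
declare function inspect(baseUrl: string): Promise<{ contentSize: number } | Error>;
declare function uploadLocally(metadata: { contentSize: number }): Promise<UploadInfo>;
declare function queryBySize(contentSize: number): Promise<UploadInfo | undefined>;
declare function logUpload(info: UploadInfo): Promise<void>;

// Resolves each remote image to local upload information, reusing any prior upload
// whose byte count matches exactly (the "rough heuristic" described above).
export async function resolveMediaItems(baseUrls: string[]) {
    const completed: UploadInfo[] = [];
    let failed = 0;
    for (const baseUrl of baseUrls) {
        const inspection = await inspect(baseUrl);
        if (inspection instanceof Error) {
            failed++; // unreachable or malformed image: skip it
            continue;
        }
        const existing = await queryBySize(inspection.contentSize);
        if (existing) {
            // same byte count seen before: treat it as the same image, no duplicate write
            completed.push(existing);
            continue;
        }
        const upload = await uploadLocally(inspection);
        await logUpload(upload); // remember this content size for future dedup checks
        completed.push(upload);
    }
    return { completed, failed };
}

The trade-off, as the comment above concedes, is that two genuinely different images with identical byte counts would collide and only one would be stored locally.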
-- cgit v1.2.3-70-g09d2

From f33ad290f1de3a01f2c4536f03f040e09771f82e Mon Sep 17 00:00:00 2001
From: Sam Wilkins
Date: Sun, 16 Feb 2020
00:50:23 -0500 Subject: improved file upload api --- src/client/Network.ts | 14 ++- .../util/Import & Export/DirectoryImportBox.tsx | 17 ++-- src/client/views/collections/CollectionSubView.tsx | 56 +++++------ src/new_fields/Doc.ts | 6 +- src/scraping/buxton/final/BuxtonImporter.ts | 7 +- src/server/ApiManagers/GooglePhotosManager.ts | 5 +- src/server/ApiManagers/UploadManager.ts | 8 +- src/server/DashUploadUtils.ts | 107 +++++++-------------- src/server/SharedMediaTypes.ts | 39 ++++++++ src/server/database.ts | 6 +- 10 files changed, 137 insertions(+), 128 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/Network.ts b/src/client/Network.ts index ccf60f199..6982ecf19 100644 --- a/src/client/Network.ts +++ b/src/client/Network.ts @@ -1,5 +1,6 @@ import { Utils } from "../Utils"; import requestPromise = require('request-promise'); +import { Upload } from "../server/SharedMediaTypes"; export namespace Networking { @@ -17,12 +18,21 @@ export namespace Networking { return requestPromise.post(options); } - export async function PostFormDataToServer(relativeRoute: string, formData: FormData) { + export async function UploadFilesToServer(files: File | File[]): Promise[]> { + const formData = new FormData(); + if (Array.isArray(files)) { + if (!files.length) { + return []; + } + files.forEach(file => formData.append(Utils.GenerateGuid(), file)); + } else { + formData.append(Utils.GenerateGuid(), files); + } const parameters = { method: 'POST', body: formData }; - const response = await fetch(relativeRoute, parameters); + const response = await fetch("/uploadFormData", parameters); return response.json(); } diff --git a/src/client/util/Import & Export/DirectoryImportBox.tsx b/src/client/util/Import & Export/DirectoryImportBox.tsx index d04f56e57..3d8bcbab7 100644 --- a/src/client/util/Import & Export/DirectoryImportBox.tsx +++ b/src/client/util/Import & Export/DirectoryImportBox.tsx @@ -22,7 +22,7 @@ import "./DirectoryImportBox.scss"; import { Networking } from "../../Network"; import { BatchedArray } from "array-batcher"; import * as path from 'path'; -import { AcceptibleMedia } from "../../../server/SharedMediaTypes"; +import { AcceptibleMedia, Upload } from "../../../server/SharedMediaTypes"; const unsupported = ["text/html", "text/plain"]; @@ -107,20 +107,21 @@ export default class DirectoryImportBox extends React.Component runInAction(() => this.phase = `Internal: uploading ${this.quota - this.completed} files to Dash...`); const batched = BatchedArray.from(validated, { batchSize: 15 }); - const uploads = await batched.batchedMapAsync(async (batch, collector) => { - const formData = new FormData(); - + const uploads = await batched.batchedMapAsync>(async (batch, collector) => { batch.forEach(file => { sizes.push(file.size); modifiedDates.push(file.lastModified); - formData.append(Utils.GenerateGuid(), file); }); - - collector.push(...(await Networking.PostFormDataToServer("/uploadFormData", formData))); + collector.push(...(await Networking.UploadFilesToServer(batch))); runInAction(() => this.completed += batch.length); }); - await Promise.all(uploads.map(async ({ name, type, accessPaths, exifData }) => { + await Promise.all(uploads.map(async response => { + const { source: { type }, result } = response; + if (result instanceof Error) { + return; + } + const { accessPaths, exifData } = result; const path = Utils.prepend(accessPaths.agnostic.client); const document = await Docs.Get.DocumentFromType(type, path, { _width: 300, title: name }); const { data, error } = 
exifData; diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index 0963e1ea6..e0b2d524b 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -1,4 +1,4 @@ -import { action, computed, IReactionDisposer, reaction, trace } from "mobx"; +import { action, computed, IReactionDisposer, reaction } from "mobx"; import * as rp from 'request-promise'; import CursorField from "../../../new_fields/CursorField"; import { Doc, DocListCast, Opt, WidthSym, HeightSym } from "../../../new_fields/Doc"; @@ -25,6 +25,7 @@ import { ImageUtils } from "../../util/Import & Export/ImageUtils"; import { Networking } from "../../Network"; import { GestureUtils } from "../../../pen-gestures/GestureUtils"; import { InteractionUtils } from "../../util/InteractionUtils"; +import { Upload } from "../../../server/SharedMediaTypes"; export interface CollectionViewProps extends FieldViewProps { addDocument: (document: Doc) => boolean; @@ -288,6 +289,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { } const { items } = e.dataTransfer; const { length } = items; + const files: File[] = []; if (length) { const batch = UndoManager.StartBatch("collection view drop"); const promises: Promise[] = []; @@ -307,41 +309,31 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { }); promises.push(prom); } - const type = item.type; if (item.kind === "file") { const file = item.getAsFile(); - const formData = new FormData(); - - if (!file || !file.type) { - continue; - } - - formData.append('file', file); - const dropFileName = file ? file.name : "-empty-"; - promises.push(Networking.PostFormDataToServer("/uploadFormData", formData).then(results => { - results.map(action((result: any) => { - const { accessPaths, nativeWidth, nativeHeight, contentSize } = result; - if (Object.keys(accessPaths).length) { - const full = { ...options, _width: 300, title: dropFileName }; - const pathname = Utils.prepend(accessPaths.agnostic.client); - Docs.Get.DocumentFromType(type, pathname, full).then(doc => { - if (doc) { - const proto = Doc.GetProto(doc); - proto.fileUpload = basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, ""); - nativeWidth && (proto["data-nativeWidth"] = nativeWidth); - nativeHeight && (proto["data-nativeHeight"] = nativeHeight); - contentSize && (proto.contentSize = contentSize); - this.props?.addDocument(doc); - } - }); - } else { - alert("Upload failed..."); - } - })); - })); + file && file.type && files.push(file); } } - + (await Networking.UploadFilesToServer(files)).forEach(({ source: { name, type }, result }) => { + if (result instanceof Error) { + alert(`Upload failed: ${result.message}`); + return; + } + const full = { ...options, _width: 300, title: name }; + const pathname = Utils.prepend(result.accessPaths.agnostic.client); + Docs.Get.DocumentFromType(type, pathname, full).then(doc => { + if (doc) { + const proto = Doc.GetProto(doc); + proto.fileUpload = basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, ""); + if (Upload.isImageInformation(result)) { + proto["data-nativeWidth"] = result.nativeWidth; + proto["data-nativeHeight"] = result.nativeHeight; + proto.contentSize = result.contentSize; + } + this.props?.addDocument(doc); + } + }); + }); if (promises.length) { Promise.all(promises).finally(() => { completed && completed(); batch.end(); }); } else { diff --git a/src/new_fields/Doc.ts b/src/new_fields/Doc.ts index 
55c0660c0..a722f552e 100644 --- a/src/new_fields/Doc.ts +++ b/src/new_fields/Doc.ts @@ -112,10 +112,10 @@ export class Doc extends RefField { // getPrototypeOf: (target) => Cast(target[SelfProxy].proto, Doc) || null, // TODO this might be able to replace the proto logic in getter has: (target, key) => key in target.__fields, ownKeys: target => { - let obj = {} as any; + const obj = {} as any; Object.assign(obj, target.___fields); runInAction(() => obj.__LAYOUT__ = target.__LAYOUT__); - return Object.keys(obj) + return Object.keys(obj); }, getOwnPropertyDescriptor: (target, prop) => { if (prop.toString() === "__LAYOUT__") { @@ -864,7 +864,7 @@ Scripting.addGlobal(function redo() { return UndoManager.Redo(); }); Scripting.addGlobal(function curPresentationItem() { const curPres = Doc.UserDoc().curPresentation as Doc; return curPres && DocListCast(curPres[Doc.LayoutFieldKey(curPres)])[NumCast(curPres._itemIndex)]; -}) +}); Scripting.addGlobal(function selectDoc(doc: any) { Doc.UserDoc().SelectedDocs = new List([doc]); }); Scripting.addGlobal(function selectedDocs(container: Doc, excludeCollections: boolean, prevValue: any) { const docs = DocListCast(Doc.UserDoc().SelectedDocs).filter(d => !Doc.AreProtosEqual(d, container) && !d.annotationOn && d.type !== DocumentType.DOCUMENT && d.type !== DocumentType.KVP && (!excludeCollections || !Cast(d.data, listSpec(Doc), null))); diff --git a/src/scraping/buxton/final/BuxtonImporter.ts b/src/scraping/buxton/final/BuxtonImporter.ts index 47d6bbe83..8041343fd 100644 --- a/src/scraping/buxton/final/BuxtonImporter.ts +++ b/src/scraping/buxton/final/BuxtonImporter.ts @@ -8,6 +8,7 @@ const StreamZip = require('node-stream-zip'); const createImageSizeStream = require("image-size-stream"); import { parseXml } from "libxmljs"; import { strictEqual } from "assert"; +import { Readable, PassThrough } from "stream"; interface DocumentContents { body: string; @@ -293,15 +294,15 @@ async function writeImages(zip: any): Promise { const imageUrls: ImageData[] = []; for (const mediaPath of imageEntries) { - const streamImage = () => new Promise((resolve, reject) => { + const streamImage = () => new Promise((resolve, reject) => { zip.stream(mediaPath, (error: any, stream: any) => error ? 
reject(error) : resolve(stream)); }); const { width, height, type } = await new Promise(async resolve => { - const sizeStream = createImageSizeStream().on('size', (dimensions: Dimensions) => { + const sizeStream = (createImageSizeStream() as PassThrough).on('size', (dimensions: Dimensions) => { readStream.destroy(); resolve(dimensions); - }); + }).on("error", () => readStream.destroy()); const readStream = await streamImage(); readStream.pipe(sizeStream); }); diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 04b724f4b..25c54ee2e 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -8,6 +8,7 @@ import { Opt } from "../../new_fields/Doc"; import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils"; import { Database } from "../database"; import { red } from "colors"; +import { Upload } from "../SharedMediaTypes"; const prefix = "google_photos_"; const remoteUploadError = "None of the preliminary uploads to Google's servers was successful."; @@ -139,7 +140,7 @@ export default class GooglePhotosManager extends ApiManager { return; } let failed = 0; - const completed: Opt[] = []; + const completed: Opt[] = []; for (const { baseUrl } of mediaItems) { // start by getting the content size of the remote image const results = await DashUploadUtils.InspectImage(baseUrl); @@ -151,7 +152,7 @@ export default class GooglePhotosManager extends ApiManager { const { contentSize, ...attributes } = results; // check to see if we have uploaded a Google user content image *specifically via this route* already // that has this exact content size - const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize); + const found: Opt = await Database.Auxiliary.QueryUploadHistory(contentSize); if (!found) { // if we haven't, then upload it locally to Dash's server const upload = await DashUploadUtils.UploadInspectedImage({ contentSize, ...attributes }, undefined, prefix, false).catch(error => _error(res, downloadError, error)); diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 4d09528f4..8f2a5ea3e 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -4,12 +4,12 @@ import * as formidable from 'formidable'; import v4 = require('uuid/v4'); const AdmZip = require('adm-zip'); import { extname, basename, dirname } from 'path'; -import { createReadStream, createWriteStream, unlink, readFileSync } from "fs"; +import { createReadStream, createWriteStream, unlink } from "fs"; import { publicDirectory, filesDirectory } from ".."; import { Database } from "../database"; -import { DashUploadUtils, SizeSuffix } from "../DashUploadUtils"; +import { DashUploadUtils } from "../DashUploadUtils"; import * as sharp from 'sharp'; -import { AcceptibleMedia } from "../SharedMediaTypes"; +import { AcceptibleMedia, Upload } from "../SharedMediaTypes"; import { normalize } from "path"; const imageDataUri = require('image-data-uri'); @@ -47,7 +47,7 @@ export default class UploadManager extends ApiManager { form.keepExtensions = true; return new Promise(resolve => { form.parse(req, async (_err, _fields, files) => { - const results: any[] = []; + const results: Upload.FileResponse[] = []; for (const key in files) { const result = await DashUploadUtils.upload(files[key]); result && results.push(result); diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 
913ddc1c3..b66651ef2 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -3,9 +3,9 @@ import { Utils } from '../Utils'; import * as path from 'path'; import * as sharp from 'sharp'; import request = require('request-promise'); -import { ExifData, ExifImage } from 'exif'; +import { ExifImage } from 'exif'; import { Opt } from '../new_fields/Doc'; -import { AcceptibleMedia } from './SharedMediaTypes'; +import { AcceptibleMedia, Upload } from './SharedMediaTypes'; import { filesDirectory } from '.'; import { File } from 'formidable'; import { basename } from "path"; @@ -14,7 +14,7 @@ import { ParsedPDF } from "../server/PdfTypes"; const parse = require('pdf-parse'); import { Directory, serverPathToFile, clientPathToFile, pathToDirectory } from './ApiManagers/UploadManager'; import { red } from 'colors'; -import { Writable } from 'stream'; +import { Stream } from 'stream'; const requestImageSize = require("../client/util/request-image-size"); export enum SizeSuffix { @@ -40,13 +40,6 @@ export namespace DashUploadUtils { suffix: SizeSuffix; } - export interface ImageFileResponse { - name: string; - path: string; - type: string; - exif: Opt; - } - export const Sizes: { [size: string]: Size } = { SMALL: { width: 100, suffix: SizeSuffix.Small }, MEDIUM: { width: 400, suffix: SizeSuffix.Medium }, @@ -60,20 +53,9 @@ export namespace DashUploadUtils { const size = "content-length"; const type = "content-type"; - export interface ImageUploadInformation { - accessPaths: AccessPathInfo; - exifData: EnrichedExifData; - contentSize?: number; - contentType?: string; - } - - export interface AccessPathInfo { - [suffix: string]: { client: string, server: string }; - } - const { imageFormats, videoFormats, applicationFormats } = AcceptibleMedia; - export async function upload(file: File): Promise { + export async function upload(file: File): Promise { const { type, path, name } = file; const types = type.split("/"); @@ -83,33 +65,33 @@ export namespace DashUploadUtils { switch (category) { case "image": if (imageFormats.includes(format)) { - const results = await UploadImage(path, basename(path)); - return { ...results, name, type }; + const result = await UploadImage(path, basename(path)); + return { source: file, result }; } case "video": if (videoFormats.includes(format)) { - return MoveParsedFile(path, Directory.videos); + return MoveParsedFile(file, Directory.videos); } case "application": if (applicationFormats.includes(format)) { - return UploadPdf(path); + return UploadPdf(file); } } console.log(red(`Ignoring unsupported file (${name}) with upload type (${type}).`)); - return { accessPaths: {} }; + return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) }; } - async function UploadPdf(absolutePath: string) { - const dataBuffer = readFileSync(absolutePath); + async function UploadPdf(file: File) { + const { path, name } = file; + const dataBuffer = readFileSync(path); const result: ParsedPDF = await parse(dataBuffer); - const parsedName = basename(absolutePath); await new Promise((resolve, reject) => { - const textFilename = `${parsedName.substring(0, parsedName.length - 4)}.txt`; + const textFilename = `${name.substring(0, name.length - 4)}.txt`; const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename)); writeStream.write(result.text, error => error ? 
reject(error) : resolve()); }); - return MoveParsedFile(absolutePath, Directory.pdfs); + return MoveParsedFile(file, Directory.pdfs); } /** @@ -123,13 +105,13 @@ export namespace DashUploadUtils { * @param {string} prefix is a string prepended to the generated image name in the * event that @param filename is not specified * - * @returns {ImageUploadInformation} This method returns + * @returns {ImageUploadInformation | Error} This method returns * 1) the paths to the uploaded images (plural due to resizing) - * 2) the file name of each of the resized images + * 2) the exif data embedded in the image, or the error explaining why exif couldn't be parsed * 3) the size of the image, in bytes (4432130) * 4) the content type of the image, i.e. image/(jpeg | png | ...) */ - export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise => { + export const UploadImage = async (source: string, filename?: string, prefix: string = ""): Promise => { const metadata = await InspectImage(source); if (metadata instanceof Error) { return metadata; @@ -137,22 +119,6 @@ export namespace DashUploadUtils { return UploadInspectedImage(metadata, filename || metadata.filename, prefix); }; - export interface InspectionResults { - source: string; - requestable: string; - exifData: EnrichedExifData; - contentSize: number; - contentType: string; - nativeWidth: number; - nativeHeight: number; - filename?: string; - } - - export interface EnrichedExifData { - data: ExifData; - error?: string; - } - export async function buildFileDirectories() { const pending = Object.keys(Directory).map(sub => createIfNotExists(`${filesDirectory}/${sub}`)); return Promise.all(pending); @@ -175,7 +141,7 @@ export namespace DashUploadUtils { * * @param source is the path or url to the image in question */ - export const InspectImage = async (source: string): Promise => { + export const InspectImage = async (source: string): Promise => { let rawMatches: RegExpExecArray | null; let filename: string | undefined; if ((rawMatches = /^data:image\/([a-z]+);base64,(.*)/.exec(source)) !== null) { @@ -216,14 +182,17 @@ export namespace DashUploadUtils { }; }; - export async function MoveParsedFile(absolutePath: string, destination: Directory): Promise> { + export async function MoveParsedFile(file: File, destination: Directory): Promise { + const { name, path: sourcePath } = file; return new Promise(resolve => { - const filename = basename(absolutePath); - const destinationPath = serverPathToFile(destination, filename); - rename(absolutePath, destinationPath, error => { - resolve(error ? undefined : { - accessPaths: { - agnostic: getAccessPaths(destination, filename) + const destinationPath = serverPathToFile(destination, name); + rename(sourcePath, destinationPath, error => { + resolve({ + source: file, + result: error ? 
error : { + accessPaths: { + agnostic: getAccessPaths(destination, name) + } } }); }); @@ -237,16 +206,16 @@ export namespace DashUploadUtils { }; } - export const UploadInspectedImage = async (metadata: InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise => { + export const UploadInspectedImage = async (metadata: Upload.InspectionResults, filename?: string, prefix = "", cleanUp = true): Promise => { const { requestable, source, ...remaining } = metadata; const extension = `.${remaining.contentType.split("/")[1].toLowerCase()}`; const resolved = filename || `${prefix}upload_${Utils.GenerateGuid()}${extension}`; const { images } = Directory; - const information: ImageUploadInformation = { + const information: Upload.ImageInformation = { accessPaths: { agnostic: getAccessPaths(images, resolved) }, - ...remaining + ...metadata }; const outputPath = pathToDirectory(Directory.images); const writtenFiles = await outputResizedImages(() => request(requestable), outputPath, resolved, extension); @@ -259,9 +228,9 @@ export namespace DashUploadUtils { return information; }; - const parseExifData = async (source: string): Promise => { + const parseExifData = async (source: string): Promise => { const image = await request.get(source, { encoding: null }); - return new Promise(resolve => { + return new Promise(resolve => { new ExifImage({ image }, (error, data) => { let reason: Opt = undefined; if (error) { @@ -279,18 +248,14 @@ export namespace DashUploadUtils { force: true }; - export interface ReadStreamLike { - pipe: (dest: Writable) => Writable; - } - - export async function outputResizedImages(readStreamSource: () => ReadStreamLike | Promise, outputPath: string, fileName: string, ext: string) { + export async function outputResizedImages(readStreamSource: () => Stream | Promise, outputPath: string, fileName: string, ext: string) { const writtenFiles: { [suffix: string]: string } = {}; for (const { resizer, suffix } of resizers(ext)) { const resolved = writtenFiles[suffix] = InjectSize(fileName, suffix); await new Promise(async (resolve, reject) => { const writeStream = createWriteStream(path.resolve(outputPath, resolved)); - let readStream: ReadStreamLike; const source = readStreamSource(); + let readStream: Stream; if (source instanceof Promise) { readStream = await source; } else { @@ -320,7 +285,7 @@ export namespace DashUploadUtils { initial = initial.webp(); } else if (tiffs.includes(ext)) { initial = initial.tiff(); - } else { + } else if (ext === ".gif") { initial = undefined; } return { diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts index 274b4f01e..185e787cc 100644 --- a/src/server/SharedMediaTypes.ts +++ b/src/server/SharedMediaTypes.ts @@ -1,3 +1,6 @@ +import { ExifData } from 'exif'; +import { File } from 'formidable'; + export namespace AcceptibleMedia { export const gifs = [".gif"]; export const pngs = [".png"]; @@ -7,4 +10,40 @@ export namespace AcceptibleMedia { export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs]; export const videoFormats = [".mov", ".mp4"]; export const applicationFormats = [".pdf"]; +} + +export namespace Upload { + + export function isImageInformation(uploadResponse: Upload.FileInformation): uploadResponse is Upload.ImageInformation { + return "nativeWidth" in uploadResponse; + } + + export interface FileInformation { + accessPaths: AccessPathInfo; + } + + export type FileResponse = { source: File, result: T | Error }; + + export type ImageInformation = FileInformation & 
InspectionResults; + + export interface AccessPathInfo { + [suffix: string]: { client: string, server: string }; + } + + export interface InspectionResults { + source: string; + requestable: string; + exifData: EnrichedExifData; + contentSize: number; + contentType: string; + nativeWidth: number; + nativeHeight: number; + filename?: string; + } + + export interface EnrichedExifData { + data: ExifData; + error?: string; + } + } \ No newline at end of file diff --git a/src/server/database.ts b/src/server/database.ts index 83ce865c6..055f04c49 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -2,12 +2,12 @@ import * as mongodb from 'mongodb'; import { Transferable } from './Message'; import { Opt } from '../new_fields/Doc'; import { Utils, emptyFunction } from '../Utils'; -import { DashUploadUtils } from './DashUploadUtils'; import { Credentials } from 'google-auth-library'; import { GoogleApiServerUtils } from './apis/google/GoogleApiServerUtils'; import { IDatabase } from './IDatabase'; import { MemoryDatabase } from './MemoryDatabase'; import * as mongoose from 'mongoose'; +import { Upload } from './SharedMediaTypes'; export namespace Database { @@ -297,7 +297,7 @@ export namespace Database { }; export const QueryUploadHistory = async (contentSize: number) => { - return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); + return SanitizedSingletonQuery({ contentSize }, AuxiliaryCollections.GooglePhotosUploadHistory); }; export namespace GoogleAuthenticationToken { @@ -326,7 +326,7 @@ export namespace Database { } - export const LogUpload = async (information: DashUploadUtils.ImageUploadInformation) => { + export const LogUpload = async (information: Upload.ImageInformation) => { const bundle = { _id: Utils.GenerateDeterministicGuid(String(information.contentSize!)), ...information -- cgit v1.2.3-70-g09d2 From b42e1e1e2da941955d7751b6003f18fecd5f2f8d Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 16 Feb 2020 12:22:10 -0500 Subject: collection sub view and google authentication manager cleanup, deleteAssets route added --- src/client/apis/GoogleAuthenticationManager.tsx | 46 ++-- .../views/collections/CollectionCarouselView.tsx | 28 +-- .../views/collections/CollectionLinearView.tsx | 2 +- .../collections/CollectionMasonryViewFieldRow.tsx | 2 +- .../views/collections/CollectionSchemaView.tsx | 7 +- .../views/collections/CollectionStackingView.tsx | 12 +- .../CollectionStackingViewFieldColumn.tsx | 2 +- src/client/views/collections/CollectionSubView.tsx | 264 +++++++++++---------- .../views/collections/CollectionTreeView.tsx | 6 +- .../collectionFreeForm/CollectionFreeFormView.tsx | 10 +- .../CollectionMulticolumnView.tsx | 6 +- .../CollectionMultirowView.tsx | 6 +- src/server/ApiManagers/DeleteManager.ts | 31 +-- 13 files changed, 213 insertions(+), 209 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/apis/GoogleAuthenticationManager.tsx b/src/client/apis/GoogleAuthenticationManager.tsx index ce1277667..417dc3c3b 100644 --- a/src/client/apis/GoogleAuthenticationManager.tsx +++ b/src/client/apis/GoogleAuthenticationManager.tsx @@ -12,8 +12,8 @@ const prompt = "Paste authorization code here..."; @observer export default class GoogleAuthenticationManager extends React.Component<{}> { public static Instance: GoogleAuthenticationManager; - @observable private openState = false; private authenticationLink: Opt = undefined; + @observable private openState = false; @observable private authenticationCode: Opt = 
undefined; @observable private clickedState = false; @observable private success: Opt = undefined; @@ -39,24 +39,18 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { const disposer = reaction( () => this.authenticationCode, async authenticationCode => { - if (!authenticationCode) { - return; + if (authenticationCode) { + disposer(); + const { access_token, avatar, name } = await Networking.PostToServer("/writeGoogleAccessToken", { authenticationCode }); + runInAction(() => { + this.avatar = avatar; + this.username = name; + this.hasBeenClicked = false; + this.success = false; + }); + this.beginFadeout(); + resolve(access_token); } - const { access_token, avatar, name } = await Networking.PostToServer( - "/writeGoogleAccessToken", - { authenticationCode } - ); - runInAction(() => { - this.avatar = avatar; - this.username = name; - }); - this.beginFadeout(); - disposer(); - resolve(access_token); - action(() => { - this.hasBeenClicked = false; - this.success = false; - }); } ); }); @@ -86,26 +80,20 @@ export default class GoogleAuthenticationManager extends React.Component<{}> { GoogleAuthenticationManager.Instance = this; } - private handleClick = () => { - window.open(this.authenticationLink); - setTimeout(() => this.hasBeenClicked = true, 500); - } - - private handlePaste = action((e: React.ChangeEvent) => { - this.authenticationCode = e.currentTarget.value; - }); - private get renderPrompt() { return (
{this.displayLauncher ? : (null)} {this.clickedState ? this.authenticationCode = e.currentTarget.value)} placeholder={prompt} /> : (null)} {this.avatar ? { //used for stacking and masonry view this._dropDisposer?.(); if (ele) { - this._dropDisposer = DragManager.MakeDropTarget(ele, this.drop.bind(this)); + this._dropDisposer = DragManager.MakeDropTarget(ele, this.onInternalDrop.bind(this)); } } @@ -41,21 +41,21 @@ export class CollectionCarouselView extends CollectionSubView(CarouselDocument) } panelHeight = () => this.props.PanelHeight() - 50; - @computed get content() { + @computed get content() { const index = NumCast(this.layoutDoc._itemIndex); return !(this.childLayoutPairs?.[index]?.layout instanceof Doc) ? (null) : - <> -
+ ; } @computed get buttons() { return <> diff --git a/src/client/views/collections/CollectionLinearView.tsx b/src/client/views/collections/CollectionLinearView.tsx index 7eb316cf0..9bbc9f1b6 100644 --- a/src/client/views/collections/CollectionLinearView.tsx +++ b/src/client/views/collections/CollectionLinearView.tsx @@ -67,7 +67,7 @@ export class CollectionLinearView extends CollectionSubView(LinearDocument) { protected createDashEventsTarget = (ele: HTMLDivElement) => { //used for stacking and masonry view this._dropDisposer && this._dropDisposer(); if (ele) { - this._dropDisposer = DragManager.MakeDropTarget(ele, this.drop.bind(this)); + this._dropDisposer = DragManager.MakeDropTarget(ele, this.onInternalDrop.bind(this)); } } diff --git a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx index e25a2f5eb..f3d512a97 100644 --- a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx +++ b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx @@ -80,7 +80,7 @@ export class CollectionMasonryViewFieldRow extends React.Component d[key] = castedValue); - this.props.parent.drop(e, de); + this.props.parent.onInternalDrop(e, de); e.stopPropagation(); } }); diff --git a/src/client/views/collections/CollectionSchemaView.tsx b/src/client/views/collections/CollectionSchemaView.tsx index fa8be5177..c422c38f1 100644 --- a/src/client/views/collections/CollectionSchemaView.tsx +++ b/src/client/views/collections/CollectionSchemaView.tsx @@ -14,7 +14,6 @@ import { SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import { ComputedField } from "../../../new_fields/ScriptField"; import { Cast, FieldValue, NumCast, StrCast } from "../../../new_fields/Types"; import { Docs, DocumentOptions } from "../../documents/Documents"; -import { DocumentType } from "../../documents/DocumentTypes"; import { Gateway } from "../../northstar/manager/Gateway"; import { CompileScript, Transformer, ts } from "../../util/Scripting"; import { Transform } from "../../util/Transform"; @@ -175,7 +174,7 @@ export class CollectionSchemaView extends CollectionSubView(doc => doc) { moveDocument={this.props.moveDocument} ScreenToLocalTransform={this.props.ScreenToLocalTransform} active={this.props.active} - onDrop={this.onDrop} + onDrop={this.onExternalDrop} addDocTab={this.props.addDocTab} pinToPres={this.props.pinToPres} isSelected={this.props.isSelected} @@ -199,7 +198,7 @@ export class CollectionSchemaView extends CollectionSubView(doc => doc) { render() { return
-
this.props.active(true) && e.stopPropagation()} onDrop={e => this.onDrop(e, {})} ref={this.createTarget}> +
this.props.active(true) && e.stopPropagation()} onDrop={e => this.onExternalDrop(e, {})} ref={this.createTarget}> {this.schemaTable}
{this.dividerDragger} @@ -692,7 +691,7 @@ export class SchemaTable extends React.Component { onContextMenu = (e: React.MouseEvent): void => { if (!e.isPropagationStopped() && this.props.Document[Id] !== "mainDoc") { // need to test this because GoldenLayout causes a parallel hierarchy in the React DOM for its children and the main document view7 // ContextMenu.Instance.addItem({ description: "Make DB", event: this.makeDB, icon: "table" }); - ContextMenu.Instance.addItem({ description: "Toggle text wrapping", event: this.toggleTextwrap, icon: "table" }) + ContextMenu.Instance.addItem({ description: "Toggle text wrapping", event: this.toggleTextwrap, icon: "table" }); } } diff --git a/src/client/views/collections/CollectionStackingView.tsx b/src/client/views/collections/CollectionStackingView.tsx index 055035b3e..a9cefae6a 100644 --- a/src/client/views/collections/CollectionStackingView.tsx +++ b/src/client/views/collections/CollectionStackingView.tsx @@ -189,7 +189,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { } getDocHeight(d?: Doc) { if (!d) return 0; - let layoutDoc = Doc.Layout(d, this.props.childLayoutTemplate?.()); + const layoutDoc = Doc.Layout(d, this.props.childLayoutTemplate?.()); const nw = NumCast(layoutDoc._nativeWidth); const nh = NumCast(layoutDoc._nativeHeight); let wid = this.columnWidth / (this.isStackingView ? this.numGroupColumns : 1); @@ -234,7 +234,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { @undoBatch @action - drop = (e: Event, de: DragManager.DropEvent) => { + onInternalDrop = (e: Event, de: DragManager.DropEvent) => { const where = [de.x, de.y]; let targInd = -1; let plusOne = 0; @@ -248,7 +248,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { plusOne = where[axis] > (pos[axis] + pos1[axis]) / 2 ? 
1 : 0; } }); - if (super.drop(e, de)) { + if (super.onInternalDrop(e, de)) { const newDoc = de.complete.docDragData.droppedDocuments[0]; const docs = this.childDocList; if (docs) { @@ -264,7 +264,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { } @undoBatch @action - onDrop = async (e: React.DragEvent): Promise => { + onExternalDrop = async (e: React.DragEvent): Promise => { const where = [e.clientX, e.clientY]; let targInd = -1; this._docXfs.map((cd, i) => { @@ -274,7 +274,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { targInd = i; } }); - super.onDrop(e, {}, () => { + super.onExternalDrop(e, {}, () => { if (targInd !== -1) { const newDoc = this.childDocs[this.childDocs.length - 1]; const docs = this.childDocList; @@ -405,7 +405,7 @@ export class CollectionStackingView extends CollectionSubView(doc => doc) { transformOrigin: "top left", }} onScroll={action((e: React.UIEvent) => this._scroll = e.currentTarget.scrollTop)} - onDrop={this.onDrop.bind(this)} + onDrop={this.onExternalDrop.bind(this)} onContextMenu={this.onContextMenu} onWheel={e => this.props.active() && e.stopPropagation()} > {this.renderedSections} diff --git a/src/client/views/collections/CollectionStackingViewFieldColumn.tsx b/src/client/views/collections/CollectionStackingViewFieldColumn.tsx index 2ff477c57..87c35679f 100644 --- a/src/client/views/collections/CollectionStackingViewFieldColumn.tsx +++ b/src/client/views/collections/CollectionStackingViewFieldColumn.tsx @@ -71,7 +71,7 @@ export class CollectionStackingViewFieldColumn extends React.Component d[key] = undefined); } - this.props.parent.drop(e, de); + this.props.parent.onInternalDrop(e, de); e.stopPropagation(); } }); diff --git a/src/client/views/collections/CollectionSubView.tsx b/src/client/views/collections/CollectionSubView.tsx index e80e1c802..042385dcd 100644 --- a/src/client/views/collections/CollectionSubView.tsx +++ b/src/client/views/collections/CollectionSubView.tsx @@ -57,7 +57,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { this.gestureDisposer?.(); this.multiTouchDisposer?.(); if (ele) { - this.dropDisposer = DragManager.MakeDropTarget(ele, this.drop.bind(this)); + this.dropDisposer = DragManager.MakeDropTarget(ele, this.onInternalDrop.bind(this)); this.gestureDisposer = GestureUtils.MakeGestureTarget(ele, this.onGesture.bind(this)); this.multiTouchDisposer = InteractionUtils.MakeMultiTouchTarget(ele, this.onTouchStart.bind(this)); } @@ -156,7 +156,7 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { @undoBatch @action - protected drop(e: Event, de: DragManager.DropEvent): boolean { + protected onInternalDrop(e: Event, de: DragManager.DropEvent): boolean { const docDragData = de.complete.docDragData; (this.props.Document.dropConverter instanceof ScriptField) && this.props.Document.dropConverter.script.run({ dragData: docDragData }); /// bcz: check this @@ -195,158 +195,172 @@ export function CollectionSubView(schemaCtor: (doc: Doc) => T) { @undoBatch @action - protected async onDrop(e: React.DragEvent, options: DocumentOptions, completed?: () => void) { + protected async onExternalDrop(e: React.DragEvent, options: DocumentOptions, completed?: () => void) { if (e.ctrlKey) { e.stopPropagation(); // bcz: this is a hack to stop propagation when dropping an image on a text document with shift+ctrl return; } - const html = e.dataTransfer.getData("text/html"); - const text = e.dataTransfer.getData("text/plain"); + + const { dataTransfer } = e; + const 
html = dataTransfer.getData("text/html"); + const text = dataTransfer.getData("text/plain"); if (text && text.startsWith(" { - if (f instanceof Doc) { - if (options.x || options.y) { f.x = options.x; f.y = options.y; } // should be in CollectionFreeFormView - (f instanceof Doc) && this.props.addDocument(f); - } - }); - } else { - this.props.addDocument && this.props.addDocument(Docs.Create.WebDocument(href, { ...options, title: href })); - } - } else if (text) { - this.props.addDocument && this.props.addDocument(Docs.Create.TextDocument(text, { ...options, _width: 100, _height: 25 })); - } + const { addDocument } = this.props; + if (!addDocument) { + alert("this.props.addDocument does not exist. Aborting drop operation."); return; } - if (html && !html.startsWith(" 1 && tags[1].startsWith("img") ? tags[1] : ""; - if (img) { - const split = img.split("src=\"")[1].split("\"")[0]; - let source = split; - if (split.startsWith("data:image") && split.includes("base64")) { - const [{ accessPaths }] = await Networking.PostToServer("/uploadRemoteImage", { sources: [split] }); - source = Utils.prepend(accessPaths.agnostic.client); + + if (html) { + if (FormattedTextBox.IsFragment(html)) { + const href = FormattedTextBox.GetHref(html); + if (href) { + const docid = FormattedTextBox.GetDocFromUrl(href); + if (docid) { // prosemirror text containing link to dash document + DocServer.GetRefField(docid).then(f => { + if (f instanceof Doc) { + if (options.x || options.y) { f.x = options.x; f.y = options.y; } // should be in CollectionFreeFormView + (f instanceof Doc) && addDocument(f); + } + }); + } else { + addDocument(Docs.Create.WebDocument(href, { ...options, title: href })); + } + } else if (text) { + addDocument(Docs.Create.TextDocument(text, { ...options, _width: 100, _height: 25 })); } - const doc = Docs.Create.ImageDocument(source, { ...options, _width: 300 }); - ImageUtils.ExtractExif(doc); - this.props.addDocument(doc); return; - } else { - const path = window.location.origin + "/doc/"; - if (text.startsWith(path)) { - const docid = text.replace(Utils.prepend("/doc/"), "").split("?")[0]; - DocServer.GetRefField(docid).then(f => { - if (f instanceof Doc) { - if (options.x || options.y) { f.x = options.x; f.y = options.y; } // should be in CollectionFreeFormView - (f instanceof Doc) && this.props.addDocument(f); - } - }); + } + if (!html.startsWith(" 1 && tags[1].startsWith("img") ? 
tags[1] : ""; + if (img) { + const split = img.split("src=\"")[1].split("\"")[0]; + let source = split; + if (split.startsWith("data:image") && split.includes("base64")) { + const [{ accessPaths }] = await Networking.PostToServer("/uploadRemoteImage", { sources: [split] }); + source = Utils.prepend(accessPaths.agnostic.client); + } + const doc = Docs.Create.ImageDocument(source, { ...options, _width: 300 }); + ImageUtils.ExtractExif(doc); + addDocument(doc); + return; } else { - const htmlDoc = Docs.Create.HtmlDocument(html, { ...options, title: "-web page-", _width: 300, _height: 300 }); - Doc.GetProto(htmlDoc)["data-text"] = text; - this.props.addDocument(htmlDoc); + const path = window.location.origin + "/doc/"; + if (text.startsWith(path)) { + const docid = text.replace(Utils.prepend("/doc/"), "").split("?")[0]; + DocServer.GetRefField(docid).then(f => { + if (f instanceof Doc) { + if (options.x || options.y) { f.x = options.x; f.y = options.y; } // should be in CollectionFreeFormView + (f instanceof Doc) && this.props.addDocument(f); + } + }); + } else { + const htmlDoc = Docs.Create.HtmlDocument(html, { ...options, title: "-web page-", _width: 300, _height: 300 }); + Doc.GetProto(htmlDoc)["data-text"] = text; + this.props.addDocument(htmlDoc); + } + return; } - return; } } - if (text && text.indexOf("www.youtube.com/watch") !== -1) { - const url = text.replace("youtube.com/watch?v=", "youtube.com/embed/"); - this.props.addDocument(Docs.Create.VideoDocument(url, { ...options, title: url, _width: 400, _height: 315, _nativeWidth: 600, _nativeHeight: 472.5 })); - return; - } - let matches: RegExpExecArray | null; - if ((matches = /(https:\/\/)?docs\.google\.com\/document\/d\/([^\\]+)\/edit/g.exec(text)) !== null) { - const newBox = Docs.Create.TextDocument("", { ...options, _width: 400, _height: 200, title: "Awaiting title from Google Docs..." }); - const proto = newBox.proto!; - const documentId = matches[2]; - proto[GoogleRef] = documentId; - proto.data = "Please select this document and then click on its pull button to load its contents from from Google Docs..."; - proto.backgroundColor = "#eeeeff"; - this.props.addDocument(newBox); - // const parent = Docs.Create.StackingDocument([newBox], { title: `Google Doc Import (${documentId})` }); - // CollectionDockingView.Instance.AddRightSplit(parent, undefined); - // proto.height = parent[HeightSym](); - return; - } - if ((matches = /(https:\/\/)?photos\.google\.com\/(u\/3\/)?album\/([^\\]+)/g.exec(text)) !== null) { - const albums = await GooglePhotos.Transactions.ListAlbums(); - const albumId = matches[3]; - const mediaItems = await GooglePhotos.Query.AlbumSearch(albumId); - console.log(mediaItems); - return; + + if (text) { + if (text.includes("www.youtube.com/watch")) { + const url = text.replace("youtube.com/watch?v=", "youtube.com/embed/"); + addDocument(Docs.Create.VideoDocument(url, { + ...options, + title: url, + _width: 400, + _height: 315, + _nativeWidth: 600, + _nativeHeight: 472.5 + })); + return; + } + let matches: RegExpExecArray | null; + if ((matches = /(https:\/\/)?docs\.google\.com\/document\/d\/([^\\]+)\/edit/g.exec(text)) !== null) { + const newBox = Docs.Create.TextDocument("", { ...options, _width: 400, _height: 200, title: "Awaiting title from Google Docs..." 
}); + const proto = newBox.proto!; + const documentId = matches[2]; + proto[GoogleRef] = documentId; + proto.data = "Please select this document and then click on its pull button to load its contents from from Google Docs..."; + proto.backgroundColor = "#eeeeff"; + addDocument(newBox); + return; + } + if ((matches = /(https:\/\/)?photos\.google\.com\/(u\/3\/)?album\/([^\\]+)/g.exec(text)) !== null) { + const albumId = matches[3]; + const mediaItems = await GooglePhotos.Query.AlbumSearch(albumId); + console.log(mediaItems); + return; + } } + const { items } = e.dataTransfer; const { length } = items; const files: File[] = []; - if (length) { - const batch = UndoManager.StartBatch("collection view drop"); - const promises: Promise[] = []; - // tslint:disable-next-line:prefer-for-of - for (let i = 0; i < length; i++) { - const item = e.dataTransfer.items[i]; - if (item.kind === "string" && item.type.indexOf("uri") !== -1) { - let str: string; - const prom = new Promise(resolve => item.getAsString(resolve)) - .then(action((s: string) => rp.head(Utils.CorsProxy(str = s)))) - .then(result => { - const type = result["content-type"]; - if (type) { - Docs.Get.DocumentFromType(type, str, options) - .then(doc => doc && this.props.addDocument(doc)); - } - }); - promises.push(prom); - } - if (item.kind === "file") { - const file = item.getAsFile(); - file && file.type && files.push(file); + const generatedDocuments: Doc[] = []; + if (!length) { + alert("No uploadable content found."); + return; + } + + const batch = UndoManager.StartBatch("collection view drop"); + for (let i = 0; i < length; i++) { + const item = e.dataTransfer.items[i]; + if (item.kind === "string" && item.type.includes("uri")) { + const stringContents = await new Promise(resolve => item.getAsString(resolve)); + const type = (await rp.head(Utils.CorsProxy(stringContents)))["content-type"]; + if (type) { + const doc = await Docs.Get.DocumentFromType(type, stringContents, options); + doc && generatedDocuments.push(doc); } } - promises.push(Networking.UploadFilesToServer(files).then(responses => responses.forEach(({ source: { name, type }, result }) => { - if (result instanceof Error) { - alert(`Upload failed: ${result.message}`); - return; - } - const full = { ...options, _width: 300, title: name }; - const pathname = Utils.prepend(result.accessPaths.agnostic.client); - Docs.Get.DocumentFromType(type, pathname, full).then(doc => { - if (doc) { - const proto = Doc.GetProto(doc); - proto.fileUpload = basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, ""); - if (Upload.isImageInformation(result)) { - proto["data-nativeWidth"] = result.nativeWidth; - proto["data-nativeHeight"] = result.nativeHeight; - proto.contentSize = result.contentSize; - } - this.props?.addDocument(doc); - } - }); - }))); - if (promises.length) { - Promise.all(promises).finally(() => { completed && completed(); batch.end(); }); - } else { - if (text && !text.includes("https://")) { - this.props.addDocument(Docs.Create.TextDocument(text, { ...options, _width: 400, _height: 315 })); - } - batch.end(); + if (item.kind === "file") { + const file = item.getAsFile(); + file && file.type && files.push(file); + } + } + for (const { source: { name, type }, result } of await Networking.UploadFilesToServer(files)) { + if (result instanceof Error) { + alert(`Upload failed: ${result.message}`); + return; } + const full = { ...options, _width: 300, title: name }; + const pathname = Utils.prepend(result.accessPaths.agnostic.client); + const doc = await 
Docs.Get.DocumentFromType(type, pathname, full); + if (!doc) { + continue; + } + const proto = Doc.GetProto(doc); + proto.fileUpload = basename(pathname).replace("upload_", "").replace(/\.[a-z0-9]*$/, ""); + if (Upload.isImageInformation(result)) { + proto["data-nativeWidth"] = result.nativeWidth; + proto["data-nativeHeight"] = result.nativeHeight; + proto.contentSize = result.contentSize; + } + generatedDocuments.push(doc); + } + if (generatedDocuments.length) { + generatedDocuments.forEach(addDocument); + completed && completed(); } else { - alert("No uploadable content found."); + if (text && !text.includes("https://")) { + addDocument(Docs.Create.TextDocument(text, { ...options, _width: 400, _height: 315 })); + } } + batch.end(); } } + return CollectionSubView; } diff --git a/src/client/views/collections/CollectionTreeView.tsx b/src/client/views/collections/CollectionTreeView.tsx index 8720ce002..13ab7c1a4 100644 --- a/src/client/views/collections/CollectionTreeView.tsx +++ b/src/client/views/collections/CollectionTreeView.tsx @@ -210,7 +210,7 @@ class TreeView extends React.Component { } else { ContextMenu.Instance.addItem({ description: "Delete Workspace", event: () => this.props.deleteDoc(this.props.document), icon: "trash-alt" }); ContextMenu.Instance.addItem({ description: "Create New Workspace", event: () => MainView.Instance.createNewWorkspace(), icon: "plus" }); - } + } ContextMenu.Instance.addItem({ description: "Toggle Theme Colors", event: () => this.props.document.darkScheme = !this.props.document.darkScheme, icon: "minus" }); ContextMenu.Instance.addItem({ description: "Open Fields", event: () => { const kvp = Docs.Create.KVPDocument(this.props.document, { _width: 300, _height: 300 }); this.props.addDocTab(kvp, this.props.dataDoc ? this.props.dataDoc : kvp, "onRight"); }, icon: "layer-group" }); ContextMenu.Instance.addItem({ description: "Publish", event: () => DocUtils.Publish(this.props.document, StrCast(this.props.document.title), () => { }, () => { }), icon: "file" }); @@ -594,7 +594,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { protected createTreeDropTarget = (ele: HTMLDivElement) => { this.treedropDisposer && this.treedropDisposer(); if (this._mainEle = ele) { - this.treedropDisposer = DragManager.MakeDropTarget(ele, this.drop.bind(this)); + this.treedropDisposer = DragManager.MakeDropTarget(ele, this.onInternalDrop.bind(this)); } } @@ -702,7 +702,7 @@ export class CollectionTreeView extends CollectionSubView(Document) { !existingOnClick && ContextMenu.Instance.addItem({ description: "OnClick...", subitems: onClicks, icon: "hand-point-right" }); } outerXf = () => Utils.GetScreenTransform(this._mainEle!); - onTreeDrop = (e: React.DragEvent) => this.onDrop(e, {}); + onTreeDrop = (e: React.DragEvent) => this.onExternalDrop(e, {}); @computed get renderClearButton() { return
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx index 969d6b3c8..bdc5e03e3 100644 --- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx +++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx @@ -117,20 +117,20 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { } @action - onDrop = (e: React.DragEvent): Promise => { + onExternalDrop = (e: React.DragEvent): Promise => { const pt = this.getTransform().transformPoint(e.pageX, e.pageY); - return super.onDrop(e, { x: pt[0], y: pt[1] }); + return super.onExternalDrop(e, { x: pt[0], y: pt[1] }); } @undoBatch @action - drop = (e: Event, de: DragManager.DropEvent) => { + onInternalDrop = (e: Event, de: DragManager.DropEvent) => { if (this.props.Document.isBackground) return false; const xf = this.getTransform(); const xfo = this.getTransformOverlay(); const [xp, yp] = xf.transformPoint(de.x, de.y); const [xpo, ypo] = xfo.transformPoint(de.x, de.y); - if (super.drop(e, de)) { + if (super.onInternalDrop(e, de)) { if (de.complete.docDragData) { if (de.complete.docDragData.droppedDocuments.length) { const firstDoc = de.complete.docDragData.droppedDocuments[0]; @@ -1079,7 +1079,7 @@ export class CollectionFreeFormView extends CollectionSubView(PanZoomDocument) { return
{ - if (super.drop(e, de)) { + onInternalDrop = (e: Event, de: DragManager.DropEvent) => { + if (super.onInternalDrop(e, de)) { de.complete.docDragData?.droppedDocuments.forEach(action((d: Doc) => { d.dimUnit = "*"; d.dimMagnitude = 1; @@ -214,7 +214,7 @@ export class CollectionMulticolumnView extends CollectionSubView(MulticolumnDocu getTransform={dxf} onClick={this.onChildClickHandler} renderDepth={this.props.renderDepth + 1} - /> + />; } /** * @returns the resolved list of rendered child documents, displayed diff --git a/src/client/views/collections/collectionMulticolumn/CollectionMultirowView.tsx b/src/client/views/collections/collectionMulticolumn/CollectionMultirowView.tsx index 630a178cf..5e59f8237 100644 --- a/src/client/views/collections/collectionMulticolumn/CollectionMultirowView.tsx +++ b/src/client/views/collections/collectionMulticolumn/CollectionMultirowView.tsx @@ -190,8 +190,8 @@ export class CollectionMultirowView extends CollectionSubView(MultirowDocument) @undoBatch @action - drop = (e: Event, de: DragManager.DropEvent) => { - if (super.drop(e, de)) { + onInternalDrop = (e: Event, de: DragManager.DropEvent) => { + if (super.onInternalDrop(e, de)) { de.complete.docDragData?.droppedDocuments.forEach(action((d: Doc) => { d.dimUnit = "*"; d.dimMagnitude = 1; @@ -215,7 +215,7 @@ export class CollectionMultirowView extends CollectionSubView(MultirowDocument) getTransform={dxf} onClick={this.onChildClickHandler} renderDepth={this.props.renderDepth + 1} - /> + />; } /** * @returns the resolved list of rendered child documents, displayed diff --git a/src/server/ApiManagers/DeleteManager.ts b/src/server/ApiManagers/DeleteManager.ts index be452c0ff..9e70af2eb 100644 --- a/src/server/ApiManagers/DeleteManager.ts +++ b/src/server/ApiManagers/DeleteManager.ts @@ -2,6 +2,11 @@ import ApiManager, { Registration } from "./ApiManager"; import { Method, _permission_denied, PublicHandler } from "../RouteManager"; import { WebSocket } from "../Websocket/Websocket"; import { Database } from "../database"; +import rimraf = require("rimraf"); +import { pathToDirectory, Directory } from "./UploadManager"; +import { filesDirectory } from ".."; +import { DashUploadUtils } from "../DashUploadUtils"; +import { mkdirSync } from "fs"; export default class DeleteManager extends ApiManager { @@ -31,21 +36,19 @@ export default class DeleteManager extends ApiManager { } }); - const hi: PublicHandler = async ({ res, isRelease }) => { - if (isRelease) { - return _permission_denied(res, deletionPermissionError); + register({ + method: Method.GET, + subscription: "/deleteAssets", + secureHandler: async ({ res, isRelease }) => { + if (isRelease) { + return _permission_denied(res, deletionPermissionError); + } + rimraf.sync(filesDirectory); + mkdirSync(filesDirectory); + await DashUploadUtils.buildFileDirectories(); + res.redirect("/delete"); } - await Database.Instance.deleteAll('users'); - res.redirect("/home"); - }; - - // register({ - // method: Method.GET, - // subscription: "/deleteUsers", - // onValidation: hi, - // onUnauthenticated: hi - // }); - + }); register({ method: Method.GET, -- cgit v1.2.3-70-g09d2 From be19bae1c56d3fd6e510b1aa83231cce8e3f94cc Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 22 Feb 2020 15:41:41 -0500 Subject: can now update search indices from server --- solr-8.3.1/bin/solr-8983.pid | 2 +- src/server/ApiManagers/SearchManager.ts | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/solr-8.3.1/bin/solr-8983.pid 
b/solr-8.3.1/bin/solr-8983.pid index a6905f8ba..779eb1af5 100644 --- a/solr-8.3.1/bin/solr-8983.pid +++ b/solr-8.3.1/bin/solr-8983.pid @@ -1 +1 @@ -999 +17656 diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 4ce12f9f3..4bbf31ac5 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -6,7 +6,7 @@ import * as path from 'path'; import { pathToDirectory, Directory } from "./UploadManager"; import { red, cyan, yellow } from "colors"; import RouteSubscriber from "../RouteSubscriber"; -import { exec } from "child_process"; +import { exec, execSync } from "child_process"; import { onWindows } from ".."; import { get } from "request-promise"; @@ -23,6 +23,10 @@ export class SearchManager extends ApiManager { const status = req.params.action === "start"; const success = await SolrManager.SetRunning(status); console.log(success ? `Successfully ${status ? "started" : "stopped"} Solr!` : `Uh oh! Check the console for the error that occurred while ${status ? "starting" : "stopping"} Solr`); + } else if (action === "update") { + execSync("npx ts-node updateSearch.ts", { cwd: path.resolve(__dirname, "../"), stdio: "inherit" }); + } else { + console.log(yellow(`${action} is an unknown solr operation.`)); } res.redirect("/home"); } -- cgit v1.2.3-70-g09d2 From 3256482fedde71a91387654e03540820b6780178 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sun, 23 Feb 2020 17:57:00 -0500 Subject: consolidated update search to a route --- solr-8.3.1/server/solr/dash/data/index/write.lock | 0 .../tmp/start_6476327636763392575.properties | 11 ++ .../tmp/start_7329004517204835686.properties | 11 ++ .../tmp/start_9067375725008958788.properties | 11 ++ src/server/ApiManagers/SearchManager.ts | 154 ++++++++++++++++++--- src/server/updateSearch.ts | 125 ----------------- 6 files changed, 170 insertions(+), 142 deletions(-) delete mode 100644 solr-8.3.1/server/solr/dash/data/index/write.lock create mode 100644 solr-8.3.1/server/tmp/start_6476327636763392575.properties create mode 100644 solr-8.3.1/server/tmp/start_7329004517204835686.properties create mode 100644 solr-8.3.1/server/tmp/start_9067375725008958788.properties delete mode 100644 src/server/updateSearch.ts (limited to 'src/server/ApiManagers') diff --git a/solr-8.3.1/server/solr/dash/data/index/write.lock b/solr-8.3.1/server/solr/dash/data/index/write.lock deleted file mode 100644 index e69de29bb..000000000 diff --git a/solr-8.3.1/server/tmp/start_6476327636763392575.properties b/solr-8.3.1/server/tmp/start_6476327636763392575.properties new file mode 100644 index 000000000..90a5a6ca4 --- /dev/null +++ b/solr-8.3.1/server/tmp/start_6476327636763392575.properties @@ -0,0 +1,11 @@ +#start.jar properties +#Sun Feb 23 22:38:07 UTC 2020 +java.version.platform=8 +java.version=1.8.0_131 +java.version.micro=0 +jetty.home=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.minor=8 +jetty.home.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server +jetty.base=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.major=1 +jetty.base.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server diff --git a/solr-8.3.1/server/tmp/start_7329004517204835686.properties b/solr-8.3.1/server/tmp/start_7329004517204835686.properties new file mode 100644 index 000000000..4df8d553e --- /dev/null +++ b/solr-8.3.1/server/tmp/start_7329004517204835686.properties @@ -0,0 +1,11 @@ +#start.jar properties +#Sun Feb 23 20:58:08 UTC 2020 
+java.version.platform=8 +java.version=1.8.0_131 +java.version.micro=0 +jetty.home=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.minor=8 +jetty.home.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server +jetty.base=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.major=1 +jetty.base.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server diff --git a/solr-8.3.1/server/tmp/start_9067375725008958788.properties b/solr-8.3.1/server/tmp/start_9067375725008958788.properties new file mode 100644 index 000000000..2fb8898e9 --- /dev/null +++ b/solr-8.3.1/server/tmp/start_9067375725008958788.properties @@ -0,0 +1,11 @@ +#start.jar properties +#Sun Feb 23 21:03:16 UTC 2020 +java.version.platform=8 +java.version=1.8.0_131 +java.version.micro=0 +jetty.home=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.minor=8 +jetty.home.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server +jetty.base=C\:\\Users\\avd\\Desktop\\Sam\\Dash-Web\\solr-8.3.1\\server +java.version.major=1 +jetty.base.uri=file\:///C\:/Users/avd/Desktop/Sam/Dash-Web/solr-8.3.1/server diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts index 4bbf31ac5..be17c3105 100644 --- a/src/server/ApiManagers/SearchManager.ts +++ b/src/server/ApiManagers/SearchManager.ts @@ -4,11 +4,15 @@ import { Search } from "../Search"; const findInFiles = require('find-in-files'); import * as path from 'path'; import { pathToDirectory, Directory } from "./UploadManager"; -import { red, cyan, yellow } from "colors"; +import { red, cyan, yellow, green } from "colors"; import RouteSubscriber from "../RouteSubscriber"; import { exec, execSync } from "child_process"; import { onWindows } from ".."; import { get } from "request-promise"; +import { log_execution } from "../ActionUtilities"; +import { Database } from "../database"; +import rimraf = require("rimraf"); +import { mkdirSync, chmod, chmodSync } from "fs"; export class SearchManager extends ApiManager { @@ -19,14 +23,17 @@ export class SearchManager extends ApiManager { subscription: new RouteSubscriber("solr").add("action"), secureHandler: async ({ req, res }) => { const { action } = req.params; - if (["start", "stop"].includes(action)) { - const status = req.params.action === "start"; - const success = await SolrManager.SetRunning(status); - console.log(success ? `Successfully ${status ? "started" : "stopped"} Solr!` : `Uh oh! Check the console for the error that occurred while ${status ? "starting" : "stopping"} Solr`); - } else if (action === "update") { - execSync("npx ts-node updateSearch.ts", { cwd: path.resolve(__dirname, "../"), stdio: "inherit" }); - } else { - console.log(yellow(`${action} is an unknown solr operation.`)); + switch (action) { + case "start": + case "stop": + const status = req.params.action === "start"; + SolrManager.SetRunning(status); + break; + case "update": + await SolrManager.update(); + break; + default: + console.log(yellow(`${action} is an unknown solr operation.`)); } res.redirect("/home"); } @@ -73,12 +80,10 @@ export class SearchManager extends ApiManager { export namespace SolrManager { - const command = onWindows ? "solr.cmd" : "solr"; - - export async function SetRunning(status: boolean): Promise { + export function SetRunning(status: boolean) { const args = status ? 
"start" : "stop -p 8983"; console.log(`solr management: trying to ${args}`); - exec(`${command} ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { + exec(`solr ${args}`, { cwd: "./solr-8.3.1/bin" }, (error, stdout, stderr) => { if (error) { console.log(red(`solr management error: unable to ${args} server`)); console.log(red(error.message)); @@ -86,12 +91,127 @@ export namespace SolrManager { console.log(cyan(stdout)); console.log(yellow(stderr)); }); + if (status) { + console.log(cyan("Start script is executing: please allow 15 seconds for solr to start on port 8983.")); + } + } + + export async function update() { + console.log(green("Beginning update...")); + await log_execution({ + startMessage: "Clearing existing Solr information...", + endMessage: "Solr information successfully cleared", + action: Search.clear, + color: cyan + }); + const cursor = await log_execution({ + startMessage: "Connecting to and querying for all documents from database...", + endMessage: ({ result, error }) => { + const success = error === null && result !== undefined; + if (!success) { + console.log(red("Unable to connect to the database.")); + process.exit(0); + } + return "Connection successful and query complete"; + }, + action: () => Database.Instance.query({}), + color: yellow + }); + const updates: any[] = []; + let numDocs = 0; + function updateDoc(doc: any) { + numDocs++; + if ((numDocs % 50) === 0) { + console.log(`Batch of 50 complete, total of ${numDocs}`); + } + if (doc.__type !== "Doc") { + return; + } + const fields = doc.fields; + if (!fields) { + return; + } + const update: any = { id: doc._id }; + let dynfield = false; + for (const key in fields) { + const value = fields[key]; + const term = ToSearchTerm(value); + if (term !== undefined) { + const { suffix, value } = term; + update[key + suffix] = value; + dynfield = true; + } + } + if (dynfield) { + updates.push(update); + } + } + await cursor?.forEach(updateDoc); + const result = await log_execution({ + startMessage: `Dispatching updates for ${updates.length} documents`, + endMessage: "Dispatched updates complete", + action: () => Search.updateDocuments(updates), + color: cyan + }); try { - await get("http://localhost:8983"); - return true; - } catch { - return false; + if (result) { + const { status } = JSON.parse(result).responseHeader; + console.log(status ? red(`Failed with status code (${status})`) : green("Success!")); + } else { + console.log(red("Solr is likely not running!")); + } + } catch (e) { + console.log(red("Error:")); + console.log(e); + console.log("\n"); } + await cursor?.close(); + } + + const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { + "number": "_n", + "string": "_t", + "boolean": "_b", + "image": ["_t", "url"], + "video": ["_t", "url"], + "pdf": ["_t", "url"], + "audio": ["_t", "url"], + "web": ["_t", "url"], + "date": ["_d", value => new Date(value.date).toISOString()], + "proxy": ["_i", "fieldId"], + "list": ["_l", list => { + const results = []; + for (const value of list.fields) { + const term = ToSearchTerm(value); + if (term) { + results.push(term.value); + } + } + return results.length ? 
results : null; + }] + }; + + function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { + if (val === null || val === undefined) { + return; + } + const type = val.__type || typeof val; + let suffix = suffixMap[type]; + if (!suffix) { + return; + } + + if (Array.isArray(suffix)) { + const accessor = suffix[1]; + if (typeof accessor === "function") { + val = accessor(val); + } else { + val = val[accessor]; + } + suffix = suffix[0]; + } + + return { suffix, value: val }; } } \ No newline at end of file diff --git a/src/server/updateSearch.ts b/src/server/updateSearch.ts deleted file mode 100644 index dd2067c87..000000000 --- a/src/server/updateSearch.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { Database } from "./database"; -import { Search } from "./Search"; -import { log_execution } from "./ActionUtilities"; -import { cyan, green, yellow, red } from "colors"; - -const suffixMap: { [type: string]: (string | [string, string | ((json: any) => any)]) } = { - "number": "_n", - "string": "_t", - "boolean": "_b", - "image": ["_t", "url"], - "video": ["_t", "url"], - "pdf": ["_t", "url"], - "audio": ["_t", "url"], - "web": ["_t", "url"], - "date": ["_d", value => new Date(value.date).toISOString()], - "proxy": ["_i", "fieldId"], - "list": ["_l", list => { - const results = []; - for (const value of list.fields) { - const term = ToSearchTerm(value); - if (term) { - results.push(term.value); - } - } - return results.length ? results : null; - }] -}; - -function ToSearchTerm(val: any): { suffix: string, value: any } | undefined { - if (val === null || val === undefined) { - return; - } - const type = val.__type || typeof val; - let suffix = suffixMap[type]; - if (!suffix) { - return; - } - - if (Array.isArray(suffix)) { - const accessor = suffix[1]; - if (typeof accessor === "function") { - val = accessor(val); - } else { - val = val[accessor]; - } - suffix = suffix[0]; - } - - return { suffix, value: val }; -} - -async function update() { - console.log(green("Beginning update...")); - await log_execution({ - startMessage: "Clearing existing Solr information...", - endMessage: "Solr information successfully cleared", - action: Search.clear, - color: cyan - }); - const cursor = await log_execution({ - startMessage: "Connecting to and querying for all documents from database...", - endMessage: ({ result, error }) => { - const success = error === null && result !== undefined; - if (!success) { - console.log(red("Unable to connect to the database.")); - process.exit(0); - } - return "Connection successful and query complete"; - }, - action: () => Database.Instance.query({}), - color: yellow - }); - const updates: any[] = []; - let numDocs = 0; - function updateDoc(doc: any) { - numDocs++; - if ((numDocs % 50) === 0) { - console.log(`Batch of 50 complete, total of ${numDocs}`); - } - if (doc.__type !== "Doc") { - return; - } - const fields = doc.fields; - if (!fields) { - return; - } - const update: any = { id: doc._id }; - let dynfield = false; - for (const key in fields) { - const value = fields[key]; - const term = ToSearchTerm(value); - if (term !== undefined) { - const { suffix, value } = term; - update[key + suffix] = value; - dynfield = true; - } - } - if (dynfield) { - updates.push(update); - } - } - await cursor?.forEach(updateDoc); - const result = await log_execution({ - startMessage: `Dispatching updates for ${updates.length} documents`, - endMessage: "Dispatched updates complete", - action: () => Search.updateDocuments(updates), - color: cyan - }); - try { - if (result) { 
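// Annotation, not part of the patch: a worked example of the updates that updateDoc/ToSearchTerm
// above produce, assuming the Solr core is configured with dynamic fields *_n (number), *_t (text),
// *_b (boolean) and *_l (list) matching the suffixMap. The sample document is hypothetical.
const sampleDoc = {
    _id: "doc123",
    __type: "Doc",
    fields: {
        title: "Buxton collection",                              // string  -> title_t
        count: 42,                                               // number  -> count_n
        archived: false,                                         // boolean -> archived_b
        tags: { __type: "list", fields: ["lens", "trackball"] }, // list    -> tags_l
    },
};
// updateDoc(sampleDoc) would contribute this object to the batch passed to Search.updateDocuments:
const expectedSolrUpdate = {
    id: "doc123",
    title_t: "Buxton collection",
    count_n: 42,
    archived_b: false,
    tags_l: ["lens", "trackball"],
};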
- const { status } = JSON.parse(result).responseHeader; - console.log(status ? red(`Failed with status code (${status})`) : green("Success!")); - } else { - console.log(red("Solr is likely not running!")); - } - } catch (e) { - console.log(red("Error:")); - console.log(e); - console.log("\n"); - } - await cursor?.close(); - process.exit(0); -} - -update(); \ No newline at end of file -- cgit v1.2.3-70-g09d2 From b97d0d1c03f567d53297f8922c7f407cbf0a9b7d Mon Sep 17 00:00:00 2001 From: kimdahey Date: Sat, 29 Feb 2020 13:50:28 -0500 Subject: added supported audio formats --- package-lock.json | 46 ++++++++++++++++----------------- src/server/ApiManagers/UploadManager.ts | 8 ++++++ src/server/SharedMediaTypes.ts | 1 + 3 files changed, 32 insertions(+), 23 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/package-lock.json b/package-lock.json index 2636d7113..c487b11c8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1997,7 +1997,7 @@ }, "util": { "version": "0.10.3", - "resolved": "http://registry.npmjs.org/util/-/util-0.10.3.tgz", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", "dev": true, "requires": { @@ -2585,7 +2585,7 @@ }, "browserify-aes": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", "requires": { "buffer-xor": "^1.0.3", @@ -2619,7 +2619,7 @@ }, "browserify-rsa": { "version": "4.0.1", - "resolved": "http://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", "requires": { "bn.js": "^4.1.0", @@ -2783,7 +2783,7 @@ }, "camelcase-keys": { "version": "2.1.0", - "resolved": "http://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", "requires": { "camelcase": "^2.0.0", @@ -3576,7 +3576,7 @@ }, "create-hash": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", "requires": { "cipher-base": "^1.0.1", @@ -3588,7 +3588,7 @@ }, "create-hmac": { "version": "1.1.7", - "resolved": "http://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", "requires": { "cipher-base": "^1.0.3", @@ -4126,7 +4126,7 @@ }, "diffie-hellman": { "version": "5.0.3", - "resolved": "http://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", "requires": { "bn.js": "^4.1.0", @@ -6994,7 +6994,7 @@ }, "is-accessor-descriptor": { "version": "0.1.6", - "resolved": "http://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", + "resolved": 
"https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", "requires": { "kind-of": "^3.0.2" @@ -7049,7 +7049,7 @@ }, "is-data-descriptor": { "version": "0.1.4", - "resolved": "http://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", "requires": { "kind-of": "^3.0.2" @@ -7736,7 +7736,7 @@ }, "load-json-file": { "version": "1.1.0", - "resolved": "http://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", "requires": { "graceful-fs": "^4.1.2", @@ -8054,7 +8054,7 @@ }, "media-typer": { "version": "0.3.0", - "resolved": "http://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" }, "mem": { @@ -8086,7 +8086,7 @@ }, "meow": { "version": "3.7.0", - "resolved": "http://registry.npmjs.org/meow/-/meow-3.7.0.tgz", + "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", "requires": { "camelcase-keys": "^2.0.0", @@ -8261,7 +8261,7 @@ }, "mkdirp": { "version": "0.5.1", - "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "requires": { "minimist": "0.0.8" @@ -8594,7 +8594,7 @@ }, "next-tick": { "version": "1.0.0", - "resolved": "http://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=" }, "nice-try": { @@ -8677,7 +8677,7 @@ }, "semver": { "version": "5.3.0", - "resolved": "http://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", "integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=" }, "tar": { @@ -12277,7 +12277,7 @@ }, "os-homedir": { "version": "1.0.2", - "resolved": "http://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" }, "os-locale": { @@ -12290,7 +12290,7 @@ }, "os-tmpdir": { "version": "1.0.2", - "resolved": "http://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" }, "osenv": { @@ -12530,7 +12530,7 @@ }, "path-is-absolute": { "version": "1.0.1", - "resolved": "http://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-is-inside": { @@ -14367,7 +14367,7 @@ }, "safe-regex": { "version": "1.1.0", - "resolved": "http://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", "requires": { "ret": "~0.1.10" @@ -14642,7 +14642,7 @@ }, "sha.js": { "version": "2.4.11", - "resolved": "http://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "resolved": 
"https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", "requires": { "inherits": "^2.0.1", @@ -15499,7 +15499,7 @@ }, "string_decoder": { "version": "1.1.1", - "resolved": "http://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" @@ -15545,7 +15545,7 @@ }, "strip-eof": { "version": "1.0.0", - "resolved": "http://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" }, "strip-indent": { @@ -16317,7 +16317,7 @@ }, "tty-browserify": { "version": "0.0.0", - "resolved": "http://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=", "dev": true }, diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 8f2a5ea3e..4dc58d611 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -59,6 +59,14 @@ export default class UploadManager extends ApiManager { } }); + register({ + method: Method.GET, + subscription: "/hello", + secureHandler: ({ req, res }) => { + res.send("
world!
"); + } + }); + register({ method: Method.POST, subscription: "/uploadRemoteImage", diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts index 185e787cc..55c0d14e8 100644 --- a/src/server/SharedMediaTypes.ts +++ b/src/server/SharedMediaTypes.ts @@ -10,6 +10,7 @@ export namespace AcceptibleMedia { export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs]; export const videoFormats = [".mov", ".mp4"]; export const applicationFormats = [".pdf"]; + export const audioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a"]; } export namespace Upload { -- cgit v1.2.3-70-g09d2 From 22bc57fe1f997d5f8f6a471b4b719b50fbd8552a Mon Sep 17 00:00:00 2001 From: kimdahey Date: Sat, 29 Feb 2020 14:16:13 -0500 Subject: created upload audio --- src/server/ApiManagers/UploadManager.ts | 3 ++- src/server/DashUploadUtils.ts | 13 +++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index 4dc58d611..095567be1 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -19,7 +19,8 @@ export enum Directory { videos = "videos", pdfs = "pdfs", text = "text", - pdf_thumbnails = "pdf_thumbnails" + pdf_thumbnails = "pdf_thumbnails", + audio = "audio" } export function serverPathToFile(directory: Directory, filename: string) { diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index ea4c26ca2..e5aec852f 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -82,6 +82,19 @@ export namespace DashUploadUtils { return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) }; } + async function uploadAudio(file: File) { + const { path: sourcePath } = file; + const dataBuffer = readFileSync(sourcePath); + + await new Promise((resolve, reject) => { + const name = path.basename(sourcePath); + const audioFilename = `${name.substring(0, name.length - 4)}.mp3`; + const writeStream = createWriteStream(serverPathToFile(Directory.audio, audioFilename)); + writeStream.write(result.text, error => error ? 
reject(error) : resolve()); + }); + return MoveParsedFile(file, Directory.audio); + } + async function UploadPdf(file: File) { const { path: sourcePath } = file; const dataBuffer = readFileSync(sourcePath); -- cgit v1.2.3-70-g09d2 From 8c39fb5678bfdd414249f10b0b80e823370f7228 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 29 Feb 2020 15:26:55 -0500 Subject: consolidate server side google photos files --- src/server/ApiManagers/GooglePhotosManager.ts | 153 +++++++++++++++++++++- src/server/apis/google/GooglePhotosUploadUtils.ts | 150 --------------------- 2 files changed, 149 insertions(+), 154 deletions(-) delete mode 100644 src/server/apis/google/GooglePhotosUploadUtils.ts (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 25c54ee2e..98f6a1404 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -3,12 +3,13 @@ import { Method, _error, _success, _invalid } from "../RouteManager"; import * as path from "path"; import { GoogleApiServerUtils } from "../apis/google/GoogleApiServerUtils"; import { BatchedArray, TimeUnit } from "array-batcher"; -import { GooglePhotosUploadUtils } from "../apis/google/GooglePhotosUploadUtils"; import { Opt } from "../../new_fields/Doc"; import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils"; import { Database } from "../database"; import { red } from "colors"; import { Upload } from "../SharedMediaTypes"; +import request = require('request-promise'); +import { NewMediaItemResult } from "../apis/google/SharedTypes"; const prefix = "google_photos_"; const remoteUploadError = "None of the preliminary uploads to Google's servers was successful."; @@ -64,7 +65,7 @@ export default class GooglePhotosManager extends ApiManager { // set on Google's servers, and would instantly return an error. So, we ease things out and send the photos to upload in // batches of 25, where the next batch is sent 100 millieconds after we receive a response from Google's servers. const failed: GooglePhotosUploadFailure[] = []; - const batched = BatchedArray.from(media, { batchSize: 25 }); + const batched = BatchedArray.from(media, { batchSize: 25 }); const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; const newMediaItems = await batched.batchedMapPatientInterval( interval, @@ -78,7 +79,7 @@ export default class GooglePhotosManager extends ApiManager { const imageToUpload = InjectSize(url, SizeSuffix.Original); // STEP 1/2: send the raw bytes of the image from our server to Google's servers. 
We'll get back an upload token // which acts as a pointer to those bytes that we can use to locate them later on - const uploadToken = await GooglePhotosUploadUtils.DispatchGooglePhotosUpload(token, imageToUpload).catch(fail); + const uploadToken = await Uploader.SendBytes(token, imageToUpload).catch(fail); if (!uploadToken) { fail(`${path.extname(url)} is not an accepted extension`); } else { @@ -110,7 +111,7 @@ export default class GooglePhotosManager extends ApiManager { } // STEP 2/2: create the media items and return the API's response to the client, along with any failures - return GooglePhotosUploadUtils.CreateMediaItems(token, newMediaItems, req.body.album).then( + return Uploader.CreateMediaItems(token, newMediaItems, req.body.album).then( results => _success(res, { results, failed }), error => _error(res, mediaError, error) ); @@ -183,4 +184,148 @@ export default class GooglePhotosManager extends ApiManager { }); } +} + +/** + * This namespace encompasses the logic + * necessary to upload images to Google's server, + * and then initialize / create those images in the Photos + * API given the upload tokens returned from the initial + * uploading process. + * + * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate + */ +export namespace Uploader { + + /** + * Specifies the structure of the object + * necessary to upload bytes to Google's servers. + * The url is streamed to access the image's bytes, + * and the description is what appears in Google Photos' + * description field. + */ + export interface UploadSource { + url: string; + description: string; + } + + /** + * This is the format needed to pass + * into the BatchCreate API request + * to take a reference to raw uploaded bytes + * and actually create an image in Google Photos. + * + * So, to instantiate this interface you must have already dispatched an upload + * and received an upload token. + */ + export interface NewMediaItem { + description: string; + simpleMediaItem: { + uploadToken: string; + }; + } + + /** + * A utility function to streamline making + * calls to the API's url - accentuates + * the relative path in the caller. + * @param extension the desired + * subset of the API + */ + function prepend(extension: string): string { + return `https://photoslibrary.googleapis.com/v1/${extension}`; + } + + /** + * Factors out the creation of the API request's + * authentication elements stored in the header. + * @param type the contents of the request + * @param token the user-specific Google access token + */ + function headers(type: string, token: string) { + return { + 'Content-Type': `application/${type}`, + 'Authorization': `Bearer ${token}`, + }; + } + + /** + * This is the first step in the remote image creation process. + * Here we upload the raw bytes of the image to Google's servers by + * setting authentication and other required header properties and including + * the raw bytes to the image, to be uploaded, in the body of the request. 
+ * @param bearerToken the user-specific Google access token, specifies the account associated + * with the eventual image creation + * @param url the url of the image to upload + * @param filename an optional name associated with the uploaded image - if not specified + * defaults to the filename (basename) in the url + */ + export const SendBytes = async (bearerToken: string, url: string, filename?: string): Promise => { + // check if the url points to a non-image or an unsupported format + if (!DashUploadUtils.validateExtension(url)) { + return undefined; + } + const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data + const parameters = { + method: 'POST', + uri: prepend('uploads'), + headers: { + ...headers('octet-stream', bearerToken), + 'X-Goog-Upload-File-Name': filename || path.basename(url), + 'X-Goog-Upload-Protocol': 'raw' + }, + body + }; + return new Promise((resolve, reject) => request(parameters, (error, _response, body) => { + if (error) { + // on rejection, the server logs the error and the offending image + return reject(error); + } + resolve(body); + })); + }; + + /** + * This is the second step in the remote image creation process: having uploaded + * the raw bytes of the image and received / stored pointers (upload tokens) to those + * bytes, we can now instruct the API to finalize the creation of those images by + * submitting a batch create request with the list of upload tokens and the description + * to be associated with reach resulting new image. + * @param bearerToken the user-specific Google access token, specifies the account associated + * with the eventual image creation + * @param newMediaItems a list of objects containing a description and, effectively, the + * pointer to the uploaded bytes + * @param album if included, will add all of the newly created remote images to the album + * with the specified id + */ + export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { + // it's important to note that the API can't handle more than 50 items in each request and + // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)... 
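// Annotation, not part of the patch: how the two steps above are meant to be chained together.
// This sketch assumes it lives in this file (so Uploader is in scope) and that a valid user
// access token has already been retrieved, e.g. through GoogleApiServerUtils.
async function uploadOneImage(token: string, url: string, description: string) {
    // step 1: stream the raw bytes to Google and get back an upload token (a pointer to the bytes)
    const uploadToken = await Uploader.SendBytes(token, url);
    if (!uploadToken) {
        return undefined; // unsupported extension or failed preliminary upload
    }
    // step 2: ask the Photos API to turn that upload token into a real media item
    const newMediaItems: Uploader.NewMediaItem[] = [{ description, simpleMediaItem: { uploadToken } }];
    return Uploader.CreateMediaItems(token, newMediaItems);
}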
+ const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); + // ...so we execute them in delayed batches and await the entire execution + return batched.batchedMapPatientInterval( + { magnitude: 100, unit: TimeUnit.Milliseconds }, + async (batch: NewMediaItem[], collector: any): Promise => { + const parameters = { + method: 'POST', + headers: headers('json', bearerToken), + uri: prepend('mediaItems:batchCreate'), + body: { newMediaItems: batch } as any, + json: true + }; + // register the target album, if provided + album && (parameters.body.albumId = album.id); + collector.push(...(await new Promise((resolve, reject) => { + request(parameters, (error, _response, body) => { + if (error) { + reject(error); + } else { + resolve(body.newMediaItemResults); + } + }); + }))); + } + ); + }; + } \ No newline at end of file diff --git a/src/server/apis/google/GooglePhotosUploadUtils.ts b/src/server/apis/google/GooglePhotosUploadUtils.ts deleted file mode 100644 index d305eed0a..000000000 --- a/src/server/apis/google/GooglePhotosUploadUtils.ts +++ /dev/null @@ -1,150 +0,0 @@ - -import request = require('request-promise'); -import * as path from 'path'; -import { NewMediaItemResult } from './SharedTypes'; -import { BatchedArray, TimeUnit } from 'array-batcher'; -import { DashUploadUtils } from '../../DashUploadUtils'; - -/** - * This namespace encompasses the logic - * necessary to upload images to Google's server, - * and then initialize / create those images in the Photos - * API given the upload tokens returned from the initial - * uploading process. - * - * https://developers.google.com/photos/library/reference/rest/v1/mediaItems/batchCreate - */ -export namespace GooglePhotosUploadUtils { - - /** - * Specifies the structure of the object - * necessary to upload bytes to Google's servers. - * The url is streamed to access the image's bytes, - * and the description is what appears in Google Photos' - * description field. - */ - export interface UploadSource { - url: string; - description: string; - } - - /** - * This is the format needed to pass - * into the BatchCreate API request - * to take a reference to raw uploaded bytes - * and actually create an image in Google Photos. - * - * So, to instantiate this interface you must have already dispatched an upload - * and received an upload token. - */ - export interface NewMediaItem { - description: string; - simpleMediaItem: { - uploadToken: string; - }; - } - - /** - * A utility function to streamline making - * calls to the API's url - accentuates - * the relative path in the caller. - * @param extension the desired - * subset of the API - */ - function prepend(extension: string): string { - return `https://photoslibrary.googleapis.com/v1/${extension}`; - } - - /** - * Factors out the creation of the API request's - * authentication elements stored in the header. - * @param type the contents of the request - * @param token the user-specific Google access token - */ - function headers(type: string, token: string) { - return { - 'Content-Type': `application/${type}`, - 'Authorization': `Bearer ${token}`, - }; - } - - /** - * This is the first step in the remote image creation process. - * Here we upload the raw bytes of the image to Google's servers by - * setting authentication and other required header properties and including - * the raw bytes to the image, to be uploaded, in the body of the request. 
- * @param bearerToken the user-specific Google access token, specifies the account associated - * with the eventual image creation - * @param url the url of the image to upload - * @param filename an optional name associated with the uploaded image - if not specified - * defaults to the filename (basename) in the url - */ - export const DispatchGooglePhotosUpload = async (bearerToken: string, url: string, filename?: string): Promise => { - // check if the url points to a non-image or an unsupported format - if (!DashUploadUtils.validateExtension(url)) { - return undefined; - } - const body = await request(url, { encoding: null }); // returns a readable stream with the unencoded binary image data - const parameters = { - method: 'POST', - uri: prepend('uploads'), - headers: { - ...headers('octet-stream', bearerToken), - 'X-Goog-Upload-File-Name': filename || path.basename(url), - 'X-Goog-Upload-Protocol': 'raw' - }, - body - }; - return new Promise((resolve, reject) => request(parameters, (error, _response, body) => { - if (error) { - // on rejection, the server logs the error and the offending image - return reject(error); - } - resolve(body); - })); - }; - - /** - * This is the second step in the remote image creation process: having uploaded - * the raw bytes of the image and received / stored pointers (upload tokens) to those - * bytes, we can now instruct the API to finalize the creation of those images by - * submitting a batch create request with the list of upload tokens and the description - * to be associated with reach resulting new image. - * @param bearerToken the user-specific Google access token, specifies the account associated - * with the eventual image creation - * @param newMediaItems a list of objects containing a description and, effectively, the - * pointer to the uploaded bytes - * @param album if included, will add all of the newly created remote images to the album - * with the specified id - */ - export const CreateMediaItems = async (bearerToken: string, newMediaItems: NewMediaItem[], album?: { id: string }): Promise => { - // it's important to note that the API can't handle more than 50 items in each request and - // seems to need at least some latency between requests (spamming it synchronously has led to the server returning errors)... 
- const batched = BatchedArray.from(newMediaItems, { batchSize: 50 }); - // ...so we execute them in delayed batches and await the entire execution - return batched.batchedMapPatientInterval( - { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: NewMediaItem[], collector: any): Promise => { - const parameters = { - method: 'POST', - headers: headers('json', bearerToken), - uri: prepend('mediaItems:batchCreate'), - body: { newMediaItems: batch } as any, - json: true - }; - // register the target album, if provided - album && (parameters.body.albumId = album.id); - collector.push(...(await new Promise((resolve, reject) => { - request(parameters, (error, _response, body) => { - if (error) { - reject(error); - } else { - resolve(body.newMediaItemResults); - } - }); - }))); - } - ); - }; - -} \ No newline at end of file -- cgit v1.2.3-70-g09d2 From 7bf05274e1f3c75217db11bf3d4112431f55e1b5 Mon Sep 17 00:00:00 2001 From: Sam Wilkins Date: Sat, 29 Feb 2020 15:31:51 -0500 Subject: allow proper type inference for batched arrays systemwide --- src/server/ApiManagers/GooglePhotosManager.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/server/ApiManagers/GooglePhotosManager.ts b/src/server/ApiManagers/GooglePhotosManager.ts index 98f6a1404..88219423d 100644 --- a/src/server/ApiManagers/GooglePhotosManager.ts +++ b/src/server/ApiManagers/GooglePhotosManager.ts @@ -69,7 +69,7 @@ export default class GooglePhotosManager extends ApiManager { const interval = { magnitude: 100, unit: TimeUnit.Milliseconds }; const newMediaItems = await batched.batchedMapPatientInterval( interval, - async (batch: any, collector: any, { completedBatches }: any) => { + async (batch, collector, { completedBatches }) => { for (let index = 0; index < batch.length; index++) { const { url, description } = batch[index]; // a local function used to record failure of an upload @@ -305,7 +305,7 @@ export namespace Uploader { // ...so we execute them in delayed batches and await the entire execution return batched.batchedMapPatientInterval( { magnitude: 100, unit: TimeUnit.Milliseconds }, - async (batch: NewMediaItem[], collector: any): Promise => { + async (batch: NewMediaItem[], collector): Promise => { const parameters = { method: 'POST', headers: headers('json', bearerToken), -- cgit v1.2.3-70-g09d2 From eddd9181d69bdf71d695c5adab9660cd8308fa69 Mon Sep 17 00:00:00 2001 From: andrewdkim Date: Wed, 4 Mar 2020 20:48:50 -0500 Subject: fixed file location bug --- src/client/views/nodes/AudioBox.tsx | 13 ++++++++----- src/server/ApiManagers/UploadManager.ts | 3 +++ src/server/DashUploadUtils.ts | 17 +++++++---------- src/server/SharedMediaTypes.ts | 2 +- 4 files changed, 19 insertions(+), 16 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/views/nodes/AudioBox.tsx b/src/client/views/nodes/AudioBox.tsx index 62a479b2a..5ed8fba19 100644 --- a/src/client/views/nodes/AudioBox.tsx +++ b/src/client/views/nodes/AudioBox.tsx @@ -44,6 +44,7 @@ export class AudioBox extends DocExtendableComponent { self.stopRecording(); - gumStream.getAudioTracks()[0].stop(); - }, 60 * 60 * 1000); // stop after an hour? 
+ self._stream?.getAudioTracks()[0].stop(); + }, 60 * 1000); // stop after a minute }); } @@ -169,6 +170,7 @@ export class AudioBox extends DocExtendableComponent { - this.pause(); + this.stopRecording(); this._ele!.currentTime = 0; e.stopPropagation(); } @@ -210,6 +212,7 @@ export class AudioBox extends DocExtendableComponent; } + // line 226 is stop button but it doesn't do anything render() { const interactive = this.active() ? "-interactive" : ""; return
{ + const { sources } = req.body; if (Array.isArray(sources)) { const results = await Promise.all(sources.map(source => DashUploadUtils.UploadImage(source))); @@ -85,6 +86,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: "/uploadDoc", secureHandler: ({ req, res }) => { + const form = new formidable.IncomingForm(); form.keepExtensions = true; // let path = req.body.path; @@ -189,6 +191,7 @@ export default class UploadManager extends ApiManager { method: Method.POST, subscription: "/inspectImage", secureHandler: async ({ req, res }) => { + const { source } = req.body; if (typeof source === "string") { return res.send(await DashUploadUtils.InspectImage(source)); diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts index 7cbfc4408..fb4e87c08 100644 --- a/src/server/DashUploadUtils.ts +++ b/src/server/DashUploadUtils.ts @@ -78,7 +78,8 @@ export namespace DashUploadUtils { } case "audio": if (audioFormats.includes(format)) { - return UploadAudio(file); + console.log("1"); + return MoveParsedFile(file, Directory.audio); } } @@ -86,16 +87,9 @@ export namespace DashUploadUtils { return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) }; } - async function uploadAudio(file: File) { + async function UploadAudio(file: File) { const { path: sourcePath } = file; - const dataBuffer = readFileSync(sourcePath); - await new Promise((resolve, reject) => { - const name = path.basename(sourcePath); - const audioFilename = `${name.substring(0, name.length - 4)}.mp3`; - const writeStream = createWriteStream(serverPathToFile(Directory.audio, audioFilename)); - writeStream.write(result.text, error => error ? reject(error) : resolve()); - }); return MoveParsedFile(file, Directory.audio); } @@ -109,6 +103,7 @@ export namespace DashUploadUtils { const writeStream = createWriteStream(serverPathToFile(Directory.text, textFilename)); writeStream.write(result.text, error => error ? 
reject(error) : resolve()); }); + console.log(MoveParsedFile(file, Directory.pdfs)); return MoveParsedFile(file, Directory.pdfs); } @@ -212,8 +207,10 @@ export namespace DashUploadUtils { accessPaths: { agnostic: getAccessPaths(destination, name) } + } - }); + } + ); }); }); } diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts index 55c0d14e8..3d3683912 100644 --- a/src/server/SharedMediaTypes.ts +++ b/src/server/SharedMediaTypes.ts @@ -10,7 +10,7 @@ export namespace AcceptibleMedia { export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs]; export const videoFormats = [".mov", ".mp4"]; export const applicationFormats = [".pdf"]; - export const audioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a"]; + export const audioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a", ".webm;codecs=opus"]; } export namespace Upload { -- cgit v1.2.3-70-g09d2 From 12329d9de180e8f0bd629ccf6b351baab7782fc5 Mon Sep 17 00:00:00 2001 From: Bob Zeleznik Date: Wed, 4 Mar 2020 23:41:31 -0500 Subject: fixes for video snapshots & following links to anchors in videos --- src/client/documents/Documents.ts | 4 ++-- src/client/util/DocumentManager.ts | 24 ++++++++++++++---------- src/client/util/DragManager.ts | 2 +- src/client/views/nodes/VideoBox.tsx | 1 - src/server/ApiManagers/UploadManager.ts | 2 ++ 5 files changed, 19 insertions(+), 14 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/documents/Documents.ts b/src/client/documents/Documents.ts index b06ff5465..5f0e63b56 100644 --- a/src/client/documents/Documents.ts +++ b/src/client/documents/Documents.ts @@ -549,8 +549,8 @@ export namespace Docs { linkDocProto.anchor2 = target.doc; linkDocProto.anchor1Context = source.ctx; linkDocProto.anchor2Context = target.ctx; - linkDocProto.anchor1Timecode = source.doc.currentTimecode; - linkDocProto.anchor2Timecode = target.doc.currentTimecode; + linkDocProto.anchor1Timecode = source.doc.currentTimecode || source.doc.displayTimecode; + linkDocProto.anchor2Timecode = target.doc.currentTimecode || source.doc.displayTimecode; if (linkDocProto.layout_key1 === undefined) { Cast(linkDocProto.proto, Doc, null).layout_key1 = DocuLinkBox.LayoutString("anchor1"); diff --git a/src/client/util/DocumentManager.ts b/src/client/util/DocumentManager.ts index 0ec1d23d9..323d31af2 100644 --- a/src/client/util/DocumentManager.ts +++ b/src/client/util/DocumentManager.ts @@ -158,16 +158,20 @@ export class DocumentManager { targetDocContextView.props.focus(targetDocContextView.props.Document, willZoom); // now find the target document within the context - setTimeout(() => { - const retryDocView = DocumentManager.Instance.getDocumentView(targetDoc); - if (retryDocView) { - retryDocView.props.focus(targetDoc, willZoom); // focus on the target if it now exists in the context - } else { - if (closeContextIfNotFound && targetDocContextView.props.removeDocument) targetDocContextView.props.removeDocument(targetDocContextView.props.Document); - targetDoc.layout && (dockFunc || CollectionDockingView.AddRightSplit)(Doc.BrushDoc(Doc.MakeAlias(targetDoc))); // otherwise create a new view of the target - } - highlight(); - }, 0); + if (targetDoc.displayTimecode) { // the target should show up once the video scrubs to the display timecode; + targetDocContext.currentTimecode = targetDoc.displayTimecode; + } else { + setTimeout(() => { + const retryDocView = DocumentManager.Instance.getDocumentView(targetDoc); + if (retryDocView) { + retryDocView.props.focus(targetDoc, 
willZoom); // focus on the target if it now exists in the context + } else { + if (closeContextIfNotFound && targetDocContextView.props.removeDocument) targetDocContextView.props.removeDocument(targetDocContextView.props.Document); + targetDoc.layout && (dockFunc || CollectionDockingView.AddRightSplit)(Doc.BrushDoc(Doc.MakeAlias(targetDoc))); // otherwise create a new view of the target + } + highlight(); + }, 0); + } } else { // there's no context view so we need to create one first and try again (dockFunc || CollectionDockingView.AddRightSplit)(targetDocContext); setTimeout(() => { diff --git a/src/client/util/DragManager.ts b/src/client/util/DragManager.ts index af920c7da..8ddd59237 100644 --- a/src/client/util/DragManager.ts +++ b/src/client/util/DragManager.ts @@ -193,7 +193,7 @@ export namespace DragManager { // drag a document and drop it (or make an alias/copy on drop) export function StartDocumentDrag(eles: HTMLElement[], dragData: DocumentDragData, downX: number, downY: number, options?: DragOptions) { const addAudioTag = (dropDoc: any) => { - !dropDoc.creationDate && (dropDoc.creationDate = new DateField); + dropDoc && !dropDoc.creationDate && (dropDoc.creationDate = new DateField); dropDoc instanceof Doc && AudioBox.ActiveRecordings.map(d => DocUtils.MakeLink({ doc: dropDoc }, { doc: d }, "audio link", "audio timeline")); return dropDoc; } diff --git a/src/client/views/nodes/VideoBox.tsx b/src/client/views/nodes/VideoBox.tsx index 7ab650dc9..439f2d85f 100644 --- a/src/client/views/nodes/VideoBox.tsx +++ b/src/client/views/nodes/VideoBox.tsx @@ -261,7 +261,6 @@ export class VideoBox extends DocAnnotatableComponent {"" + Math.round(curTime)} diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts index f872bdf94..08a374fa9 100644 --- a/src/server/ApiManagers/UploadManager.ts +++ b/src/server/ApiManagers/UploadManager.ts @@ -206,6 +206,7 @@ export default class UploadManager extends ApiManager { { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" }, { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" }, { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" }, + { resizer: sharp().resize(1200, undefined, { withoutEnlargement: true }), suffix: "_o" }, // bcz: this should just be the original image, not a resized version ]; let isImage = false; if (pngs.includes(ext)) { @@ -224,6 +225,7 @@ export default class UploadManager extends ApiManager { const path = serverPathToFile(Directory.images, filename + resizer.suffix + ext); createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(path)); }); + } res.send(clientPathToFile(Directory.images, filename + ext)); }); -- cgit v1.2.3-70-g09d2 From 288e4d24b61d281819b7f0b5bb697edbcc9ed173 Mon Sep 17 00:00:00 2001 From: bob Date: Thu, 5 Mar 2020 10:34:20 -0500 Subject: masonry view fix to add notes to the top. 
fixed uploadURI to use image resizer better --- .../collections/CollectionMasonryViewFieldRow.tsx | 24 +++++++++++++--------- src/server/ApiManagers/UploadManager.ts | 7 +++---- 2 files changed, 17 insertions(+), 14 deletions(-) (limited to 'src/server/ApiManagers') diff --git a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx index 6ebd3194d..af3e18a4b 100644 --- a/src/client/views/collections/CollectionMasonryViewFieldRow.tsx +++ b/src/client/views/collections/CollectionMasonryViewFieldRow.tsx @@ -8,7 +8,7 @@ import Measure from "react-measure"; import { Doc } from "../../../new_fields/Doc"; import { PastelSchemaPalette, SchemaHeaderField } from "../../../new_fields/SchemaHeaderField"; import { ScriptField } from "../../../new_fields/ScriptField"; -import { StrCast } from "../../../new_fields/Types"; +import { StrCast, NumCast } from "../../../new_fields/Types"; import { numberRange } from "../../../Utils"; import { Docs } from "../../documents/Documents"; import { DragManager } from "../../util/DragManager"; @@ -139,9 +139,10 @@ export class CollectionMasonryViewFieldRow extends React.Component { this._createAliasSelected = false; const key = StrCast(this.props.parent.props.Document._pivotField); - const newDoc = Docs.Create.TextDocument("", { _height: 18, _width: 200, title: value }); + const newDoc = Docs.Create.TextDocument(value, { _autoHeight: true, _width: 200, title: value }); newDoc[key] = this.getValue(this.props.heading); - return this.props.parent.props.addDocument(newDoc); + const docs = this.props.parent.childDocList; + return docs ? (docs.splice(0, 0, newDoc) ? true : false) : this.props.parent.props.addDocument(newDoc); } deleteRow = undoBatch(action(() => { @@ -274,6 +275,15 @@ export class CollectionMasonryViewFieldRow extends React.Component + {(chromeStatus !== 'view-mode' && chromeStatus !== 'disabled') ? +
+ +
: null + }
- {(chromeStatus !== 'view-mode' && chromeStatus !== 'disabled') ? -
- -
: null - }
; } @@ -317,7 +321,7 @@ export class CollectionMasonryViewFieldRow extends React.Component + style={{ background: evContents !== `NO ${key.toUpperCase()} VALUE` ? this._color : "lightgrey" }}> {evContents === `NO ${key.toUpperCase()} VALUE` ? (null) :
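The UploadManager.ts hunk that follows writes the uploaded data URI out under the original-size name and leaves the scaled variants to the _s/_m/_l resizers. A rough standalone sketch of that naming scheme, assuming SizeSuffix.Original corresponds to the "_o" suffix seen in the earlier resizer hunk and that InjectSize splices the suffix in before the extension (both inferred, not stated in this patch):

    // hypothetical stand-in for InjectSize(filename, suffix)
    const injectSize = (filename: string, suffix: string) => {
        const dot = filename.lastIndexOf(".");
        return dot < 0 ? filename + suffix : filename.slice(0, dot) + suffix + filename.slice(dot);
    };

    injectSize("face.png", "_o"); // "face_o.png", where the original bytes are written
    // the resizers then emit face_s.png (100px), face_m.png (400px) and face_l.png (900px),
    // while the route still responds with the un-suffixed client path for face.png.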
diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 08a374fa9..50a759c9d 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -7,7 +7,7 @@ import { extname, basename, dirname } from 'path';
 import { createReadStream, createWriteStream, unlink } from "fs";
 import { publicDirectory, filesDirectory } from "..";
 import { Database } from "../database";
-import { DashUploadUtils } from "../DashUploadUtils";
+import { DashUploadUtils, InjectSize, SizeSuffix } from "../DashUploadUtils";
 import * as sharp from 'sharp';
 import { AcceptibleMedia, Upload } from "../SharedMediaTypes";
 import { normalize } from "path";
@@ -199,14 +199,13 @@ export default class UploadManager extends ApiManager {
                     res.status(401).send("incorrect parameters specified");
                     return;
                 }
-                return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, filename)).then((savedName: string) => {
+                return imageDataUri.outputFile(uri, serverPathToFile(Directory.images, InjectSize(filename, SizeSuffix.Original))).then((savedName: string) => {
                     const ext = extname(savedName).toLowerCase();
                     const { pngs, jpgs } = AcceptibleMedia;
                     const resizers = [
                         { resizer: sharp().resize(100, undefined, { withoutEnlargement: true }), suffix: "_s" },
                         { resizer: sharp().resize(400, undefined, { withoutEnlargement: true }), suffix: "_m" },
                         { resizer: sharp().resize(900, undefined, { withoutEnlargement: true }), suffix: "_l" },
-                        { resizer: sharp().resize(1200, undefined, { withoutEnlargement: true }), suffix: "_o" }, // bcz: this should just be the original image, not a resized version
                     ];
                     let isImage = false;
                     if (pngs.includes(ext)) {
@@ -225,7 +224,7 @@ export default class UploadManager extends ApiManager {
                         const path = serverPathToFile(Directory.images, filename + resizer.suffix + ext);
                         createReadStream(savedName).pipe(resizer.resizer).pipe(createWriteStream(path));
                     });
-
+
                     }
                     res.send(clientPathToFile(Directory.images, filename + ext));
                 });
-- cgit v1.2.3-70-g09d2

From 6dd6d455a8a8bf8235b753591acd7761ad6cd91f Mon Sep 17 00:00:00 2001
From: Sam Wilkins
Date: Sun, 8 Mar 2020 14:58:35 -0400
Subject: removed recommender from branch

---
 src/client/ClientRecommender.tsx      |   8 +-
 src/client/apis/IBM_Recommender.ts    |  66 ++++----
 src/server/ApiManagers/UtilManager.ts |  44 +++---
 src/server/Recommender.ts             | 274 +++++++++++++++++-----------------
 4 files changed, 196 insertions(+), 196 deletions(-)

(limited to 'src/server/ApiManagers')

diff --git a/src/client/ClientRecommender.tsx b/src/client/ClientRecommender.tsx
index cb1674943..0e67a6e57 100644
--- a/src/client/ClientRecommender.tsx
+++ b/src/client/ClientRecommender.tsx
@@ -5,10 +5,10 @@ import { CognitiveServices, Confidence, Tag, Service } from "./cognitive_service
 import React = require("react");
 import { observer } from "mobx-react";
 import { observable, action, computed, reaction } from "mobx";
-var assert = require('assert');
-var sw = require('stopword');
-var FeedParser = require('feedparser');
-var https = require('https');
+// var assert = require('assert');
+// var sw = require('stopword');
+// var FeedParser = require('feedparser');
+// var https = require('https');
 import "./ClientRecommender.scss";
 import { JSXElement } from "babel-types";
 import { RichTextField } from "../new_fields/RichTextField";
diff --git a/src/client/apis/IBM_Recommender.ts b/src/client/apis/IBM_Recommender.ts
index da6257f28..4e1c541c8 100644
--- a/src/client/apis/IBM_Recommender.ts
+++ b/src/client/apis/IBM_Recommender.ts
@@ -1,40 +1,40 @@
-import { Opt } from "../../new_fields/Doc";
+// import { Opt } from "../../new_fields/Doc";
 
-const NaturalLanguageUnderstandingV1 = require('ibm-watson/natural-language-understanding/v1');
-const { IamAuthenticator } = require('ibm-watson/auth');
+// const NaturalLanguageUnderstandingV1 = require('ibm-watson/natural-language-understanding/v1');
+// const { IamAuthenticator } = require('ibm-watson/auth');
 
-export namespace IBM_Recommender {
+// export namespace IBM_Recommender {
 
-    // pass to IBM account is Browngfx1
+//     // pass to IBM account is Browngfx1
 
-    const naturalLanguageUnderstanding = new NaturalLanguageUnderstandingV1({
-        version: '2019-07-12',
-        authenticator: new IamAuthenticator({
-            apikey: 'tLiYwbRim3CnBcCO4phubpf-zEiGcub1uh0V-sD9OKhw',
-        }),
-        url: 'https://gateway-wdc.watsonplatform.net/natural-language-understanding/api'
-    });
+//     const naturalLanguageUnderstanding = new NaturalLanguageUnderstandingV1({
+//         version: '2019-07-12',
+//         authenticator: new IamAuthenticator({
+//             apikey: 'tLiYwbRim3CnBcCO4phubpf-zEiGcub1uh0V-sD9OKhw',
+//         }),
+//         url: 'https://gateway-wdc.watsonplatform.net/natural-language-understanding/api'
+//     });
 
-    const analyzeParams = {
-        'text': 'this is a test of the keyword extraction feature I am integrating into the program',
-        'features': {
-            'keywords': {
-                'sentiment': true,
-                'emotion': true,
-                'limit': 3
-            },
-        }
-    };
+//     const analyzeParams = {
+//         'text': 'this is a test of the keyword extraction feature I am integrating into the program',
+//         'features': {
+//             'keywords': {
+//                 'sentiment': true,
+//                 'emotion': true,
+//                 'limit': 3
+//             },
+//         }
+//     };
 
-    export const analyze = async (_parameters: any): Promise> => {
-        try {
-            const response = await naturalLanguageUnderstanding.analyze(_parameters);
-            console.log(response);
-            return (JSON.stringify(response, null, 2));
-        } catch (err) {
-            console.log('error: ', err);
-            return undefined;
-        }
-    };
+//     export const analyze = async (_parameters: any): Promise> => {
+//         try {
+//             const response = await naturalLanguageUnderstanding.analyze(_parameters);
+//             console.log(response);
+//             return (JSON.stringify(response, null, 2));
+//         } catch (err) {
+//             console.log('error: ', err);
+//             return undefined;
+//         }
+//     };
 
-}
\ No newline at end of file
+// }
\ No newline at end of file
diff --git a/src/server/ApiManagers/UtilManager.ts b/src/server/ApiManagers/UtilManager.ts
index d18529cf2..ad8119bf4 100644
--- a/src/server/ApiManagers/UtilManager.ts
+++ b/src/server/ApiManagers/UtilManager.ts
@@ -3,11 +3,11 @@ import { Method } from "../RouteManager";
 import { exec } from 'child_process';
 import RouteSubscriber from "../RouteSubscriber";
 import { red } from "colors";
-import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
-import { Recommender } from "../Recommender";
+// import { IBM_Recommender } from "../../client/apis/IBM_Recommender";
+// import { Recommender } from "../Recommender";
 
-const recommender = new Recommender();
-recommender.testModel();
+// const recommender = new Recommender();
+// recommender.testModel();
 import executeImport from "../../scraping/buxton/final/BuxtonImporter";
 
 export default class UtilManager extends ApiManager {
@@ -27,25 +27,25 @@ export default class UtilManager extends ApiManager {
             }
         });
 
-        register({
-            method: Method.POST,
-            subscription: "/IBMAnalysis",
-            secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
-        });
+        // register({
+        //     method: Method.POST,
+        //     subscription: "/IBMAnalysis",
+        //     secureHandler: async ({ req, res }) => res.send(await IBM_Recommender.analyze(req.body))
+        // });
 
-        register({
-            method: Method.POST,
-            subscription: "/recommender",
-            secureHandler: async ({ req, res }) => {
-                const keyphrases = req.body.keyphrases;
-                const wordvecs = await recommender.vectorize(keyphrases);
-                let embedding: Float32Array = new Float32Array();
-                if (wordvecs && wordvecs.dataSync()) {
-                    embedding = wordvecs.dataSync() as Float32Array;
-                }
-                res.send(embedding);
-            }
-        });
+        // register({
+        //     method: Method.POST,
+        //     subscription: "/recommender",
+        //     secureHandler: async ({ req, res }) => {
+        //         const keyphrases = req.body.keyphrases;
+        //         const wordvecs = await recommender.vectorize(keyphrases);
+        //         let embedding: Float32Array = new Float32Array();
+        //         if (wordvecs && wordvecs.dataSync()) {
+        //             embedding = wordvecs.dataSync() as Float32Array;
+        //         }
+        //         res.send(embedding);
+        //     }
+        // });
 
         register({
diff --git a/src/server/Recommender.ts b/src/server/Recommender.ts
index 1d2cb3858..aacdb4053 100644
--- a/src/server/Recommender.ts
+++ b/src/server/Recommender.ts
@@ -1,137 +1,137 @@
-//import { Doc } from "../new_fields/Doc";
-//import { StrCast } from "../new_fields/Types";
-//import { List } from "../new_fields/List";
-//import { CognitiveServices } from "../client/cognitive_services/CognitiveServices";
-
-// var w2v = require('word2vec');
-var assert = require('assert');
-var arxivapi = require('arxiv-api-node');
-import requestPromise = require("request-promise");
-import * as use from '@tensorflow-models/universal-sentence-encoder';
-import { Tensor } from "@tensorflow/tfjs-core/dist/tensor";
-require('@tensorflow/tfjs-node');
-
-//http://gnuwin32.sourceforge.net/packages/make.htm
-
-export class Recommender {
-
-    private _model: any;
-    static Instance: Recommender;
-    private dimension: number = 0;
-    private choice: string = ""; // Tensorflow or Word2Vec
-
-    constructor() {
-        console.log("creating recommender...");
-        Recommender.Instance = this;
-    }
-
-    /***
-     * Loads pre-trained model from TF
-     */
-
-    public async loadTFModel() {
-        let self = this;
-        return new Promise(res => {
-            use.load().then(model => {
-                self.choice = "TF";
-                self._model = model;
-                self.dimension = 512;
-                res(model);
-            });
-        }
-
-        );
-    }
-
-    /***
-     * Loads pre-trained model from word2vec
-     */
-
-    // private loadModel(): Promise {
-    //     let self = this;
-    //     return new Promise(res => {
-    //         w2v.loadModel("./node_modules/word2vec/examples/fixtures/vectors.txt", function (err: any, model: any) {
-    //             self.choice = "WV";
-    //             self._model = model;
-    //             self.dimension = model.size;
-    //             res(model);
-    //         });
-    //     });
-    // }
-
-    /***
-     * Testing
-     */
-
-    public async testModel() {
-        if (!this._model) {
-            await this.loadTFModel();
-        }
-        if (this._model) {
-            if (this.choice === "WV") {
-                let similarity = this._model.similarity('father', 'mother');
-                console.log(similarity);
-            }
-            else if (this.choice === "TF") {
-                const model = this._model as use.UniversalSentenceEncoder;
-                // Embed an array of sentences.
-                const sentences = [
-                    'Hello.',
-                    'How are you?'
-                ];
-                const embeddings = await this.vectorize(sentences);
-                if (embeddings) embeddings.print(true /*verbose*/);
-                // model.embed(sentences).then(embeddings => {
-                //     // `embeddings` is a 2D tensor consisting of the 512-dimensional embeddings for each sentence.
-                //     // So in this example `embeddings` has the shape [2, 512].
-                //     embeddings.print(true /* verbose */);
-                // });
-            }
-        }
-        else {
-            console.log("model not found :(");
-        }
-    }
-
-    /***
-     * Uses model to convert words to vectors
-     */
-
-    public async vectorize(text: string[]): Promise {
-        if (!this._model) {
-            await this.loadTFModel();
-        }
-        if (this._model) {
-            if (this.choice === "WV") {
-                let word_vecs = this._model.getVectors(text);
-                return word_vecs;
-            }
-            else if (this.choice === "TF") {
-                const model = this._model as use.UniversalSentenceEncoder;
-                return new Promise(res => {
-                    model.embed(text).then(embeddings => {
-                        res(embeddings);
-                    });
-                });
-
-            }
-        }
-    }
-
-    // public async trainModel() {
-    //     console.log("phrasing...");
-    //     w2v.word2vec("./node_modules/word2vec/examples/eng_news-typical_2016_1M-sentences.txt", './node_modules/word2vec/examples/my_phrases.txt', {
-    //         cbow: 1,
-    //         size: 200,
-    //         window: 8,
-    //         negative: 25,
-    //         hs: 0,
-    //         sample: 1e-4,
-    //         threads: 20,
-    //         iter: 200,
-    //         minCount: 2
-    //     });
-    //     console.log("phrased!!!");
-    // }
-
-}
+// //import { Doc } from "../new_fields/Doc";
+// //import { StrCast } from "../new_fields/Types";
+// //import { List } from "../new_fields/List";
+// //import { CognitiveServices } from "../client/cognitive_services/CognitiveServices";
+
+// // var w2v = require('word2vec');
+// var assert = require('assert');
+// var arxivapi = require('arxiv-api-node');
+// import requestPromise = require("request-promise");
+// import * as use from '@tensorflow-models/universal-sentence-encoder';
+// import { Tensor } from "@tensorflow/tfjs-core/dist/tensor";
+// require('@tensorflow/tfjs-node');
+
+// //http://gnuwin32.sourceforge.net/packages/make.htm
+
+// export class Recommender {
+
+//     private _model: any;
+//     static Instance: Recommender;
+//     private dimension: number = 0;
+//     private choice: string = ""; // Tensorflow or Word2Vec
+
+//     constructor() {
+//         console.log("creating recommender...");
+//         Recommender.Instance = this;
+//     }
+
+//     /***
+//      * Loads pre-trained model from TF
+//      */
+
+//     public async loadTFModel() {
+//         let self = this;
+//         return new Promise(res => {
+//             use.load().then(model => {
+//                 self.choice = "TF";
+//                 self._model = model;
+//                 self.dimension = 512;
+//                 res(model);
+//             });
+//         }
+
+//         );
+//     }
+
+//     /***
+//      * Loads pre-trained model from word2vec
+//      */
+
+//     // private loadModel(): Promise {
+//     //     let self = this;
+//     //     return new Promise(res => {
+//     //         w2v.loadModel("./node_modules/word2vec/examples/fixtures/vectors.txt", function (err: any, model: any) {
+//     //             self.choice = "WV";
+//     //             self._model = model;
+//     //             self.dimension = model.size;
+//     //             res(model);
+//     //         });
+//     //     });
+//     // }
+
+//     /***
+//      * Testing
+//      */
+
+//     public async testModel() {
+//         if (!this._model) {
+//             await this.loadTFModel();
+//         }
+//         if (this._model) {
+//             if (this.choice === "WV") {
+//                 let similarity = this._model.similarity('father', 'mother');
+//                 console.log(similarity);
+//             }
+//             else if (this.choice === "TF") {
+//                 const model = this._model as use.UniversalSentenceEncoder;
+//                 // Embed an array of sentences.
+//                 const sentences = [
+//                     'Hello.',
+//                     'How are you?'
+//                 ];
+//                 const embeddings = await this.vectorize(sentences);
+//                 if (embeddings) embeddings.print(true /*verbose*/);
+//                 // model.embed(sentences).then(embeddings => {
+//                 //     // `embeddings` is a 2D tensor consisting of the 512-dimensional embeddings for each sentence.
+//                 //     // So in this example `embeddings` has the shape [2, 512].
+//                 //     embeddings.print(true /* verbose */);
+//                 // });
+//             }
+//         }
+//         else {
+//             console.log("model not found :(");
+//         }
+//     }
+
+//     /***
+//      * Uses model to convert words to vectors
+//      */
+
+//     public async vectorize(text: string[]): Promise {
+//         if (!this._model) {
+//             await this.loadTFModel();
+//         }
+//         if (this._model) {
+//             if (this.choice === "WV") {
+//                 let word_vecs = this._model.getVectors(text);
+//                 return word_vecs;
+//             }
+//             else if (this.choice === "TF") {
+//                 const model = this._model as use.UniversalSentenceEncoder;
+//                 return new Promise(res => {
+//                     model.embed(text).then(embeddings => {
+//                         res(embeddings);
+//                     });
+//                 });
+
+//             }
+//         }
+//     }
+
+//     // public async trainModel() {
+//     //     console.log("phrasing...");
+//     //     w2v.word2vec("./node_modules/word2vec/examples/eng_news-typical_2016_1M-sentences.txt", './node_modules/word2vec/examples/my_phrases.txt', {
+//     //         cbow: 1,
+//     //         size: 200,
+//     //         window: 8,
+//     //         negative: 25,
+//     //         hs: 0,
+//     //         sample: 1e-4,
+//     //         threads: 20,
+//     //         iter: 200,
+//     //         minCount: 2
+//     //     });
+//     //     console.log("phrased!!!");
+//     // }
+
+// }
-- cgit v1.2.3-70-g09d2

From e9a16afa46af3ecec0bd7b58f9ca13b85d62a860 Mon Sep 17 00:00:00 2001
From: Bob Zeleznik
Date: Tue, 17 Mar 2020 01:12:21 -0400
Subject: fixed up route handling for relative paths. changed search to
 dashsearch for server request.

---
 src/client/util/SearchUtil.ts           |  6 +++---
 src/server/ApiManagers/SearchManager.ts |  2 +-
 src/server/RouteManager.ts              | 36 ++++++++++++++++++++-----------
 src/server/index.ts                     |  3 ++-
 4 files changed, 30 insertions(+), 17 deletions(-)

(limited to 'src/server/ApiManagers')

diff --git a/src/client/util/SearchUtil.ts b/src/client/util/SearchUtil.ts
index 2d9c807dd..b597f1e07 100644
--- a/src/client/util/SearchUtil.ts
+++ b/src/client/util/SearchUtil.ts
@@ -34,7 +34,7 @@ export namespace SearchUtil {
     export function Search(query: string, returnDocs: false, options?: SearchParams): Promise;
     export async function Search(query: string, returnDocs: boolean, options: SearchParams = {}) {
         query = query || "*"; //If we just have a filter query, search for * as the query
-        const rpquery = Utils.prepend("/search");
+        const rpquery = Utils.prepend("/dashsearch");
         const gotten = await rp.get(rpquery, { qs: { ...options, q: query } });
         const result: IdSearchResult = gotten.startsWith("<") ? { ids: [], docs: [], numFound: 0, lines: [] } : JSON.parse(gotten);
         if (!returnDocs) {
@@ -52,7 +52,7 @@ export namespace SearchUtil {
         const newLines: string[][] = [];
         await Promise.all(fileids.map(async (tr: string, i: number) => {
             const docQuery = "fileUpload_t:" + tr.substr(0, 7); //If we just have a filter query, search for * as the query
-            const docResult = JSON.parse(await rp.get(Utils.prepend("/search"), { qs: { ...options, q: docQuery } }));
+            const docResult = JSON.parse(await rp.get(Utils.prepend("/dashsearch"), { qs: { ...options, q: docQuery } }));
             newIds.push(...docResult.ids);
             newLines.push(...docResult.ids.map((dr: any) => txtresult.lines[i]));
         }));
@@ -121,7 +121,7 @@ export namespace SearchUtil {
 
     export async function GetAllDocs() {
         const query = "*";
-        const response = await rp.get(Utils.prepend('/search'), {
+        const response = await rp.get(Utils.prepend('/dashsearch'), {
             qs: {
                 start: 0, rows: 10000, q: query
             },
diff --git a/src/server/ApiManagers/SearchManager.ts b/src/server/ApiManagers/SearchManager.ts
index be17c3105..5f7d1cf6d 100644
--- a/src/server/ApiManagers/SearchManager.ts
+++ b/src/server/ApiManagers/SearchManager.ts
@@ -61,7 +61,7 @@ export class SearchManager extends ApiManager {
 
         register({
             method: Method.GET,
-            subscription: "/search",
+            subscription: "/dashsearch",
             secureHandler: async ({ req, res }) => {
                 const solrQuery: any = {};
                 ["q", "fq", "start", "rows", "hl", "hl.fl"].forEach(key => solrQuery[key] = req.query[key]);
diff --git a/src/server/RouteManager.ts b/src/server/RouteManager.ts
index d8265582e..c88f3bb51 100644
--- a/src/server/RouteManager.ts
+++ b/src/server/RouteManager.ts
@@ -79,6 +79,7 @@ export default class RouteManager {
         }
     }
 
+    static routes: string[] = [];
     /**
      *
      * @param initializer
@@ -86,11 +87,18 @@ export default class RouteManager {
     addSupervisedRoute = (initializer: RouteInitializer): void => {
         const { method, subscription, secureHandler, publicHandler, errorHandler } = initializer;
 
+        typeof (initializer.subscription) === "string" && RouteManager.routes.push(initializer.subscription);
+        initializer.subscription instanceof RouteSubscriber && RouteManager.routes.push(initializer.subscription.root);
+        initializer.subscription instanceof Array && initializer.subscription.map(sub => {
+            typeof (sub) === "string" && RouteManager.routes.push(sub);
+            sub instanceof RouteSubscriber && RouteManager.routes.push(sub.root);
+        });
         const isRelease = this._isRelease;
         let redirected = "";
         const supervised = async (req: Request, res: Response) => {
             let { user } = req;
             const { originalUrl: target } = req;
+            console.log("TARGET: " + target);
             if (process.env.DB === "MEM" && !user) {
                 user = { id: "guest", email: "", userDocumentId: "guestDocId" };
             }
@@ -126,26 +134,30 @@ export default class RouteManager {
                 const original = url.replace(start, "");
                 const theurl = original.match(/http[s]?:\/\/[^\/]*/)![0];
                 const newdirect = start + encodeURIComponent(theurl + target);
-                if (newdirect !== redirected) {
-                    redirected = newdirect;
-                    console.log("redirect relative path: " + (theurl + target));
-                    res.redirect(redirected);
-                }
+                console.log("REDIRECT: " + (theurl + target));
+                res.redirect(newdirect);
             }
-            else {
-                if (target.startsWith("/doc/")) {
-                    !res.headersSent && setTimeout(() => {
+            else if (!res.headersSent) {
+                const which2 = RouteManager.routes.findIndex(r => (r !== "/" || r === target) && target.startsWith(r));
+                const which = Array.from(registered.keys()).findIndex(r => (r !== "/" || r === target) && target.startsWith(r));
+                console.log("WHICH = " + (which === -1 ? "" : Array.from(registered.keys())[which]));
+                if (which !== -1) {
+                    setTimeout(() => {
+                        console.log("handled:" + target);
                         if (!res.headersSent) {
-                            res.redirect("/login");
                             console.log(red(`Initiating fallback for ${target}. Please remove dangling promise from route handler`));
                             const warning = `request to ${target} fell through - this is a fallback response`;
                             res.send({ warning });
                         }
                     }, 1000);
-                } else {
-                    const warning = `request to ${target} fell through - this is a fallback response`;
-                    res.send({ warning });
                 }
+                else {
+                    console.log("unhandled:" + target);
+                    res.end();
+                }
+            } else {
+                console.log("pre-sent:" + target);
+                res.end();
             }
         };
         const subscribe = (subscriber: RouteSubscriber | string) => {
diff --git a/src/server/index.ts b/src/server/index.ts
index c4c05157a..def36e922 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -117,7 +117,8 @@ function routeSetter({ isRelease, addSupervisedRoute, logRegistrationOutcome }:
     addSupervisedRoute({
         method: Method.GET,
         subscription: "/*",
-        secureHandler: ({ res }) => { }
+        secureHandler: ({ res }) => {
+        }
     });
 
     logRegistrationOutcome();
-- cgit v1.2.3-70-g09d2

From b0e121a9d767ca30e5b6732e3aeabbda0e0a7e97 Mon Sep 17 00:00:00 2001
From: Sam Wilkins
Date: Wed, 18 Mar 2020 17:48:02 -0700
Subject: finalized cleanup

---
 src/server/ApiManagers/UploadManager.ts |  1 -
 src/server/DashUploadUtils.ts           | 21 +++++++++++++--------
 src/server/SharedMediaTypes.ts          |  2 +-
 src/server/index.ts                     |  2 ++
 4 files changed, 16 insertions(+), 10 deletions(-)

(limited to 'src/server/ApiManagers')

diff --git a/src/server/ApiManagers/UploadManager.ts b/src/server/ApiManagers/UploadManager.ts
index 42e33ece0..98f029c7d 100644
--- a/src/server/ApiManagers/UploadManager.ts
+++ b/src/server/ApiManagers/UploadManager.ts
@@ -43,7 +43,6 @@ export default class UploadManager extends ApiManager {
             method: Method.POST,
             subscription: "/uploadFormData",
             secureHandler: async ({ req, res }) => {
-                console.log("/upload register");
                 const form = new formidable.IncomingForm();
                 form.uploadDir = pathToDirectory(Directory.parsed_files);
                 form.keepExtensions = true;
diff --git a/src/server/DashUploadUtils.ts b/src/server/DashUploadUtils.ts
index dd99ff746..2af816df8 100644
--- a/src/server/DashUploadUtils.ts
+++ b/src/server/DashUploadUtils.ts
@@ -60,7 +60,7 @@ export namespace DashUploadUtils {
 
         const types = type.split("/");
         const category = types[0];
-        const format = `.${types[1]}`;
+        let format = `.${types[1]}`;
 
         switch (category) {
             case "image":
@@ -77,8 +77,12 @@ export namespace DashUploadUtils {
                     return UploadPdf(file);
                 }
 
            case "audio":
+                const components = format.split(";");
+                if (components.length > 1) {
+                    format = components[0];
+                }
                 if (audioFormats.includes(format)) {
-                    return MoveParsedFile(file, Directory.audio);
+                    return UploadAudio(file, format);
                 }
         }
@@ -86,12 +90,6 @@ export namespace DashUploadUtils {
         return { source: file, result: new Error(`Could not upload unsupported file (${name}) with upload type (${type}).`) };
     }
 
-    async function UploadAudio(file: File) {
-        const { path: sourcePath } = file;
-
-        return MoveParsedFile(file, Directory.audio);
-    }
-
     async function UploadPdf(file: File) {
         const { path: sourcePath } = file;
         const dataBuffer = readFileSync(sourcePath);
@@ -105,6 +103,13 @@ export namespace DashUploadUtils {
         return MoveParsedFile(file, Directory.pdfs);
     }
 
+    const manualSuffixes = [".webm"];
+
+    async function UploadAudio(file: File, format: string) {
+        const suffix = manualSuffixes.includes(format) ? format : undefined;
+        return MoveParsedFile(file, Directory.audio, suffix);
+    }
+
     /**
      * Uploads an image specified by the @param source to Dash's /public/files/
     * directory, and returns information generated during that upload
diff --git a/src/server/SharedMediaTypes.ts b/src/server/SharedMediaTypes.ts
index 3d3683912..2495123b7 100644
--- a/src/server/SharedMediaTypes.ts
+++ b/src/server/SharedMediaTypes.ts
@@ -10,7 +10,7 @@ export namespace AcceptibleMedia {
     export const imageFormats = [...pngs, ...jpgs, ...gifs, ...webps, ...tiffs];
     export const videoFormats = [".mov", ".mp4"];
     export const applicationFormats = [".pdf"];
-    export const audioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a", ".webm;codecs=opus"];
+    export const audioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a", ".webm"];
 }
 
 export namespace Upload {
diff --git a/src/server/index.ts b/src/server/index.ts
index 10205314a..f4446352f 100644
--- a/src/server/index.ts
+++ b/src/server/index.ts
@@ -25,6 +25,7 @@ import { yellow } from "colors";
 import { DashSessionAgent } from "./DashSession/DashSessionAgent";
 import SessionManager from "./ApiManagers/SessionManager";
 import { AppliedSessionAgent } from "./DashSession/Session/agents/applied_session_agent";
+import { Utils } from "../Utils";
 
 export const onWindows = process.platform === "win32";
 export let sessionAgent: AppliedSessionAgent;
@@ -37,6 +38,7 @@ export const filesDirectory = path.resolve(publicDirectory, "files");
  * before clients can access the server should be run or awaited here.
  */
 async function preliminaryFunctions() {
+    // Utils.TraceConsoleLog();
     await Logger.initialize();
     await GoogleCredentialsLoader.loadCredentials();
     GoogleApiServerUtils.processProjectCredentials();
-- cgit v1.2.3-70-g09d2
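The "finalized cleanup" patch above normalizes recorded audio uploads: a browser-recorded blob can report a MIME type such as "audio/webm;codecs=opus", so the codec parameters are stripped before the subtype is matched against the accepted audio formats, and ".webm" files get their suffix attached explicitly when the parsed file is moved. A small self-contained sketch of that normalization step, assuming all we need is a mapping from MIME type to file suffix (audioSuffixFor is an illustrative name, not part of DashUploadUtils):

    // Sketch only: maps a MIME type like "audio/webm;codecs=opus" to an accepted file suffix.
    const acceptedAudioFormats = [".wav", ".mp3", ".flac", ".au", ".aiff", ".m4a", ".webm"];

    function audioSuffixFor(mimeType: string): string | undefined {
        const [category, subtype] = mimeType.split("/");
        if (category !== "audio" || !subtype) {
            return undefined;
        }
        // Drop codec parameters ("webm;codecs=opus" -> "webm") before matching.
        const format = `.${subtype.split(";")[0]}`;
        return acceptedAudioFormats.includes(format) ? format : undefined;
    }

    // audioSuffixFor("audio/webm;codecs=opus") -> ".webm"
    // audioSuffixFor("audio/x-unknown")        -> undefined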