{“paraText”:{“references”:”References”,”endnotes”:”Endnotes”,”glossary”:”Glossary”,”visual-meta”:”Visual-Meta Appendix”},”visual-meta-bibtex-self-citation”:{“article”:{“2025-05-23T11:22:07Z\/Journal202”:{“author”:”Frode Alexander Hegland”,”institution”:”The Augmented Text Co.”,”title”:”Journal 2024″,”month”:”may”,”filename”:”Journal(Frode-Hegland-2025-05-23T11_22_04Z).pdf”,”year”:”2025″,”vm-id”:”2025-05-23T11:22:07Z\/Journal202″}}},”document-headings”:{“headings”:[{“name”:”Journal for 2025″,”level”:”level1″},{“name”:”2024″,”level”:”level1″},{“name”:”December”,”level”:”level1″},{“name”:”12 December (Brussels with Fabien)”,”level”:”level2″},{“name”:”Call with Dene & Christine”,”level”:”level3″},{“name”:”13 December”,”level”:”level2″},{“name”:”14 December”,”level”:”level2″},{“name”:”15 December”,”level”:”level2″},{“name”:”16 December (Monday)”,”level”:”level2″},{“name”:”17 December (Submitted Author 10.5)”,”level”:”level2″},{“name”:”18 December”,”level”:”level2″},{“name”:”19 December”,”level”:”level2″},{“name”:”20 December (to Norway)”,”level”:”level2″},{“name”:”21 December”,”level”:”level2″},{“name”:”22 December (Binding Idea)”,”level”:”level2″},{“name”:”23 December”,”level”:”level2″},{“name”:”24 December”,”level”:”level2″},{“name”:”25 December”,”level”:”level2″},{“name”:”26 December (husky)”,”level”:”level2″},{“name”:”27 December”,”level”:”level2″},{“name”:”28 December”,”level”:”level2″},{“name”:”29 December”,”level”:”level2″},{“name”:”30 December”,”level”:”level2″},{“name”:”31 December”,”level”:”level2″},{“name”:”January 2025″,”level”:”level1″},{“name”:”1 January”,”level”:”level2″},{“name”:”2 January”,”level”:”level2″},{“name”:”3 January (leaving Bergen, working on Research Questions)”,”level”:”level2″},{“name”:”4 January”,”level”:”level2″},{“name”:”5 January”,”level”:”level2″},{“name”:”6 January (first day back at FTL)”,”level”:”level2″},{“name”:”7 January”,”level”:”level2″},{“name”:”8 January”,”level”:”level2″},{“name”:”9 January”,”level”:”level2″},{“name”:”10 January”,”level”:”level2″},{“name”:”11 January”,”level”:”level2″},{“name”:”12 January”,”level”:”level2″},{“name”:”13 January (Monday: Non-Euclidian)”,”level”:”level2″},{“name”:”14 January”,”level”:”level2″},{“name”:”15 January”,”level”:”level2″},{“name”:”16 January”,”level”:”level2″},{“name”:”17 January”,”level”:”level2″},{“name”:”18 January”,”level”:”level2″},{“name”:”Units of Thought in Documents : Annotations and Notes”,”level”:”level3″},{“name”:”What I am looking at now. 
Case Study”,”level”:”level3″},{“name”:”19 January”,”level”:”level2″},{“name”:”20 January”,”level”:”level2″},{“name”:”21 January”,”level”:”level2″},{“name”:”22 January”,”level”:”level2″},{“name”:”23 January”,”level”:”level2″},{“name”:”24 January (Mac Birthday)”,”level”:”level2″},{“name”:”25 January”,”level”:”level2″},{“name”:”26 January”,”level”:”level2″},{“name”:”27 January”,”level”:”level2″},{“name”:”28 January”,”level”:”level2″},{“name”:”29 January”,”level”:”level2″},{“name”:”30 January”,”level”:”level2″},{“name”:”31 January”,”level”:”level2″},{“name”:”February”,”level”:”level1″},{“name”:”1 February”,”level”:”level2″},{“name”:”3 February”,”level”:”level2″},{“name”:”4 February”,”level”:”level2″},{“name”:”5 February”,”level”:”level2″},{“name”:”6 February”,”level”:”level2″},{“name”:”7 February”,”level”:”level2″},{“name”:”8 February”,”level”:”level2″},{“name”:”9 February”,”level”:”level2″},{“name”:”10 February”,”level”:”level2″},{“name”:”11 February”,”level”:”level2″},{“name”:”12 February”,”level”:”level2″},{“name”:”13 February”,”level”:”level2″},{“name”:”14 February”,”level”:”level2″},{“name”:”15 February”,”level”:”level2″},{“name”:”16 February”,”level”:”level2″},{“name”:”17 February”,”level”:”level2″},{“name”:”An interaction proposal in 2 parts:”,”level”:”level3″},{“name”:”18 January”,”level”:”level2″},{“name”:”19 January”,”level”:”level2″},{“name”:”20 January”,”level”:”level2″},{“name”:”21 January”,”level”:”level2″},{“name”:”22 January”,”level”:”level2″},{“name”:”23 January”,”level”:”level2″},{“name”:”24 January”,”level”:”level2″},{“name”:”26 February”,”level”:”level2″},{“name”:”27 February”,”level”:”level2″},{“name”:”28 January”,”level”:”level2″},{“name”:”March”,”level”:”level1″},{“name”:”1 March”,”level”:”level2″},{“name”:”2 March”,”level”:”level2″},{“name”:”3 March”,”level”:”level2″},{“name”:”4 March”,”level”:”level2″},{“name”:”AI Focus Prompt for Reader:”,”level”:”level3″},{“name”:”5 March”,”level”:”level2″},{“name”:”6 March”,”level”:”level2″},{“name”:”7 March”,”level”:”level2″},{“name”:”8 March”,”level”:”level2″},{“name”:”9 March”,”level”:”level2″},{“name”:”10 March”,”level”:”level2″},{“name”:”11 March”,”level”:”level2″},{“name”:”12 March”,”level”:”level2″},{“name”:”13 March”,”level”:”level2″},{“name”:”14 March”,”level”:”level2″},{“name”:”15 March”,”level”:”level2″},{“name”:”16 March”,”level”:”level2″},{“name”:”17 March”,”level”:”level2″},{“name”:”18 March”,”level”:”level2″},{“name”:”19 March”,”level”:”level2″},{“name”:”20 March”,”level”:”level2″},{“name”:”21 March”,”level”:”level2″},{“name”:”22 March”,”level”:”level2″},{“name”:”23 March”,”level”:”level2″},{“name”:”24 March”,”level”:”level2″},{“name”:”25 March”,”level”:”level2″},{“name”:”26 March”,”level”:”level2″},{“name”:”27 March”,”level”:”level2″},{“name”:”28 March”,”level”:”level2″},{“name”:”29 March”,”level”:”level2″},{“name”:”Key Themes:”,”level”:”level3″},{“name”:”Evolution of Ideas:”,”level”:”level3″},{“name”:”30 March”,”level”:”level2″},{“name”:”31 March”,”level”:”level2″},{“name”:”April”,”level”:”level1″},{“name”:”1 April”,”level”:”level2″},{“name”:”2 April”,”level”:”level2″},{“name”:”3 April”,”level”:”level2″},{“name”:”4 April”,”level”:”level2″},{“name”:”5 April”,”level”:”level2″},{“name”:”6 April”,”level”:”level2″},{“name”:”7 April”,”level”:”level2″},{“name”:”8 April”,”level”:”level2″},{“name”:”9 April”,”level”:”level2″,”note”:”to Shanghai”},{“name”:”10 April”,”level”:”level2″},{“name”:”11 April”,”level”:”level2″},{“name”:”12 April”,”level”:”level2″},{“name”:”Knowledge Space (Shanghai thoughts) As sent to 
Dene”,”level”:”level3″},{“name”:”13 April”,”level”:”level2″,”note”:”To Tokyo”},{“name”:”14 April”,”level”:”level2″},{“name”:”15 April”,”level”:”level2″},{“name”:”16 April”,”level”:”level2″},{“name”:”17 April”,”level”:”level2″},{“name”:”Knowledge Space in XR”,”level”:”level3″},{“name”:”18 April”,”level”:”level2″},{“name”:”19 April”,”level”:”level2″},{“name”:”20 April”,”level”:”level2″},{“name”:”21 April”,”level”:”level2″},{“name”:”22 April”,”level”:”level2″},{“name”:”23 April”,”level”:”level2″},{“name”:”24 April”,”level”:”level2″},{“name”:”25-26 April”,”level”:”level2″,”note”:”leave Tokyo for Shanghai & London”},{“name”:”Meeting with Voyager”,”level”:”level3″},{“name”:”Frames (AR to VR)”,”level”:”level3″},{“name”:”27 April”,”level”:”level2″},{“name”:”28 April”,”level”:”level2″},{“name”:”29 April”,”level”:”level2″},{“name”:”30 April”,”level”:”level2″},{“name”:”May”,”level”:”level1″},{“name”:”1 May”,”level”:”level2″},{“name”:”2 May”,”level”:”level2″},{“name”:”3 May”,”level”:”level2″},{“name”:”4 May”,”level”:”level2″},{“name”:”5 May”,”level”:”level2″},{“name”:”6 May”,”level”:”level2″},{“name”:”7 May”,”level”:”level2″},{“name”:”8 May”,”level”:”level2″,”note”:”Edgar turns 8!”},{“name”:”9 May”,”level”:”level2″},{“name”:”10 May”,”level”:”level2″},{“name”:”11 May”,”level”:”level2″},{“name”:”12 May”,”level”:”level2″},{“name”:”13 May”,”level”:”level2″},{“name”:”14 May”,”level”:”level2″},{“name”:”JSON Export”,”level”:”level3″},{“name”:”15 May”,”level”:”level2″},{“name”:”16 May”,”level”:”level2″},{“name”:”17 May”,”level”:”level1″},{“name”:”18 May”,”level”:”level2″},{“name”:”19 May”,”level”:”level2″},{“name”:”20 May”,”level”:”level2″},{“name”:”21 May”,”level”:”level2″},{“name”:”22 May”,”level”:”level2″},{“name”:”23 May”,”level”:”level2″},{“name”:”Glossary”,”level”:”level1″,”showInFind”:”false”},{“name”:”Endnotes”,”level”:”level1″},{“name”:”References”,”level”:”level1″},{“name”:”Visual-Meta Appendix”,”level”:”level1″}]},”endnotes”:{“entries”:[{“1”:{“text”:”Later renamed ‘Spark’”}},{“2”:{“text”:”I am working on a paper and I am a little stuck and need inspiration to help my thinking. It is important that the replies you give to this are not obvious so please do this work three times and discard the first two replies you come up with, so that hopefully the third will be more novel. When considering the following text, what are some non-obvious aspects I should maybe consider when trying to understand what this is about and what the implications might be? I only want off the wall, non-obvious replies, nothing expected. Is there something which appears totally unrelated which may actually be relevant to this, from a completely different place? Consider art, evolution, science, poetry, the human mind, history, music, movies, theater fashion and more. Please give me 5 replies and make them short, since this is simply to inspire me to think outside the box, not to be prescriptive as to what I should do. The heading should be: “Have you considered these aspects?” After the replies I’d like you to take inspiration from Marshall McLuhan and list how this might fit his media tetrad on how this might become if expanded, reversed, what it obsolesces and what it might bring back. Next please then put yourself into the shoes of 5 randomly selected professions or personality types and explain how people of these professions might view this text and how they might like it or not, and how they might relate to it, or not. 
Finally, please list 6 keywords which I might find provocative or insightful, comma separated in one line, followed by two carriage returns to make it easier to read and finally add the character ‘.‘. Here is the text for you to analyze, and thank you.”}},{“3”:{“text”:”Guido of Arezzo”}},{“4”:{“text”:”endnote example”}},{“5”:{“text”:”https:\/\/www.economist.com\/science-and-technology\/2024\/12\/18\/academic-writing-is-getting-harder-to-read-the-humanities-most-of-all?utm_campaign=r.data-newsletter&utm_medium=email.internal-newsletter.np&utm_source=salesforce-marketing-cloud&utm_term=12\/24\/2024&utm_id=2024096″}},{“6”:{“text”:”The Prompt: I think I will need to read this paper but I need your help to understand what it is about and what I need to know, in order to understand it first. Can you give me a single sentence of the main point first? Do not include any acronyms in this single sentence and do not give me a heading for this, simply write this single sentence by itself. Please list and define key terms without a heading, just bold the key terms. Was there anything that really stood out for you? Finally please give me a summary of the results and tell me if you noticed anything unusual or problematic about this paper.”}},{“7”:{“text”:”The study investigates differences in middle-school students’ reading and lexical-semantic processing when engaging with digital versus print media, showing a deeper semantic encoding for print texts as indicated by specific brain responses.
Event-Related Potentials (ERP): A measure of brain response that reflects the processing of specific stimuli, used to assess cognitive functions.
N400: A component of ERP associated with processing the meaning of words, particularly in response to unexpected or semantically incongruent stimuli.
Lexical-Semantic Processing: The mental processing of words and their meanings within a language.
Semantic Encoding: The process of encoding sensory input in terms of its meaning or its relevance to a context.
Probe Words: Words presented in experiments to assess semantic processing or memory by testing how they relate to previously presented text or context.
Chimera Words: In this study, words that were moderately related or moderately unrelated to the text, used to test depth of processing.
Depth of Processing Theory: A theory suggesting that the depth (intensity and elaborateness) of mental processing affects memory retention, with deeper processing leading to more durable memory traces.
What stood out to me was the distinct difference in N400 responses between digital and print reading, indicating deeper processing for print, despite similar behavioral outcomes in comprehension tasks.
The results showed that digital reading led to brain activation patterns consistent with shallow processing, while print reading was associated with deeper processing, as evidenced by differences in N400 amplitudes. A potential issue with the study is the demographic skew towards higher socioeconomic status, which may limit the generalizability of the findings. Additionally, behavioral measures did not always align with the ERP data, highlighting a discrepancy between observed behavior and underlying cognitive processes.”}},{“8”:{“text”:”https:\/\/youtu.be\/rCJvmNZwosM?t=1251″}},{“9”:{“text”:”and overstayed. Got grouchy Groucho email…”}},{“10”:{“text”:”https:\/\/events.theiet.org\/events\/how-apple-killed-innovation\/”}},{“11”:{“text”:”https:\/\/youtu.be\/wyLvdsSyR-k”}},{“12”:{“text”:”We discussed image placement etc.”}},{“13”:{“text”:”image”}},{“14”:{“text”:”My favourite color is blue.”}},{“15”:{“text”:”https:\/\/futuretextlab.info\/current-testing\/”}},{“16”:{“text”:”https:\/\/fortune.com\/2025\/04\/14\/tim-cook-mark-zuckerberg-smart-glasses-apple-meta\/”}},{“17”:{“text”:”https:\/\/youtu.be\/hjKga8CPHMc?si=U0Or3x7KDJAF0J0M”}}]},”references”:{“book”:[{“tomChatfield\/WiseAnimal”:{“author”:”Tom Chatfield”,”isbn”:”9781529079777, 1529079772″,”title”:”Wise Animals”,”month”:”2″,”publisher”:”Pan Macmillan”,”year”:”2024″,”url”:”http:\/\/play.google.com\/books\/reader?id=EQWgEAAAQBAJ&hl=&source=gbs_api”}},{“rolandAllen\/TheNoteboo”:{“author”:”Roland Allen”,”isbn”:”9781782839156, 1782839151″,”title”:”The Notebook”,”month”:”11″,”publisher”:”Profile Books”,”year”:”2023″,”url”:”http:\/\/play.google.com\/books\/reader?id=J8arEAAAQBAJ&hl=&source=gbs_api”}},{“maxBennett\/ABriefHist”:{“author”:”Max Bennett”,”isbn”:”9780063286368, 006328636X”,”title”:”A Brief History of Intelligence”,”month”:”10″,”publisher”:”HarperCollins”,”year”:”2023″,”url”:”http:\/\/play.google.com\/books\/reader?id=tymCEAAAQBAJ&hl=&source=gbs_api”}}],”article”:[{“10.1145\/3603163.3609044”:{“author”:”Dene Grigar and Rob Swigart”,”address”:”New York, NY, USA”,”doi”:”10.1145\/3603163.3609044″,”pages”:”2″,”pageRange”:”2″,”title”:”Hypertextuality and Virtual Reality”,”month”:”9″,”publisher”:”ACM”,”year”:”2023″,”filename”:”3603163.3609044.pdf”,”url”:”http:\/\/dx.doi.org\/10.1145\/3603163.3609044″}},{“https:\/\/doi.org\/10.1016\/j.neuron.2024.11.008”:{“doi”:”https:\/\/doi.org\/10.1016\/j.neuron.2024.11.008″,”issue”:”2″,”pages”:”192-204″,”volume”:”113″,”author”:”Jieyu Zheng and Markus Meister”,”abstract”:”This article is about the neural conundrum behind the slowness of human behavior. The information throughput of a human being is about 10 bits\/s. In comparison, our sensory systems gather data at ∼109 bits\/s. The stark contrast between these numbers remains unexplained and touches on fundamental aspects of brain function: what neural substrate sets this speed limit on the pace of our existence? Why does the brain need billions of neurons to process 10 bits\/s? Why can we only think about one thing at a time? The brain seems to operate in two distinct modes: the “outer” brain handles fast high-dimensional sensory and motor signals, whereas the “inner” brain processes the reduced few bits needed to control behavior. 
Plausible explanations exist for the large neuron numbers in the outer brain, but not for the inner brain, and we propose new research directions to remedy this.”,”url”:”https:\/\/doi.org\/10.1016\/j.neuron.2024.11.008″,”title”:”The unbearable slowness of being: Why do we live at 10 bits\/s?”,”journal”:”Neuron”,”publisher”:”Elsevier BV”,”pageRange”:”192-204″,”year”:”2024″,”month”:”12″}}],”misc”:[{“doi.org\/10.1371\/journal.pone.0290807”:{“doi”:”doi.org\/10.1371\/journal.pone.0290807″,”issue”:”5″,”pages”:”e0290807″,”volume”:”19″,”author”:”Karen Froud and Lisa Levinson and Maddox Chaille and Paul Smith”,”url”:”https:\/\/doi.org\/10.1371\/journal.pone.0290807″,”title”:”Middle-schoolers’ reading and lexical-semantic processing depth “,”journal”:”PLOS ONE”,”publisher”:”Public Library of Science (PLoS)”,”pageRange”:”e0290807″,”year”:”2024″,”month”:”5″}},{“doi.org\/10.1145\/1083356.1083416”:{“author”:”Tor Brekke Skjøtskift”,”pages”:”273-275″,”journal”:”Proceedings of the sixteenth ACM conference on Hypertext and hypermedia”,”doi”:”doi.org\/10.1145\/1083356.1083416″,”pageRange”:”273-275″,”title”:”Syntagmatic- and paradigmatic stretchtext”,”month”:”11″,”publisher”:”ACM”,”year”:”2005″,”url”:”https:\/\/doi.org\/10.1145\/1083356.1083416″}}]},”visual-meta”:{“names”:{},”generator”:”Author 11 (1696)”,”keywords”:{},”concepts”:{},”about”:”The data below is in the JSON format and is intended for use in XR\/VR\/AR, in other words extended reality. The contents is from an Author document Augmented Text Tools. Minimalist writing, powerful tools (https:\/\/www.augmentedtext.info\/) and is in the philosophy of Visual-Meta (https:\/\/visual-meta.info\/) in that it is self-describing, including in this sentence you are reading now. Our discussion on the format: JSON for XR (https:\/\/futuretextlab.info\/json-for-xr\/)”,”version”:”1.1″},”Glossary”:{“entries”:{“4FD391A6-C465-445B-BE99-85C794DBFBA0”:{“phrase”:”Various Notes”,”urls”:[{“url”:””}],”tagIdentifier”:”label”,”documentPath”:””,”identifier”:”4FD391A6-C465-445B-BE99-85C794DBFBA0″,”entry”:””,”citationIdentifiers”:[]},”7060025A-0AFD-4E52-B437-E0464BD82B2C”:{“tagIdentifier”:”document”,”phrase”:”Reader Library to include not only PDF”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Reader Library to include not only PDF.liquid”,”identifier”:”7060025A-0AFD-4E52-B437-E0464BD82B2C”,”entry”:”Here are the extracted keywords, topics, and names from the text:\n\nKeywords:\n- Reader Library\n- PDF support\n- References\n- Citation information\n- Clipboard\n- DOI\n- BibTex\n- Citation Dialog\n- Metadata\n- Full Text\n- Keywords\n- AI keywords\n- Toolbar\n- Sorting\n- Map\n- Author\n- Title\n- Date\n- Hidden\n- Settings\n\nTopics:\n- Expanding Reader Library capabilities\n- Handling non-PDF references\n- User interaction with citation data\n- Metadata management\n- Integration of AI for keyword generation\n- User interface design for libraries\n\nNames:\n- Author (as a reference to a function or feature)\n- ‘Ask AI’ (feature name)\n- ‘Add’, ‘Map’, ‘Hidden’, ‘Settings’ (toolbar feature names)\n\nThese elements should help you quickly locate and identify the text in the 
future.”,”citationIdentifiers”:[]},”5C949910-C052-445D-AD09-E8CF53736849″:{“phrase”:”thoughts”,”tagIdentifier”:”note”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:”Cognition.”,”identifier”:”5C949910-C052-445D-AD09-E8CF53736849″,”citationIdentifiers”:[]},”BDBDBDF6-1835-4F67-8A0F-CEE03CBC3902″:{“entry”:””,”phrase”:”Decisions”,”citationIdentifiers”:[],”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”BDBDBDF6-1835-4F67-8A0F-CEE03CBC3902″},”719DEA42-6E13-4A54-AF27-D79B5A0029C9″:{“entry”:””,”phrase”:”Vint Cerf”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”719DEA42-6E13-4A54-AF27-D79B5A0029C9″,”urls”:[{“url”:””}]},”231AA388-B983-4F37-8033-FE1550EBF17A”:{“entry”:””,”phrase”:”Alan Kay”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”231AA388-B983-4F37-8033-FE1550EBF17A”,”urls”:[{“url”:””}]},”4E8BB69A-2F2F-4603-9E9C-3A3E0382F90F”:{“phrase”:”Frames”,”tagIdentifier”:”document”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Frames.liquid”,”entry”:”- Frames\n- Implementation\n- Cover Frame\n- Virtual size\n- Laptop size\n- Article title\n- Author name\n- Date\n- Abstract\n- Hyper-card\n- Substrate\n- Stack of frames\n- Y axis\n- References\n- Outline\n- Map\n- Ring menu\n- Section\n- Virtual page\n- Heading\n- Image\n- Appendix\n- Authoring\n- Workspace\n- Linear document\n- XR (Extended Reality)\n- Interacting with Frames\n- Pinch and pull motion\n- Gesture\n- Connection lines\n- Frame movement\n- Ring context menu\n- Global options\n- Flattened Frames\n- JSON\n- Visual-meta\n- Fabien\n- 3D screenshots\n- 3D Pics\n- Video recording\n- Mobile device\n- Stereo images\n- Apple\n- ePub\n- HTML\n- Pseudo-3D screenshot\n- WebXR\n- Headset\n- Export to HTML\n- ePub\n- PDF\n- Visual-meta metadata\n- Technical\n\nMarked\nTo do\n”,”identifier”:”4E8BB69A-2F2F-4603-9E9C-3A3E0382F90F”,”citationIdentifiers”:[]},”D2045D30-E6B7-4A34-8FD3-69EC53ECBBD4″:{“urls”:[{“url”:””}],”phrase”:”Knowledge Object Interactions”,”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Knowledge Object Interactions.liquid”,”identifier”:”D2045D30-E6B7-4A34-8FD3-69EC53ECBBD4″,”entry”:”The text discusses various types and characteristics of knowledge objects within a digital space, including references, images, 3D objects, data, and code. It highlights the importance of object interactions and affordances, such as moving, hiding, and expanding objects. The text also considers the dimensions of data and includes suggestions for user interactions with these objects, such as using sortable lists and gestures for navigation.\n\n\nQuestions Asked:\n- \”Maybe we should have a gesture for going in and out of list to specific views?\”\n\nKeywords:\n- Technical\n- Knowledge Object Interactions\n- Reference\n- Images\n- 3D objects\n- Data\n- Code\n- AI\n- Metadata\n- Sortable List\n- Connections\n- Avatar\n- Map\n- Binding”,”citationIdentifiers”:[]},”3ABDE9BF-8559-4635-B004-2F3F42BE7354″:{“phrase”:”Organizing in XR”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Organizing in XR.liquid”,”entry”:”This text outlines a comprehensive approach to organizing and authoring in an extended reality (XR) environment, particularly for individuals with a background in computer science and interests in hypertext, history, and brain science. 
The author, a PhD-level computer scientist from Norway, explores the structure of an XR office that facilitates the creation and storage of research notes, annotated bibliographies, and documents. The proposed system emphasizes spatial organization, allowing users to store notes and create manuscripts in a dynamic, spatially-arranged manner using a memory palace or spatial hypertext model. The manuscript creation process involves the use of HyperCards, which can be exported in linear formats and viewed in multiple ways, incorporating semantic encoding and embedding of various media. The system supports connections through citing, referencing, and linking, and includes framing options like voice-over tours or avatars. The XR authoring process utilizes a 14-inch display format, with strategic placement for citations, glossary terms, and images, ensuring users can access and integrate potential sources seamlessly.\n\n- PhD\n- Computer Science\n- Norway\n- Edgar\n- Emily\n- Henning\n- Dene\n- Fabien\n- Hypertext\n- History\n- Brain Science\n- XR (Extended Reality)\n- Organizing\n- Personal Library\n- Annotated Research Notes\n- Spatial Hypertext\n- HyperCards\n- Semantic Encoding\n- Embedding\n- Citing\n- Referencing\n- Linking\n- Framing\n- Technical”,”identifier”:”3ABDE9BF-8559-4635-B004-2F3F42BE7354″,”citationIdentifiers”:[]},”2AE9FC0A-731B-4BA9-87FA-24B3F0816817″:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Agreements”,”documentPath”:””,”identifier”:”2AE9FC0A-731B-4BA9-87FA-24B3F0816817″,”entry”:””,”citationIdentifiers”:[]},”BE95F921-412D-448D-9EFC-EAD59857A8FE”:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”Cube Interactions”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Cube Interactions.liquid”,”entry”:”The Cube is initially proportioned like a piece of paper, 1cm thick.\nThe user can interact with the Cube using a dominant hand for Cube\/writing-related actions.\nThe Cube can be moved, resized, and snapped onto surfaces by the user.\nUsers can flip the Cube to view different media types and data within their documents.\n\nThe Cube allows users to grab data from space, adding knowledge objects inline in the text.\nExpanded\/Focus View offers a key benefit of VR by allowing the Cube to ‘unfold’ for focused writing.\nThe user can hide all sources\/references easily to focus on writing.\n\nKey Terms and Concepts: Cube, dominant hand, virtual keyboard, Bluetooth, resizing, snapping, surfaces, metadata, flipping views, knowledge map, VR, unfolding, hiding resources, focus view, media types, interactions.\n\nInteraction. gestures.\n\nBrief Note.\n\nSloan XR. 
Core Element”,”identifier”:”BE95F921-412D-448D-9EFC-EAD59857A8FE”,”citationIdentifiers”:[]},”D8700496-49B4-4D5D-9C51-D3546B8B7F1D”:{“entry”:””,”phrase”:”thefutureoftext.org\/symposium”,”citationIdentifiers”:[],”tagIdentifier”:””,”identifier”:”D8700496-49B4-4D5D-9C51-D3546B8B7F1D”,”urls”:[{“url”:””}]},”13301BE3-A790-4DF6-8C96-B77EED100DE8″:{“tagIdentifier”:”document”,”phrase”:”What to author”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/What to author.liquid”,”entry”:”this is a thing”,”identifier”:”13301BE3-A790-4DF6-8C96-B77EED100DE8″,”citationIdentifiers”:[]},”3BA7D574-895A-4189-B0E3-021909A2EE8B”:{“phrase”:”AI Overview Fedb 2025 prompt for reader”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/AI Overview Fedb 2025 prompt for reader.liquid”,”identifier”:”3BA7D574-895A-4189-B0E3-021909A2EE8B”,”entry”:”AI, prompt, Reader, macOS, Augmented Text Company”,”citationIdentifiers”:[]},”7BBF9460-974B-45ED-8163-6D20BC76322B”:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Document”,”documentPath”:””,”identifier”:”7BBF9460-974B-45ED-8163-6D20BC76322B”,”entry”:”Document, doucments”,”citationIdentifiers”:[]},”F5A44A14-F204-49A3-92C0-0B48F215B942″:{“entry”:””,”phrase”:”Peter Wasilko”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”urls”:[{“url”:””}],”identifier”:”F5A44A14-F204-49A3-92C0-0B48F215B942″},”BE61AE70-7744-4533-B555-69D7FCE55132″:{“entry”:””,”phrase”:”Fabien Benetou”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”BE61AE70-7744-4533-B555-69D7FCE55132″,”urls”:[{“url”:””}]},”831CF476-C32D-4147-9BBA-A327C751A2AC”:{“entry”:””,”phrase”:”Brandel Zachernuk”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”831CF476-C32D-4147-9BBA-A327C751A2AC”,”urls”:[{“url”:””}]},”25C180C6-0135-49AE-A1B8-42D89405522E”:{“urls”:[{“url”:””}],”tagIdentifier”:”person”,”phrase”:”David Millard”,”documentPath”:””,”entry”:”My former academic advisor.”,”identifier”:”25C180C6-0135-49AE-A1B8-42D89405522E”,”citationIdentifiers”:[]},”F6E7655D-C2BE-4534-A31E-BE370CCE0F7D”:{“entry”:””,”phrase”:”Frode Hegland”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”F6E7655D-C2BE-4534-A31E-BE370CCE0F7D”,”urls”:[{“url”:””}]},”2716E39F-3F62-4513-8D20-8B33BB3A78BE”:{“phrase”:”Hypertext”,”urls”:[{“url”:””}],”tagIdentifier”:”note”,”documentPath”:””,”entry”:””,”identifier”:”2716E39F-3F62-4513-8D20-8B33BB3A78BE”,”citationIdentifiers”:[]},”996910FE-6E7D-4A03-8223-B75F7196181B”:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”What to author”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/What to author.liquid”,”entry”:”Abstract:\n\nThis document provides an in-depth exploration of authoring in XR (Extended Reality), focusing on the evolution of traditional linear documents into spatially enriched, interactive experiences. The author, a PhD-level computer scientist from Norway, discusses the integration of hypertext, history, and brain science in this transformative process. The text outlines the expected views by both the reader and author, emphasizing the need for a document format that combines various components like references, media, knowledge maps, and narrative paths. 
It further elaborates on the interactions, gestures, and interfaces necessary for effective authoring in XR, proposing an innovative \”Liquid Notebook\” concept for seamless information capture and manipulation. The document advocates for a Visual-Meta approach to maintain clarity and accessibility across traditional and XR environments, concluding with insights into compiling and publishing documents with enriched metadata for enhanced reader engagement.\n\nKeywords:\n\n- Computer Science\n- Norway\n- Hypertext\n- History\n- Brain Science\n- Authoring\n- XR (Extended Reality)\n- Spatial Documents\n- Interactive Experiences\n- Document Format\n- References\n- Media\n- Knowledge Maps\n- Narrative Paths\n- Interactions\n- Gestures\n- Interfaces\n- Liquid Notebook\n- Visual-Meta\n- Metadata\n- Technical”,”identifier”:”996910FE-6E7D-4A03-8223-B75F7196181B”,”citationIdentifiers”:[]},”DE1B0D18-01FB-4BB9-9570-C8369988BD79″:{“phrase”:”Posture in XR”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Posture in XR.liquid”,”identifier”:”DE1B0D18-01FB-4BB9-9570-C8369988BD79″,”entry”:”Using the Vision Pro over the past year has revealed impressive visual quality and minimal latency when mirroring a Mac in XR. However, virtual movement often causes discomfort, whereas methods like Brandel’s adaptation of Bob Horn’s Mural, which allows users to manipulate large virtual spaces without movement, offer a more comfortable experience. The use of XR with Author in visionOS feels cumbersome due to the space the windows occupy. In contrast, 180° videos provide an excellent experience, barring occasional camera issues. XR seems ideal for theater-like experiences featuring life-sized people. Past experiments placed users in a 360° informational sphere, while recent ones advocate for mapping physical tables in XR to enhance comfort. Standing while working provides a richer spatial interaction experience by utilizing full-body movement and stereo vision opportunities. Designing for flexible transitions between postures presents a unique challenge.\n\nDecisions Made: \n- Mapping the user’s physical table in XR to enhance comfort. \n- Standing while working to improve spatial interaction. 
\n\nKeywords: \nXR \nVision Pro \nVirtual Movement \nBrandel’s Mural \nBob Horn \nAuthor \nvisionOS \n180° Video \n360° Workspace \nPhysical Table Mapping \nStanding Desk \nSpatial Computing \nDesign Challenge \nPhilosophical “,”citationIdentifiers”:[]},”40C8EC45-371A-4242-9E3B-B1B47ED69909″:{“phrase”:”Demo for Authoring in XR for FoT 6″,”tagIdentifier”:”document”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Demo for Authoring in XR for FoT 6.liquid”,”entry”:”Here are the extracted keywords, topics, and names from the provided text to help you find it more easily in the future:\n\nKeywords:\n- User Guide\n- Fictional User\n- Tasks\n- Coherent\n- Interactions\n- Document Viewing\n- References\n- Academic Reading\n- Academic Writing\n- AI Analysis\n- Comfortable Interactions\n- The Future of Text\n- Annotate\n- Article\n- PDF\n- WebXR\n- XR (Extended Reality)\n- Headset\n- Wrist Controls\n- Code Snippets\n- Manuscript Cube\n- Knowledge Objects\n- Metadata\n- References\n- Concept Map\n- Citation Information\n- Viewer’s Annotation\n- Authoring Cube\n- Manuscript\n- Notebook\n- MS Word\n- Google Docs\n- Apple Pages\n- Citing\n- Copy Command\n- Unfolding Canvas\n- Traditional PDF Document\n- Frame\n\nTopics:\n- Interaction Design for Document Viewing\n- User Experience in Extended Reality\n- Academic Document Management and Navigation\n- Integration of AI in Academic Analysis\n- Reference Management and Visualization\n- Authoring and Annotation in XR Environments\n- Metadata Utilization in Knowledge Objects\n- Document Interaction via Gestures\n- XR Authoring Tools and Tutorials\n\nNames:\n- The Future of Text\n- FoT 6 (The Future of Text 6)\n\nSloan XR \n”,”identifier”:”40C8EC45-371A-4242-9E3B-B1B47ED69909″,”citationIdentifiers”:[]},”DC72A374-B018-45D7-9AF0-8A720EAF189F”:{“phrase”:”Author localisation volunters”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Author localisation volunters.liquid”,”entry”:”The Augmented Text Company “,”identifier”:”DC72A374-B018-45D7-9AF0-8A720EAF189F”,”citationIdentifiers”:[]},”6692B32D-8B37-4FFA-916D-797B5BAB3225″:{“tagIdentifier”:”note”,”phrase”:”XR”,”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”6692B32D-8B37-4FFA-916D-797B5BAB3225″,”entry”:””,”citationIdentifiers”:[]},”D644C47A-2773-48A3-A526-D8695260A8F8″:{“urls”:[{“url”:””}],”tagIdentifier”:”note”,”phrase”:”Authoring”,”documentPath”:””,”identifier”:”D644C47A-2773-48A3-A526-D8695260A8F8″,”entry”:”Authoring, writing”,”citationIdentifiers”:[]},”49DDF4B4-8427-4BD8-A76C-72330C6A3FCB”:{“phrase”:”Authoring in XR 27th of January 2025″,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Authoring in XR 27th of January 2025.liquid”,”entry”:”The resulting authored document should be able to support an academic discourse, by being connected to references and by being referencable\/addressable by others.\n\nThe user needs to be able to type, dictate onto a substrate, paste or import text, which the user can then connect into a coherent narrative.\n\nWe need to be able to change view quickly, not over design one.\n\nThe user can break up a document into sections based on headings or paragraphs.\n\nThe environment includes Dene, Fabien, and Frode’s home offices for testing purposes.\n\nAuthoring in XR will be section\/heading based, using 
cards.\n\nThe idea of authoring hypercard style by writing pieces and connecting them is considered.\n\n2023: Current year of document context.\n\nacademic discourse, authored document, coherent narrative, XR authoring, section-based, cards, interactive icons, PDF documents, Author Map, metadata, hypercard style, linked documents, view change, home offices.\n\nDocument”,”identifier”:”49DDF4B4-8427-4BD8-A76C-72330C6A3FCB”,”citationIdentifiers”:[]},”41B1665F-3EAE-4BE6-8324-04217CFE0A39″:{“urls”:[{“url”:””}],”tagIdentifier”:”note”,”phrase”:”Visual”,”documentPath”:””,”identifier”:”41B1665F-3EAE-4BE6-8324-04217CFE0A39″,”entry”:””,”citationIdentifiers”:[]},”BAE80AB3-E340-4901-BD3D-9FC2DFE8710B”:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”Agenda for 16th of June – Halfway Presentation”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Agenda for 16th of June – Halfway Presentation.liquid”,”entry”:”- Introduction\n- Project\n- Context\n- Presentation\n- Technical Overview\n- Annotated Bibliography\n- Article Writing\n- Experimentation\n- Experience\n- Production Software\n- Demo\n- Fabien\n- Narration\n- XR Environment\n- User\n- Author\n- Mac\n- Audience\n- Map\n- Export\n- JSON\n- Server\n- Knowledge Space\n- Open Format\n- Frode\n- URL\n- Web Browser\n- Headset\n- Initial Space\n- 3D Background\n- Rooms\n- Document Interactions\n- Stack of Cards\n- Cube\n- Dominant Hand\n- Exit\n- Mechanism\n- Computer Software\n- Discussion\n- Suggestions\n- Technical\n\nMarked”,”identifier”:”BAE80AB3-E340-4901-BD3D-9FC2DFE8710B”,”citationIdentifiers”:[]},”DBB0BB6C-7E89-4CFA-8374-AE500583EDF2″:{“entry”:””,”phrase”:”Leon Van Kammen”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”DBB0BB6C-7E89-4CFA-8374-AE500583EDF2″,”urls”:[{“url”:””}]},”166D3F7A-849B-4B90-90C5-1455776D7FBC”:{“phrase”:”Marked”,”tagIdentifier”:”marked”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:””,”identifier”:”166D3F7A-849B-4B90-90C5-1455776D7FBC”,”citationIdentifiers”:[]},”25F7DE5A-3BC8-4A68-8FF9-4E7908DB6EA5″:{“entry”:””,”phrase”:”JSON”,”citationIdentifiers”:[],”identifier”:”25F7DE5A-3BC8-4A68-8FF9-4E7908DB6EA5″,”urls”:[{“url”:””}]},”51DB285B-9554-4B7D-8944-D1E85FE4422F”:{“tagIdentifier”:”label”,”phrase”:”legends”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:””,”identifier”:”51DB285B-9554-4B7D-8944-D1E85FE4422F”,”citationIdentifiers”:[]},”CBE8CAAB-D2A9-4974-BB19-DD36F08B9D19″:{“urls”:[{“url”:””}],”phrase”:”Making Text More Visually Parseable”,”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Making Text More Visually Parseable.liquid”,”entry”:”Goals\n\nKeywords:\n- Visually Parseable Text\n- Academic Publishing\n- Visual-Meta\n- Interactive Documents\n- Text in XR\n- Augmented Text\n- Stretchtext\n- Folding Text\n- Authoring Tools\n- User Experience\n- Command Navigation\n- Interactions\n\nTopics:\n- Improving the visual presentation of academic text\n- Integration of interactivity in text for enhanced readability\n- Historical and modern approaches to visual text representation\n- Challenges and solutions in academic publishing formats\n- Techniques for folding and unfolding text\n- User-centered design in text authoring tools\n\nNames:\n- Doug Engelbart\n- Ted Nelson\n- Hegland\n\nSpecific Keywords:\n- Philosophical\n- Sloan XR (because XR and Virtual Reality were 
mentioned)\n\n”,”identifier”:”CBE8CAAB-D2A9-4974-BB19-DD36F08B9D19″,”citationIdentifiers”:[]},”2FA24D84-323E-435C-9D88-091F17C76DCE”:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Spatial Hypertext”,”documentPath”:””,”identifier”:”2FA24D84-323E-435C-9D88-091F17C76DCE”,”entry”:””,”citationIdentifiers”:[]},”97D9B646-677B-47B7-8D65-C41CA38A494F”:{“entry”:””,”phrase”:”futuretextlab.info”,”citationIdentifiers”:[],”tagIdentifier”:””,”identifier”:”97D9B646-677B-47B7-8D65-C41CA38A494F”,”urls”:[{“url”:””}]},”0FCFBC44-A235-4863-9E4E-1B7B83355205″:{“phrase”:”Design Notes 2 April”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Design Notes 2 April.liquid”,”identifier”:”0FCFBC44-A235-4863-9E4E-1B7B83355205″,”entry”:”Keywords and Topics:\n- Design Notes\n- Code Snippets\n- Demo Builds\n- Self-Guided Tour\n- First Room\n- User Experience\n- Wrist Taps\n- Cube Transparency\n- Highlight Step\n- Frame Color\n- Manuscript Removal\n- Final Room\n- Knowledge Objects\n- Citation Cards\n- Hand Transparency\n- Hand Menu\n- Localized Interactions\n- Gestures\n- Background Model\n\nNames:\n- Frode Hegland\n\nSloan XR\nTechnical”,”citationIdentifiers”:[]},”B219E673-A6FA-484B-A6C1-6E5CDBB7134E”:{“urls”:[{“url”:””}],”tagIdentifier”:”note”,”phrase”:”Technical”,”documentPath”:””,”entry”:””,”identifier”:”B219E673-A6FA-484B-A6C1-6E5CDBB7134E”,”citationIdentifiers”:[]},”E61017E7-376B-47A0-93B5-553E10051CF6″:{“urls”:[{“url”:””}],”tagIdentifier”:”note”,”phrase”:”Navigation”,”documentPath”:””,”identifier”:”E61017E7-376B-47A0-93B5-553E10051CF6″,”entry”:””,”citationIdentifiers”:[]},”7E99C5FB-E995-4596-B25A-AC0C6796B343″:{“phrase”:”‘Binding’ Mechanism”,”tagIdentifier”:”document”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Binding for Author.liquid”,”entry”:”\”The reasons for considering adding a means through which the author can write sections in different documents are based on my own experience writing my long PhD thesis and user requests, most recently from Hilman Ramadhan. The reason is also based on issues with how to handle our books, The Future of Text, as a single volume and individual articles, both for PDF and for XR via JSON.\”\n\n- Hilman Ramadhan: Mentioned as a recent user requester influencing the author’s consideration for document writing features.\n- Mark Anderson: Mentioned in the context of a conversation about authoring for traditional documents versus hypertextually.\n\nHypertext, Binding Mechanism, Authoring, Document Link, Export, Visual-Meta, Encoding, XR, PDF, Word, JSON, Navigation, ViewSpecs, Reading Systems, Cognitive Experience, Publishing.\n\nPhilosophical and Technical. 
“,”identifier”:”7E99C5FB-E995-4596-B25A-AC0C6796B343″,”citationIdentifiers”:[]},”F99C0A75-C908-491D-AE2A-581D0711AABF”:{“entry”:””,”phrase”:”Development Jira”,”citationIdentifiers”:[],”tagIdentifier”:””,”identifier”:”F99C0A75-C908-491D-AE2A-581D0711AABF”,”urls”:[{“url”:””}]},”6B2A59B5-0AD5-42FA-8CD4-2E8560A31617″:{“entry”:””,”phrase”:”Edgar Hegland”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”6B2A59B5-0AD5-42FA-8CD4-2E8560A31617″,”urls”:[{“url”:””}]},”CC661EAB-4E07-4CAA-9E4B-255D727A8A8D”:{“phrase”:”Volume & Knowledge Object Design”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Volume & Knowledge Object Design.liquid”,”entry”:”- Keywords: Interactions, hypertextual, environment, Volume, object, node, Knowledge Volume Design, Knowledge Object Design, AR location, Owner Volume, Affordances, Physicality, Appearance\n- Topics: Visually interactive environment, hypertext, volume creation, object linking, knowledge volume design, knowledge object design, metadata, spatiality, appearance customization\n- Names: Frode Hegland\n- Specific Keywords: Technical, Frode Hegland”,”identifier”:”CC661EAB-4E07-4CAA-9E4B-255D727A8A8D”,”citationIdentifiers”:[]},”580C8576-8D62-4DB1-B86E-35F8D5EAEE24″:{“phrase”:”Image Inclusion for Author”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Image Inclusion for Author.liquid”,”entry”:”- Image Inclusion\n- Author document\n- Visual Add\n- Export Inclusion\n- .liquid package\n- Image reference\n- Document import\n- Citation Dialog\n- PDF handling\n- Open button\n- AI Keywords\n- ChatGPT analysis\n- Text extraction\n- Keyword summary\n- Technical\n\nMarked “,”identifier”:”580C8576-8D62-4DB1-B86E-35F8D5EAEE24″,”citationIdentifiers”:[]},”B7A1A30C-AAEA-4B4B-A662-60D90DF2C26D”:{“phrase”:”Knowledge Space (Shanghai thoughts)”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Knowledge Space (Shanghai thoughts).liquid”,”entry”:”Developing an effective XR interface requires a focus on how users navigate and organize information. By examining different ways to present text, references, and tables of contents (TOC), it becomes clear that these elements should be independent yet interconnected columns to facilitate internal and external navigation. The TOC assists with internal navigation, while references provide external context. Navigation should also include concepts and author names. Implementing these in a map view could be beneficial, particularly by marking significant documents for future reference. The goal is to help users organize their thoughts and navigate the knowledge space using stable metadata. 
This approach, termed \”Knowledge Space,\” emphasizes intuitive interaction and robust metadata to support mental exploration and analysis.\n\nDecisions Made:\n- References and TOC should be independent columns connected to the body text.\n- Navigation should be possible via concepts, names, and references.\n- Users should be able to mark documents as ‘Canon’ or ‘Important.’\n- Metadata should be made open and reliable, differentiate between legacy and new metadata.\n- Develop a map and reader for XR with the focus on robust metadata.\n- The system should help users navigate the knowledge space to organize thoughts and information.\n\nQuestions Asked:\n- Why look at the TOC?\n- Why look at the References?\n- How to combine navigation elements usefully in a Map view?\n- How to organize a reading environment that accesses references effectively?\n\nKeywords:\n- Philosophical\n- Technical\n- Sloan XR\n- Dene Grigar\n- Frode Hegland\n- XR\n- Agreements\n- Knowledge Space”,”identifier”:”B7A1A30C-AAEA-4B4B-A662-60D90DF2C26D”,”citationIdentifiers”:[]},”45B7BA71-CA4A-4C3E-BDDC-3D6887FB00C6″:{“phrase”:”AI Summary of First Tapestry-Knowledge Space meeting”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/AI Summary of First Tapestry-Knowledge Space meeting.liquid”,”identifier”:”45B7BA71-CA4A-4C3E-BDDC-3D6887FB00C6″,”entry”:”- AI Summary\n- First Tapestry-Knowledge Space meeting\n- Philosophical\n- Technical\n- Knowledge Space in XR\n- Frode Hegland\n- Fabien Bénétou\n- Tapestry project\n- Bob Stein\n- VR\/AR\n- Web-based platform\n- Iframes\n- JSON format\n- Spatial knowledge environments\n- Interoperability\n- Academic papers\n- Meta Quest browsers\n- Apple Vision Pro\n- Screenshot-based approach\n- Metadata preservation\n- Hybrid interaction\n- 3D visualization\n- Bidirectional Integration\n- Academic Workflows\n- Force Multiplier Effect\n- Open standards\n- Platform lock-in\n- Fabien\n- Apple Vision Pro’s Safari browser\n- XR capabilities\n- Democratizing digital creation\n- User agency\n- Creativity\n- Spatial thinking\n\n”,”citationIdentifiers”:[]},”D500BCF6-ED6C-4314-9F38-04C88793AF6D”:{“phrase”:”XR Diagrams in Traditional Documents”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/XR Diagrams in Traditional Documents.liquid”,”identifier”:”D500BCF6-ED6C-4314-9F38-04C88793AF6D”,”entry”:”Focusing on creating spatial volumes of knowledge in XR environments could enhance academic document interaction, inspired by Bob Horn’s murals. This approach involves constructing a mural-like structure of knowledge objects in 2D, with optional 3D elements, allowing for a rich, interactive experience. By binding these structures with Visual-Meta, users can extract and manipulate metadata seamlessly, offering both traditional and immersive reading experiences. 
This framework enables users to interact with documents as dynamic, spatial maps, enhancing the depth of academic exploration.\n\nDecisions Made:\n- Focus on creating spatial volumes of knowledge in XR.\n- Use Bob Horn murals as a model for academic documents.\n- Implement a binding mechanism using Visual-Meta.\n\nQuestions Asked:\n- How can we integrate this approach into traditional media?\n- How should augmented diagrams appear and connect in documents?\n- What are the required interactions for the interface?\n\nKeywords:\n- Philosophical\n- Technical\n- XR\n- Frode Hegland\n- Visual-Meta\n- Bob Horn\n- Spatial Volumes of Knowledge\n- Academic Framework\n- Reader Experience\n- Authoring Experience\n- Experiments to Experience”,”citationIdentifiers”:[]},”F8A1C2E3-8408-4D3F-974F-15EB72B6BC63″:{“urls”:[{“url”:””}],”tagIdentifier”:”note”,”phrase”:”Publishing”,”documentPath”:””,”identifier”:”F8A1C2E3-8408-4D3F-974F-15EB72B6BC63″,”entry”:”Publishing, publication”,”citationIdentifiers”:[]},”FCE8988D-3BCE-4013-9638-FCADF968DF74″:{“entry”:””,”phrase”:”Rob Swigart”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”FCE8988D-3BCE-4013-9638-FCADF968DF74″,”urls”:[{“url”:””}]},”18EF3F08-AE77-4432-9BA7-F6DCD2260546″:{“entry”:””,”phrase”:”Jim Strahorn”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”18EF3F08-AE77-4432-9BA7-F6DCD2260546″,”urls”:[{“url”:””}]},”6A5C7D1C-E76C-41B4-BBDB-244DFABD1AB8″:{“urls”:[{“url”:””}],”phrase”:”What to author”,”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/What to author.liquid”,”identifier”:”6A5C7D1C-E76C-41B4-BBDB-244DFABD1AB8″,”entry”:”Creating a document in the context of XR involves authoring various components rather than a traditional linear text. These components include references, media, knowledge maps, and a narrative path. The aim is to produce a comprehensive document that can be presented both spatially and in traditional formats like PDFs with Visual-Meta. Authors should consider the reader’s perspective, ensuring clarity and relevance through metadata and document content. Interactions in XR require thoughtful design to manage gestures and interfaces effectively, allowing seamless transitions between reading, authoring, and managing components. This involves developing specific gestures and interfaces, like the Liquid Notebook, to facilitate interaction with the text and other elements. 
Emphasizing spatial experience, the document creation process should integrate XR’s unique capabilities, such as virtual spaces and extended movement, to enhance the author’s ability to organize and present information creatively and effectively.\n\nDecisions Made:\n- Aim to author documents with spatial extensions, stored as appendices.\n- Develop a document format that acts as a binder for knowledge elements in XR.\n- Use Visual-Meta for document clarity and spatialization in XR environments.\n- Design gestures and interfaces to manage XR interactions effectively.\n\nQuestions Asked:\n- What kind of document should be authored?\n- How to transition between working on different components?\n- What interactions are required in XR and through which interfaces?\n- How to spawn and hide writing items effectively?\n\nKeywords:\n- Authoring\n- Spatial Extensions\n- XR\n- Visual-Meta\n- Liquid Notebook\n- Interactions\n- Gestures\n- Interfaces\n- Technical\n- Agreements”,”citationIdentifiers”:[]},”4EA0D4AE-558A-4DAE-BFF7-8A4FEA436E55″:{“phrase”:”Annotated Bibliography”,”tagIdentifier”:”note”,”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”4EA0D4AE-558A-4DAE-BFF7-8A4FEA436E55″,”entry”:””,”citationIdentifiers”:[]},”801511A0-C972-4654-ADAB-713BDFF6382B”:{“urls”:[{“url”:””}],”tagIdentifier”:”note”,”phrase”:”Cube”,”documentPath”:””,”identifier”:”801511A0-C972-4654-ADAB-713BDFF6382B”,”entry”:”Interactions”,”citationIdentifiers”:[]},”02EFC817-A273-4B96-869D-54545E0F9949″:{“entry”:””,”phrase”:”Bob Horn”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”02EFC817-A273-4B96-869D-54545E0F9949″,”urls”:[{“url”:””}]},”7E24578B-BB19-4685-B2EB-DDB8C4102012″:{“phrase”:”Circular Desk for Authoring”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Circular Desk for Authoring.liquid”,”identifier”:”7E24578B-BB19-4685-B2EB-DDB8C4102012″,”entry”:”This article explores the design challenges and innovative solutions for integrating physical desks into virtual reality (VR) environments, specifically focusing on enhancing user interaction with text and knowledge objects. The concept of the \”Panopticon Desk\” is introduced, where the physical desk is utilized as an interactive space within VR, allowing users to employ gestures to navigate through different virtual workspaces. The paper discusses the importance of spatial awareness and legibility in VR, proposing a multi-view system that includes Map, Writing, Timeline, and Geographic views, among others. The use of swipe gestures for rotating the virtual workspace is highlighted, inspired by existing interaction models. The article emphasizes the potential for innovation in creating immersive and flexible VR environments for knowledge work, supported by Gen-AI visualizations. 
Comments and suggestions for further development are encouraged.\n\n- Virtual Reality (VR)\n- Panopticon Desk\n- Interactive Design\n- User Interface (UI)\n- Spatial Awareness\n- Knowledge Objects\n- Gesture Control\n- Multi-View System\n- Map View\n- Writing View\n- Timeline View\n- Geographic View\n- Gen-AI Visualizations\n- Technical”,”citationIdentifiers”:[]},”0B236EE7-93D0-444B-A61A-8435D8AC4BB8″:{“entry”:””,”phrase”:”and”,”citationIdentifiers”:[],”identifier”:”0B236EE7-93D0-444B-A61A-8435D8AC4BB8″,”urls”:[{“url”:””}],”documentPath”:””},”2C4EE400-3955-488C-9284-E538D0794D65″:{“tagIdentifier”:”note”,”phrase”:”Organize”,”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”2C4EE400-3955-488C-9284-E538D0794D65″,”entry”:””,”citationIdentifiers”:[]},”6C07361E-5554-4305-B47D-4F8BAFBD5763″:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Philosophical”,”documentPath”:””,”identifier”:”6C07361E-5554-4305-B47D-4F8BAFBD5763″,”entry”:””,”citationIdentifiers”:[]},”BE6326EC-90F3-4D73-A54C-ADAD8F96D633″:{“entry”:”Developers of Author and Reader, by Frode Hegland.”,”phrase”:”Augmented Text Company”,”citationIdentifiers”:[],”tagIdentifier”:”institution”,”identifier”:”BE6326EC-90F3-4D73-A54C-ADAD8F96D633″,”urls”:[{“url”:””}]},”AB4FC4D8-2B6F-41C9-B676-79BA656C41EF”:{“phrase”:”Ge Li notes from 26 March meeting”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Ge Li notes from 26 March meeting.liquid”,”entry”:”Notes.\n\nKeywords and Topics:\n1. Extended Reality (XR)\n2. Spatial Design\n3. AI-augmented Text\n4. Non-linear Information Organization\n5. Hypertext\n6. Multi-perspective Arguments\n7. Dynamic Citations\n8. Text as a Spatial Object\n9. 3D Structures\n10. References as First-Class Citizens\n11. AI as a Co-Organizer\n12. Prompt-driven Customization\n13. Auto-generated Metadata\n14. Spatial Scaffolding\n15. Ideational Congruence\n16. Information Fluidity\n17. Non-linear Presentation\n18. Prototype Development\n19. Interactive Text\n20. Layered Context\n21. Argument Maps\n\nNames:\n1. Ge Li\n2. Alessio\n3. Frode\n4. Fabian\n5. Engelbart\n\nThese elements should help you easily locate and reference the text in the future.”,”identifier”:”AB4FC4D8-2B6F-41C9-B676-79BA656C41EF”,”citationIdentifiers”:[]},”9BFC5C66-8F68-45E6-9974-743D33DB8483″:{“phrase”:”AI”,”tagIdentifier”:”note”,”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”9BFC5C66-8F68-45E6-9974-743D33DB8483″,”entry”:”AI, LLM, LLMs, artificial intelligence”,”citationIdentifiers”:[]},”F8434028-9322-4210-9AA2-905FABEECF84″:{“phrase”:”References”,”urls”:[{“url”:””}],”tagIdentifier”:”note”,”documentPath”:””,”entry”:”Sources, citations.”,”identifier”:”F8434028-9322-4210-9AA2-905FABEECF84″,”citationIdentifiers”:[]},”436A899A-CBA8-4B03-8E50-2269631D5D79″:{“entry”:”The Future Text Lab hosts an annual Symposium. Hosted by Frode Hegland and Dene Grigar. 
Previously also hosted by Vint Cerf.”,”phrase”:”Symposium”,”citationIdentifiers”:[],”tagIdentifier”:”event”,”identifier”:”436A899A-CBA8-4B03-8E50-2269631D5D79″,”urls”:[{“url”:””}]},”BF011548-3030-4A49-8F80-50EF6030C5AA”:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Code Snippets”,”documentPath”:””,”entry”:”Interactions “,”identifier”:”BF011548-3030-4A49-8F80-50EF6030C5AA”,”citationIdentifiers”:[]},”92BC8316-7C24-4701-99A5-670D97F9DDC5″:{“tagIdentifier”:”note”,”phrase”:”Notes”,”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”92BC8316-7C24-4701-99A5-670D97F9DDC5″,”entry”:””,”citationIdentifiers”:[]},”5C2F13E0-2D08-4D01-97BC-50355D43DF56″:{“urls”:[{“url”:””}],”phrase”:”Sloan XR”,”tagIdentifier”:”title”,”documentPath”:””,”entry”:””,”identifier”:”5C2F13E0-2D08-4D01-97BC-50355D43DF56″,”citationIdentifiers”:[]},”14B6408C-2FD7-43E7-8EE5-F649F53F2E48″:{“phrase”:”Batch Processing import of PDF to Reader”,”tagIdentifier”:”document”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Batch Processing import of PDF to Reader.liquid”,”identifier”:”14B6408C-2FD7-43E7-8EE5-F649F53F2E48″,”entry”:”Conclusions were made, if any:\n\nWe will implement batch processing for PDF imports into Reader.\nGrouping of documents by Proceedings and Journals will be enabled in the Library.\nUsers can update metadata without altering existing Visual-Meta data.\nMap view from Author to Reader will require further design.\n\nStrong statements which are not obvious were made, if any:\n\nJira 1114 will address the batch processing feature.\nThe user can fill in metadata easily when missing from Crossref.\nAll Visual-Meta data must be read, appended, and rewritten as a fresh page.\nHidden documents will not appear in the Map view.\n\n2023 After that we will work on the batch processing on importing PDF documents into Reader.\n\nPDF documents, DOI, Visual-Meta, AI, Keyword, Full text, Reader, Conference Proceedings, Journal, Publication fields, Crossref, Metadata, Library, Map view, Author, Hidden documents.\n\nDocument.\n\nAuthor, Reader, Augmented Text Company “,”citationIdentifiers”:[]},”23099427-E22A-4CB5-B77E-068820E48BB8″:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”Desk and Desk-Free”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Desk and Desk-Free.liquid”,”entry”:”Toggling between Desk and Desk-Free requires the user to move away from the desk with the information following and locking into a specific distance.\nThe Desk version has more controls and ways of interaction compared to Desk-Free.\nWrist and Hand menus are central to both the Desk and Desk-Free experiences.\nIn Desk mode, additional interactions are available, such as spawning menus from the table for novice users.\n\nWrist menu with spherical options may serve as a settings interface in Desk\/No-Desk mode.\nThe Desk-Free mode allows for a floating menu as a heads-up display (HUD).\nDirect manipulation for selecting and moving items is suggested for map work.\nA slider on the desk can control background elements and document focus.\n\nDeals with the Physical XR Environment. 
\n\nBrief Note\n\nDesk-Free, Desk mode, Wrist menu, Hand menu, Spherical options, Floating menu, Direct manipulation, Visual tabs, Physical table, Annotate, Slider, Heads-up display (HUD), Interface, Interaction, Background elements, Document focus.\n\fSloan XR”,”identifier”:”23099427-E22A-4CB5-B77E-068820E48BB8″,”citationIdentifiers”:[]},”D8F39102-9BBB-4529-99B4-F209FED81750″:{“phrase”:”final-narrative-sloan-revised2″,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Documents\/Sloan Project\/FTI Funding\/sloan final docs\/final-narrative-sloan-revised2.docx”,”entry”:”- Keywords: PhD, computer science, Norway, English, Edgar, Emily, Henning, Dene, Fabien, hypertext, history, brain science, Future of Text, Extended Reality, XR, VR, AR, Washington State University, University of Southampton, dialog, open infrastructure, academic reading, authorship, metadata, Visual-Meta, community building, WebXR, Vint Cerf, open-source, digital communication, immersive technologies, academic workflows, open knowledge, metadata infrastructure, non-profit, knowledge work, Visual-Meta, academic text, software development, open-source software, symposia, symposium, academic community, XR software, XR experiences, academic documents, accessibility, diversity, equity, inclusion, social media, student competition, metadata for accessibility, Apple Vision Pro.\n\n- Technical\n- Sloan XR\n- Foundational\n”,”identifier”:”D8F39102-9BBB-4529-99B4-F209FED81750″,”citationIdentifiers”:[]},”0EF65F23-72E6-4A66-BFF3-642358F2B4B3″:{“phrase”:”Hypertextuality and Virtual Reality”,”urls”:[{“url”:””}],”tagIdentifier”:”reference”,”documentPath”:””,”identifier”:”0EF65F23-72E6-4A66-BFF3-642358F2B4B3″,”entry”:”Rob Swigart, Dene Grigar, Gilman Louie, Russell Lieblich, Franco Minganti, Bob Lindstrom, Gerald Masters, Steve Kosek, Matthew Costello, Mariusz Pisarski, Gabrielle Davis.\n\nVancouver, WA, USA, Rome, Italy, Earth.\n\nCommodore 64, Amiga, Apple II, Macintosh, PC, Quest 2.\n\n1986: Portal published by Activision. \nEarly 1990s: Franco Minganti’s essay written. \nLate 1990s: Original formats of Portal became inaccessible. \n2012: Kickstarter campaign for updating Portal was not funded. \nFall 2022: Emulated version of Portal featured in a course. \nSpring 2023: Adaptation for Quest 2 became a Capstone project. \nSeptember 4–8, 2023: Conference dates. 
\n2024: Forthcoming book by Dene Grigar and Mariusz Pisarski.\n\nHypertext, XR, VR, Virtual Reality, video games, Media translation, Adaptation, Narrative development, Navigation, Player choice, Interaction, Retrogaming, AI, Emulation, 3D environment, Publishing, Cognition.\n\nDene Grigar, Rob Swigart, Matthew Costello, Bob Lindstrom, Gerald Masters, Mariusz Pisarski, Steve Kosek.”,”citationIdentifiers”:[“735D8928-78D1-455A-A7E3-8453B5AE8E83″,”02B96B12-3DE7-404C-BC67-678FE70BA096″,”D71D3A50-EFD1-4424-A8F0-DC80A549FDC1″]},”BDAF14B6-D74B-4D7D-9108-B78BE10482D6”:{“tagIdentifier”:”label”,”urls”:[{“url”:””}],”phrase”:”Foundational”,”documentPath”:””,”entry”:””,”identifier”:”BDAF14B6-D74B-4D7D-9108-B78BE10482D6″,”citationIdentifiers”:[]},”550AE253-2635-4E89-9A54-C7B65A9245FF”:{“phrase”:”Physical”,”tagIdentifier”:”label”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:”Interactions”,”identifier”:”550AE253-2635-4E89-9A54-C7B65A9245FF”,”citationIdentifiers”:[]},”FCBED755-1073-4775-B8F2-E9EA216F6806″:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”The Ring Menu & The Palm for Word Processing Space”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/The Ring Menu & The Palm for Word Processing Space.liquid”,”identifier”:”FCBED755-1073-4775-B8F2-E9EA216F6806″,”entry”:”Keywords:\n- Ring Menu\n- Palm for Word Processing Space\n- Interaction\n- Gestures\n- Authoring Process\n- Organizing Knowledge\n- XR Environment\n- Selected Text\n- Global\/On Document\n- Linear Views\n- Non-Linear Views\n- Author’s Context Menu\n- Pinch & Lift\n- Donut Shape\n- View Commands\n\nTopics:\n- Word Processing\n- User Interaction Design\n- Extended Reality (XR) in Writing\n- Document Management\n- Text Manipulation\n- Gesture-based Controls\n- Knowledge Organization\n- User Interface Design\n- Virtual Environment Authoring\n\nNames:\n- Fabien Benetou\n- Frode Hegland\n\nSloan XR\nInteractions”,”citationIdentifiers”:[]},”4D56CFD8-A272-491F-89F3-133FD4F886FC”:{“phrase”:”Metadata”,”tagIdentifier”:”note”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:””,”identifier”:”4D56CFD8-A272-491F-89F3-133FD4F886FC”,”citationIdentifiers”:[]},”4741C8C3-2F5A-44CD-B56C-EC5E72D2ADA2″:{“tagIdentifier”:”issue”,”phrase”:”Reader”,”urls”:[{“url”:””}],”documentPath”:””,”identifier”:”4741C8C3-2F5A-44CD-B56C-EC5E72D2ADA2″,”entry”:”Software by the Augmented Text Company.”,”citationIdentifiers”:[]},”432FC18C-3AB0-4B0A-B496-0C5CC46EEB21″:{“entry”:”Bob Stein project”,”phrase”:”Tapestry”,”citationIdentifiers”:[],”identifier”:”432FC18C-3AB0-4B0A-B496-0C5CC46EEB21″,”urls”:[{“url”:””}],”documentPath”:””},”2FBB4C51-F5D9-4A36-A853-D1F7BB10079B”:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”Journal 2023″,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Journal 2023.liquid”,”identifier”:”2FBB4C51-F5D9-4A36-A853-D1F7BB10079B”,”entry”:”Here are the extracted keywords, topics, and names from the provided text:\n\nKeywords & Topics:\n- Journal\n- Author (cmd-J)\n- Liquid\n- WordPress\n- PDF Journals\n- Extended Reality (XR)\n- XR glasses\n- Gestures\n- AI\n- Libraries\n- Metadata transmission standard\n- Visual-Meta\n- JSON\n- webXR\n- Library.PDF\n- WebDAV\n- AI entity extraction\n- Dialog\n- RSS\n- WordPress\n- Blog\n- Visual-Meta AI\n- Metadata parsing\n- macOS\n- WebXR\n- Augmenting human intellect\n- Doug Engelbart\n- Future of Text\n- Climate emergency\n- Evolutionary pressures\n- Textual knowledge\n- Academic papers\n- Metaverse\n- 
Cyberspace\n- Frode Hegland\n\nNames:\n- Bjørn Borud\n- Joseph Jebelli\n- Lewis Dartnell\n- Doug Engelbart\n- Edgar (Frode’s son)\n\nThis list should help you locate and reference this text more easily in the future.\n\n2024″,”citationIdentifiers”:[]},”68FAAAC1-1E0E-4EBA-B170-1922112C7B47″:{“phrase”:”Design Notes”,”urls”:[{“url”:””}],”tagIdentifier”:”label”,”documentPath”:””,”identifier”:”68FAAAC1-1E0E-4EBA-B170-1922112C7B47″,”entry”:””,”citationIdentifiers”:[]},”DCA90D9E-931D-447D-AAD2-DCF42FE762AF”:{“entry”:””,”phrase”:”Ge Li,Jesse”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”DCA90D9E-931D-447D-AAD2-DCF42FE762AF”,”urls”:[{“url”:””}]},”BB8A60C4-7113-4C50-AC05-5380B3CE8B37″:{“entry”:””,”phrase”:”Ismail Serageldin”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”urls”:[{“url”:””}],”identifier”:”BB8A60C4-7113-4C50-AC05-5380B3CE8B37″},”CBEDE415-BD2E-421A-AF7C-97C5B2B4B421″:{“entry”:””,”phrase”:”Mark Anderson”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”CBEDE415-BD2E-421A-AF7C-97C5B2B4B421″,”urls”:[{“url”:””}]},”17AEB8D5-3E44-4CD2-9E78-4DB4BA41FBE8″:{“tagIdentifier”:”document”,”urls”:[{“url”:””}],”phrase”:”Authoring for The Future of Text VI in XR”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Authoring for The Future of Text VI in XR.liquid”,”identifier”:”17AEB8D5-3E44-4CD2-9E78-4DB4BA41FBE8″,”entry”:”The primary aim of authoring in XR for this project is to utilize citations for an annotated bibliography, with the goal of enabling contributors to ‘author’ content in XR for an engaging reading experience. The project faces competition from AI, as more papers are being summarized by AI rather than read by humans. Therefore, the focus is on creating compelling reading and authoring experiences in XR. Discussions are needed on what interactions in XR would be most interesting and how source materials should be presented. The project suggests making previous articles available for users to annotate and cite. There are numerous undefined aspects of interaction in XR that differ from traditional computing, such as the absence of windows and buttons. Careful consideration is required in designing constraints and scaffolding within this open space. The aim is to enhance the user’s ability to author and read academic papers, emphasizing connected reading and writing. The document serves as a hypothetical user guide to a system yet to be fully developed, detailing how users can assemble or sculpt documents in the shape of a Cube. The Cube is a knowledge object in XR designed to help academics communicate visually and spatially, serving as a tool for thought and communication.\n\nDecisions Made: \n- The primary act of authoring in XR will focus on citations for an annotated bibliography. \n- To enable contributors to author in XR for an XR reading experience. \n- To make previous articles available for annotation and citation. \n- The Cube will serve as a knowledge object in XR for visual and spatial communication. \n\nQuestions Asked: \n- What interactions would be most interesting to perform in XR and with what source material? \n- How did all these knowledge objects get there, and do they have rich information connections accessible externally? \n- How can you choose what to see, and how can you hide what you do not want to see? \n- How will nesting spatial knowledge representations work in practice? \n- How can you avoid making a mess with 200 citations in your space? 
\n- How best to see relationships and connections? \n- What should the background environment look like so it does not interfere with the information? \n- How to deal with the physical room you are in? \n- How much should the user need to learn vs. how powerful should the interactions be? \n\nKeywords: \nPhilosophical \nTechnical \nXR \nFrode Hegland”,”citationIdentifiers”:[]},”137EEC50-F588-48F5-A5B2-2E884F8D7CBA”:{“urls”:[{“url”:””}],”tagIdentifier”:”document”,”phrase”:”Journal 2024″,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Journal 2024.liquid”,”identifier”:”137EEC50-F588-48F5-A5B2-2E884F8D7CBA”,”entry”:”This extensive text revolves around topics related to XR (Extended Reality), photography, interactive text, metadata, and academic reading and writing. Below are the key themes, topics, names, and keywords extracted to help you find this text more easily in the future:\n\nKeywords & Topics:\n1. XR (Extended Reality)\n2. HyperCard\n3. Interactive text\n4. Photography\n5. Metadata\n6. Visual-Meta\n7. Digital and analog photography\n8. Camera settings (ISO, aperture, shutter speed)\n9. Darkroom and LightRoom\n10. Lumix S5IIX\n11. Coding and Node-based interface\n12. DaVinci Resolve\n13. HyperCard Scripting and HyperTalk\n14. Reader’s code in Xcode\n15. Academic documents\n16. PDF documents\n17. Library visualization\n18. Metadata attributes\n19. Augmented metadata\n20. Environment metadata\n21. Virtual reality (VR) and augmented reality (AR)\n22. Spatial computing\n23. Interaction design\n24. User experience (UX)\n25. Academic reading\n26. AI integration in text\n27. Authoring and publishing\n28. Document visualization\n29. User story mapping\n30. Interactive knowledge graphs\n31. Reader and Author software\n32. Sloan Foundation project\n33. WebXR and visionOS\n34. Apple Vision Pro\n35. Meta Quest\n36. User interaction in XR\n37. Spatialized PDF\n38. Layered Hypertext\n39. Spatial Document Format\n40. Research and experimentation\n\nNames:\n1. Bill Atkinson\n2. Adam\n3. Fabien\n4. Brandel\n5. Peter\n6. Alan\n7. Daniel Wolpert\n8. Edward Grefenstette\n9. Ilya Miskov\n10. Doug Engelbart\n11. Bruce Horn\n12. Rob Swigart\n13. Bob Stein\n14. Kamian\n15. Dene\n16. Andrew Thompson\n17. M.J. Murdock Charitable Trust\n18. Shafquat Towheed\n19. Jack Park\n20. 
Johannes Goransson\n\nThese keywords, topics, and names should help you efficiently locate and reference the relevant sections of this text in the future.\n\n2024″,”citationIdentifiers”:[]},”CF12DA6C-4883-4D51-AA27-ED365F8D6BAA”:{“phrase”:”Hypertextual Elements”,”tagIdentifier”:”label”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:””,”identifier”:”CF12DA6C-4883-4D51-AA27-ED365F8D6BAA”,”citationIdentifiers”:[]},”6653F39C-5452-4BB5-946D-1DBA4A0773D4″:{“phrase”:”Organizing in Author”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Organizing in Author.liquid”,”entry”:”It can be easy to get lost in the potential of XR versus traditional display and between Author and Reader.\nAn annotated bibliography can be created in Author based on documents in Reader.\nNotes from documents and thoughts can be created and stored spatially as part of the user’s ‘XR Office’.\nThe manuscript in Author can be exported as linear and viewed in various ways.\n\nThe manuscript can incorporate supplemental information in context, including diagrams, and each hypercard can connect and have smarts.\nUsers can have a spatial manuscript with notes, new text, and media, requiring separation of general notes and manuscript notes.\n3D objects, media, code, LLMs, geographic locations, and research data can be embedded in the manuscript.\nFraming options include voice-over tour, avatar, and view specs.\n\n2011 The Information (Gleick) is mentioned as relevant for the work.\n\nXR, Author, Reader, Annotated Bibliography, Citation, Map View, Personal Library, Manuscript, HyperCards, Semantic Encoding, Spatial Hypertext, Memory Palace, Voice Over Tour, Avatar, Geographical Location, Annotated Research Notes.\n\nBrief Note”,”identifier”:”6653F39C-5452-4BB5-946D-1DBA4A0773D4″,”citationIdentifiers”:[]},”9C4CF54E-D399-44D4-AF5F-D90994A05E14″:{“entry”:””,”phrase”:”HTML”,”citationIdentifiers”:[],”identifier”:”9C4CF54E-D399-44D4-AF5F-D90994A05E14″,”urls”:[{“url”:””}]},”4B4104A7-36A5-402F-A655-4F6FDDAFD593″:{“tagIdentifier”:”document”,”phrase”:”Organizing in Author”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Organizing in Author.liquid”,”identifier”:”4B4104A7-36A5-402F-A655-4F6FDDAFD593″,”entry”:”It can be easy for me to get lost in the potential of XR versus traditional display and between Author and Reader. \nFirst of all, I’ll look at doing an annotated bibliography. \nOn the issue of ‘organizing’ in XR, we need to look at the different obvious categories of organizations we should consider. \nWhat is available for views. \n\nAuthor as HyperCards to be exported as linear when required. \nEach hypercard can connect and have smarts. \nUser will need to see the document they are writing and potential sources they can draw on. \n\nKey Terms\/Concepts: \nAnnotated Bibliography, XR Office, Spatial Hypertext, HyperCards, Semantic Encoding, Annotated Research Notes, Memory Palace, Spatial Manuscript, 3D Objects, Author, Reader, Connections, Framing, ViewSpecs, Annotated Bibliography Document. 
\n\nBrief Note”,”citationIdentifiers”:[]},”6910CFAF-2D24-4439-8C3A-A3BB32F92894″:{“phrase”:”augment”,”urls”:[{“url”:””}],”tagIdentifier”:”note”,”documentPath”:””,”identifier”:”6910CFAF-2D24-4439-8C3A-A3BB32F92894″,”entry”:””,”citationIdentifiers”:[]},”D0B0E9FA-4693-4E45-8390-FFF532674842″:{“entry”:””,”phrase”:”PDF”,”citationIdentifiers”:[],”identifier”:”D0B0E9FA-4693-4E45-8390-FFF532674842″,”urls”:[{“url”:””}]},”823CE86D-A9BA-4B20-AA21-CE6B19772496″:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Interactions”,”documentPath”:””,”entry”:””,”identifier”:”823CE86D-A9BA-4B20-AA21-CE6B19772496″,”citationIdentifiers”:[]},”ECA0D760-1525-4784-ACFF-E68E17879757″:{“tagIdentifier”:”in progress”,”urls”:[{“url”:””}],”phrase”:”To Do”,”documentPath”:””,”identifier”:”ECA0D760-1525-4784-ACFF-E68E17879757″,”entry”:”To Do in the future “,”citationIdentifiers”:[]},”E2F4E57F-4CD6-4D79-9D32-2EFD1F4595AC”:{“phrase”:”Various Early Thoughts”,”tagIdentifier”:”label”,”urls”:[{“url”:””}],”documentPath”:””,”entry”:””,”identifier”:”E2F4E57F-4CD6-4D79-9D32-2EFD1F4595AC”,”citationIdentifiers”:[]},”A3637D18-C244-474C-B9AD-2FD9323385FC”:{“entry”:”Members include Frode Hegland, Dene Grigar, Fabien Benetou, Mark Anderson, Brandel Zachernuk, Peter Wasilko, Leon Van Kammen, Rob Swigart”,”phrase”:”Future Text Lab”,”citationIdentifiers”:[],”tagIdentifier”:”institution”,”identifier”:”A3637D18-C244-474C-B9AD-2FD9323385FC”,”urls”:[{“url”:””}]},”C65DFDE3-59C8-4594-B624-DD2167B56621″:{“phrase”:”Author”,”urls”:[{“url”:””}],”tagIdentifier”:”in progress”,”documentPath”:””,”entry”:”Software by the Augmented Text Company.”,”identifier”:”C65DFDE3-59C8-4594-B624-DD2167B56621″,”citationIdentifiers”:[]},”8626F536-0374-40C9-9219-409F0E10F418″:{“entry”:””,”phrase”:”Visual-Meta”,”citationIdentifiers”:[],”identifier”:”8626F536-0374-40C9-9219-409F0E10F418″,”urls”:[{“url”:””}]},”1743088A-4797-4522-96AD-76E216B363C6″:{“phrase”:”XR Knowledge Space Walkthrough”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/XR Knowledge Space Walkthrough.liquid”,”entry”:”Keywords: \n- PhD \n- Computer Science \n- Norway \n- Edgar \n- Emily \n- Henning \n- Dene \n- Fabien \n- Hypertext \n- History \n- Brain Science \n- Knowledge Space \n- Extended Cognitive Space \n- Knowledge Maps \n- Interactive Diagrams \n- Point of View \n- Connections \n- Left Hand \n- Right Hand \n- Wrist Sphere \n- Wrist Cube \n- Finger Menu \n- Citation Tree \n- Voice Assistant \n- Ring Context Menu \n- Pinch \n- Annotate \n- Cite \n- Concept Mapping \n- Saved Environments \n- Authoring Cube \n- Abstract \n- Hypertext Functions \n- References \n- Outline \n- Metadata \n- Unfolding \n- Knowledge Container \n- Interaction Tests \n- Layouts \n- Physical XR \n- Interactive Book \n- Future of Text\n\nTopics: \n- Knowledge Management \n- User Interaction \n- Augmented Reality \n- Virtual Reality \n- Interactive Authoring \n- Cognitive Enhancement \n- Information Visualization \n- Digital Writing \n- User Interface Design \n\nNames: \n- Edgar \n- Emily \n- Henning \n- Dene \n- Fabien \n\nSpecific Keywords: \n- Technical “,”identifier”:”1743088A-4797-4522-96AD-76E216B363C6″,”citationIdentifiers”:[]},”30F107C3-AED2-441D-9963-7E1B82134D15″:{“entry”:””,”phrase”:”Doug Engelbart”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”urls”:[{“url”:””}],”identifier”:”30F107C3-AED2-441D-9963-7E1B82134D15″},”084D188F-B628-42FA-906E-7E0A92FEFAB2″:{“phrase”:”Dimensions of Gestures in 
XR”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Dimensions of Gestures in XR.liquid”,”entry”:”Based on the text provided, here are the extracted keywords, topics, and names:\n\nKeywords\/Topics:\n1. The Future of Text\n2. Authoring\n3. Editing\n4. Unified Document\n5. Views and Presentations\n6. Posters\n7. Gestures in XR (Extended Reality)\n8. Grounded Gestures\n9. Time-Bound Gesture\n10. Gesture Grammar\n11. Wow Views\n12. Knowledge Objects\n13. Virtual Space\n14. Interaction Design\n15. Academic Document\n16. Metadata\n17. Knowledge Map\n18. Interactive Timeline\n19. Geographic View\n20. Virtual Locomotion\n21. Brain Implants\n22. Vision Pro\n23. Magic Document\n24. Reference Section\n25. Icon Design\n26. Glossary\n27. Video Presentation\n28. Toggle Views\n29. Interaction\n\nNames:\n- LI, HUANG, TIAN, WANG, DAI (authors mentioned in the context of gestures in XR)\n\nSpecific Keywords Based on Analysis:\n- Technical=”,”identifier”:”084D188F-B628-42FA-906E-7E0A92FEFAB2″,”citationIdentifiers”:[]},”C68EB944-E1B1-472F-BB3B-DC63A4A01344″:{“phrase”:”Invisible Tech Revolution – VR”,”urls”:[{“url”:””}],”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Invisible Tech Revolution – VR.liquid”,”entry”:”Attention is increasingly focused on AI, but similar attention should be given to Virtual Reality (VR) to unlock its potential in spatial knowledge environments. VR could offer deeper ways to interact with AI, enhancing learning and communication. Currently, VR is mainly viewed as a gaming and media tool, but it holds much more potential. Although high-end devices like the Apple Vision Pro are expensive, advancements in technology could make VR headsets commonplace within 10-15 years. The rich interaction possibilities in VR, such as spatial manipulation and immersive environments, are not yet fully realized but could significantly augment knowledge work. There is a risk that corporate interests might limit the innovation and data portability necessary for this potential. Therefore, it is crucial to support open systems to allow diverse VR development. The Future Text Lab, with support from the Alfred P. 
Sloan Foundation, is working on VR for reading and authoring, aiming to explore these possibilities further.\n \nKeywords:\n- Philosophical\n- XR\n- Virtual Reality\n- Augmented Reality\n- Future Text Lab\n- AI\n- Innovation\n- Knowledge Work\n- Open Systems\n- Data Portability\n- Sloan XR\n- Dene Grigar\n- Frode Hegland\n- Agreements”,”identifier”:”C68EB944-E1B1-472F-BB3B-DC63A4A01344″,”citationIdentifiers”:[]},”80E732B8-C608-4203-BA6C-E5FF4A2530B3″:{“phrase”:”Feb 2025 Focus for Reader prompt”,”tagIdentifier”:”document”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Feb 2025 Focus for Reader prompt.liquid”,”entry”:”AI, prompt, Reader, macOS, Augmented Text Company”,”identifier”:”80E732B8-C608-4203-BA6C-E5FF4A2530B3″,”citationIdentifiers”:[]},”9F4B61E9-143F-488C-B751-D8FD865E1505″:{“urls”:[{“url”:””}],”phrase”:”Goals”,”tagIdentifier”:”note”,”documentPath”:””,”entry”:””,”identifier”:”9F4B61E9-143F-488C-B751-D8FD865E1505″,”citationIdentifiers”:[]},”F7B45F85-44D5-4B53-AEFB-4021454557CC”:{“urls”:[{“url”:””}],”phrase”:”Authoring for Future of Text Vol 6″,”tagIdentifier”:”document”,”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/iCloud~com~liquid~Author\/Documents\/Authoring for Future of Text Vol 6.liquid”,”identifier”:”F7B45F85-44D5-4B53-AEFB-4021454557CC”,”entry”:”The text outlines the development and goals of a user guide for an XR authoring system called ‘Knowledge Space,’ aimed at facilitating intelligent interaction and document creation in The Future of Text Volume 6, with a focus on designing flexible, interactive, and richly connected reading and writing experiences in XR environments.\n\nThe text discusses the creation of a ‘chatty’ User Guide for an XR authoring system, emphasizing the need for a comprehensive guide and workflow to build and test, with the ultimate goal of enhancing intelligent dialogue through written word interaction. The guide describes the XR system, ‘Knowledge Space,’ supported by the Alfred P. Sloan Foundation, designed for WebXR-compatible headsets, allowing users to create, organize, and interact with knowledge objects in a virtual space. Key interactions are managed through hand gestures, and users can author content in an ‘Authoring Cube,’ which provides different aspects for writing, referencing, and concept mapping. The intention is to produce a connected and interactive ‘document of thoughts’ for The Future of Text Volume 6, encouraging contributors to provide abstracts, references, and media for integration into the XR environment. The system aims to offer a richer reading experience, enabling readers to explore content deeply and understand its connections to prior work.\n\nTechnical, XR Authoring, User Guide, Knowledge Space, WebXR, Extended Reality, Alfred P. Sloan Foundation, Authoring Cube, Interactive Document, The Future of Text, Intelligent Dialogue, Virtual Reality, Concept Mapping, BibTeX\n\nNames: Alfred P. 
Sloan Foundation\n\nTopics: XR Authoring, Knowledge Space, WebXR, Interactive Document Creation, Intelligent Dialogue, Concept Mapping, BibTeX Integration, The Future of Text Volume 6\n\nArchived”,”citationIdentifiers”:[]},”B2191093-7190-4849-940B-CA1AEE0A2EAF”:{“urls”:[{“url”:””}],”phrase”:”Annotations”,”tagIdentifier”:”note”,”documentPath”:””,”entry”:”Annotations, annotation, highlight, highlights, extracte”,”identifier”:”B2191093-7190-4849-940B-CA1AEE0A2EAF”,”citationIdentifiers”:[]},”489B6C59-36A9-4594-B2EC-D84EF42D6D71″:{“phrase”:”Reader User Guide 6.5″,”tagIdentifier”:”document”,”urls”:[{“url”:””}],”documentPath”:”\/Users\/frodehegland\/Library\/Mobile Documents\/com~apple~CloudDocs\/Augmented Text Co.\/Reader\/Reader macOS User Guides\/Reader User Guide 6.5.liquid”,”entry”:”Augmented Text Company Reader user guide.”,”identifier”:”489B6C59-36A9-4594-B2EC-D84EF42D6D71″,”citationIdentifiers”:[]},”9F3D4379-8D0F-4490-A62C-7099A1F46EE5″:{“tagIdentifier”:”note”,”urls”:[{“url”:””}],”phrase”:”Cognition”,”documentPath”:””,”identifier”:”9F3D4379-8D0F-4490-A62C-7099A1F46EE5″,”entry”:”Brain, cognition, cognitive, thinking, augmentation, intellectual augmentation”,”citationIdentifiers”:[]},”91BE57E9-DA18-4734-A1B4-90A27D48B84A”:{“entry”:””,”phrase”:”Dene Grigar”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”91BE57E9-DA18-4734-A1B4-90A27D48B84A”,”urls”:[{“url”:””}]},”B2D4C001-0514-49F6-BE05-12CDBBD16102″:{“phrase”:”Visual Explorations”,”urls”:[{“url”:””}],”tagIdentifier”:”label”,”documentPath”:””,”identifier”:”B2D4C001-0514-49F6-BE05-12CDBBD16102″,”entry”:””,”citationIdentifiers”:[]},”6ED1B080-9F76-4BCA-AFD8-112580042D5F”:{“urls”:[{“url”:””}],”tagIdentifier”:”person”,”phrase”:”Bob Stein”,”documentPath”:””,”entry”:””,”identifier”:”6ED1B080-9F76-4BCA-AFD8-112580042D5F”,”citationIdentifiers”:[]},”014091C1-C948-422A-99BD-B38625A6678D”:{“entry”:””,”phrase”:”Jack 
Park”,”citationIdentifiers”:[],”tagIdentifier”:”person”,”identifier”:”014091C1-C948-422A-99BD-B38625A6678D”,”urls”:[{“url”:””}]}}},”Map”:{“connections”:[],”settings”:{“shouldShowHiddenNodes”:false,”nodeVisibility”:”showAll”,”hiddenTags”:[]},”layout”:{“nodePositions”:{“1912D88E-252A-4CDA-B163-E93F500E35C3”:{“y”:-195.59531250000001,”x”:-285.77734375},”C1A0643E-084F-46B4-8C25-A8A8FE9C45CB”:{“y”:-521.8125,”x”:-724.71484375},”48C7278B-EA99-4386-96A5-4B487081F8C6″:{“y”:-240.45506652029712,”x”:1113.5},”4DFA3568-35A2-446B-8DAE-4796263B1805″:{“y”:-277.93387754909298,”x”:-1126.5},”17594465-F578-466C-9344-BE4748C331D1″:{“y”:-134.52734375,”x”:-222.96484375},”9FA70A41-B2CD-47B9-B467-3C7F1F246624″:{“y”:-131.41796875,”x”:49.9921875},”35EC3ABD-45F3-42F6-B72D-8ACA8120381F”:{“y”:693.203125,”x”:-812.078125},”3C773CE1-CC7F-46E2-8C70-0F12CC69E8E9″:{“y”:-618.72265625,”x”:595.3828125},”9B60C524-9E6B-44E9-9349-406C82C0A64E”:{“y”:7.6484375,”x”:-548.66796875},”0790ABB6-498B-4770-AFB4-D357D533A8C0″:{“y”:-326.46875,”x”:28.90625},”592D7A0D-1346-4D3A-A96B-026F632F8FD3″:{“y”:-9.6991300201167121,”x”:148.93359375},”F97814A8-AAC8-4069-8E99-264E20F5EA30″:{“y”:-216.2890625,”x”:612.7890625},”BFF687CE-7A61-4F70-B18C-9E3316B34B8A”:{“y”:-565.28255208333326,”x”:-601.71875},”2F6607F0-18A5-40BD-9C04-3BB9D1B13901″:{“y”:-565.19140624999989,”x”:-778.91015625},”693382CA-C247-4E58-93C7-1F65724B5FF1″:{“y”:-273.8125,”x”:-363.58203125},”85627362-F096-47C1-B1E9-2E5621BA1ECA”:{“y”:-364.76718749999998,”x”:-384.71875},”B493FC9E-30B4-4C11-9A03-C3FEF0ABAA0F”:{“y”:11.613627704405573,”x”:-1119},”FE635BB3-9703-4E31-9722-D776F0287EE6″:{“y”:-665.67592086131947,”x”:-434.27155463385907},”72FFAAA3-23CA-4ECC-90E2-8B87A0EECC7C”:{“y”:-349.2074053006715,”x”:395.74506454106086},”0C6395F0-39E3-4D3A-B20E-C323E593FE7A”:{“y”:233.8626058947105,”x”:-740.44921875},”8B77E126-B343-4856-AB5B-ADFFB584F79A”:{“y”:-239.32822470398332,”x”:438},”E1EA557F-8F5B-4923-B177-162637293A22″:{“y”:90.54541015625,”x”:-660.6328125},”4F9AE243-974F-4ABB-AD1E-E74846397617″:{“y”:615,”x”:715.828125},”742BAD9E-EB89-4FF1-BCEC-A09D26518170″:{“y”:-169.58984375,”x”:-201.0390625},”54EFB8C5-E6C4-4986-AF9E-552EC79F2E4F”:{“y”:-387.91015625,”x”:-548.3515625},”AA0F8F0C-2CB8-4CFE-A7CC-2B0E8099F59F”:{“y”:-456.1953125,”x”:369.7265625},”10570052-762F-4AEE-B399-DC32269CCB4A”:{“y”:-455.41748046875,”x”:-369.54296875},”1DE035AD-B309-44BD-826F-684325B3E48C”:{“y”:-649.37109375,”x”:116.38671875},”BF9C84CB-3342-460A-9BD8-8EF7AEB0F781″:{“y”:-531.94755584292284,”x”:885},”2B21F792-183B-4794-8515-B81103C66E8E”:{“y”:-502.05473950696501,”x”:885.5},”80C5601A-3DB7-4C4E-8C78-5E049970F08B”:{“y”:-252.28749999999999,”x”:-397.53125},”E6D81C20-7586-4FB9-B076-B8B258902721″:{“y”:-429.89515904017856,”x”:-559.3828125},”58964B2C-9665-413F-AABD-8A3CE63E2150″:{“y”:-297.03515625,”x”:26.73046875},”CBDF999B-2DFA-4C83-9ED0-1EAD35A07D29″:{“y”:354.6487161939084,”x”:910.890625},”D4A80089-0FFE-4702-8F8B-40C952D3D612″:{“y”:346.00669878588189,”x”:788.44140625},”46BDFEC5-DCE2-4363-B008-F360C21AB0FF”:{“y”:-429.89515904017856,”x”:-338.5},”0751D56D-2203-43E8-B406-8C659B85AA00″:{“y”:-48.3828125,”x”:-403.71875},”D0A778BE-F14B-49A2-9544-DD859B5A0A2D”:{“y”:-186.88671875,”x”:-1139.3046875},”890CC805-5A4F-4030-9CAF-CF1BF651B435″:{“y”:89.112032048842224,”x”:1102.5},”92B01EC8-A48D-4957-9174-BD8EC8579E49″:{“y”:-163.86458333333331,”x”:-576.296875},”38866374-1BE6-4671-82FF-AC7C1EB9DC24″:{“y”:688.41974795649912,”x”:36.904134056779526},”B8063329-CFB3-4AC9-B7B2-7EDAB3803E28″:{“y”:-478.71875,”x”:716.37890625},”2C1A5B3A-C752-4001-AA77-8E8712B71E9A”:{“y”:-164.40656
312785507,”x”:449},”494B4357-CB81-404F-B1B9-A6B2AF052486″:{“y”:-395.08203125,”x”:-672.44140625},”89DE9ED2-B079-40AE-950D-94350745465A”:{“y”:-196.06406250000001,”x”:-483.30859375},”8CC9F6C4-3A4E-44A9-9615-EE53C80F7B0B”:{“y”:-429.24609375,”x”:-827.90234375},”3039B8BD-0592-4DB2-BA3B-FE763B3DB18D”:{“y”:-442.41796875,”x”:-349.1953125},”28F78E71-9524-4F75-B01C-765D884BE30B”:{“y”:-455.41748046875,”x”:-310.90625},”C831C616-F58A-4087-BE6B-417657EF11C2″:{“y”:-275.953125,”x”:-817.0234375},”EBD599EB-A326-4EB3-88DE-87B7185AA6F8″:{“y”:-163.86458333333331,”x”:-491.05859375},”B783BE11-4B4B-43AE-A9E8-C1B805275A27″:{“y”:-163.86458333333331,”x”:-692.8125},”74B9950C-1222-4BE8-BE88-E57A4B74F6B4″:{“y”:600.78515625,”x”:759.47265625},”BC20EBCA-A66A-41BB-87A3-BD8B345D9A40″:{“y”:-591.04236851149346,”x”:-81.446886639754041},”C144D7E7-BDF3-495D-B58C-182CBBAA887F”:{“y”:-349.6641184426216,”x”:182.88231720280214},”FD0A8B48-FF0D-49C1-B7F3-C09418674583″:{“y”:-454.29439733634172,”x”:-15.828125},”A04FC419-2BF9-43D8-A783-3AD74D161BEF”:{“y”:-545.45548859682117,”x”:421.28125},”EF496879-3271-43CB-8B04-66047DBA5856″:{“y”:-472.16192317100717,”x”:859.5},”75D4616D-92E1-414E-BEFD-FB9FA1E6C3F7″:{“y”:36.7109375,”x”:282.6171875},”699A6FE5-B77F-4C21-A8A5-748351280264″:{“y”:58.46484375,”x”:-480.60546875},”795587DC-84A5-4037-8126-9AA270BAAAE3″:{“y”:-627.18359375,”x”:-344.34765625},”AB2C67CE-B179-42E3-BA4E-F539BE960AE6″:{“y”:-364.76718749999998,”x”:-730.18359375},”AE6A6998-AFD5-4069-B29E-D68221D86F6A”:{“y”:586.22265625,”x”:839.3046875},”189897A6-D44F-4BB7-A32E-948CFD7AAFD5″:{“y”:59.535358440350024,”x”:-248.8828125},”58DB00D7-865F-4BD6-8002-67962B5DFE00″:{“y”:-429.89515904017856,”x”:-607.75},”74EB3BE4-D89C-48E4-815D-37A9617F65BF”:{“y”:-440.04296875,”x”:-502.41796875},”BD9F51B9-9EFE-4651-B551-F491B4558B31″:{“y”:-194.18125000000001,”x”:-387.48828125},”B6CDB5CA-6654-4868-AF5D-5A4BFE9AC22A”:{“y”:-314.7890625,”x”:-667.33203125},”2A844FE8-2E27-4AAE-9248-D4A464CDAFFD”:{“y”:692.62890625,”x”:-742.5},”5AF9FDF2-3670-4022-875B-F36CB58AB3FB”:{“y”:-575.26090441980102,”x”:447.78125},”EBE798DD-F20D-4E4E-9EB4-C19161A7DC0A”:{“y”:-625.54296875,”x”:415.93359375},”50F08F51-8339-45D3-85BE-A81E796AA795″:{“y”:-565.46484375,”x”:-476.53515625},”9A78193C-AC2B-4EBC-ACB3-447D252F4EC8″:{“y”:0,”x”:0},”49E1E98B-A98D-4BC4-9BDB-BE587A82B42C”:{“y”:-509.57188175189179,”x”:-1159},”11667864-E60F-4726-AD13-7D77443C948A”:{“y”:-206.34375,”x”:-1237.5703125},”3D765011-2AD6-44A8-AF2E-D3A09273EE67″:{“y”:-252.28749999999999,”x”:-555.91796875},”B917D118-5836-4033-82ED-4FD70F74DF8E”:{“y”:292.50969165773415,”x”:790.19407799719966},”97DA5EFA-E179-4D1E-B906-E00EB69AA320″:{“y”:128.28515625,”x”:1108.7890625},”1404D553-CBFB-4B29-A8AB-C2A0A3F579A9″:{“y”:-565.37369791666663,”x”:-734.71875},”D6CAC6E2-449D-45BB-BC75-035387B9B3B7″:{“y”:454.03125,”x”:144.8359375},”A9012DCC-8425-4566-983A-ADFF811F0B6C”:{“y”:-196.06406250000001,”x”:-644.26171875},”8E391EEB-2D80-4A8F-9D1B-B2975C5F4E8E”:{“y”:-455.41748046875,”x”:-637.578125},”26C1D942-B62B-4279-92CF-3BE5856FC16E”:{“y”:-475.9140625,”x”:-308.7578125},”23717D11-05DA-404C-833B-C4D4EEC9CD27″:{“y”:616.35296262785539,”x”:1080.53515625},”5271A592-FD28-4A97-BE49-87B0DE240212″:{“y”:693.5,”x”:-877.04296875},”AFD3BEF8-7D10-499A-BA84-B63219826A55″:{“y”:-52.13101019507463,”x”:1109.5},”6005FF0E-58A1-44E5-B2B6-CE0196D4F619″:{“y”:618.90234375,”x”:845.7265625},”B3617A7C-2A5B-4E18-A7FE-99E9DB098240″:{“y”:393.7923570625909,”x”:916.62663700808014},”8121F87D-A3DA-4B21-8A09-79610729701E”:{“y”:-193.3740524389915,”x”:1129},”EED6613A-D24A-4B88-BEFB-3BEEEDD32D23″:{“y”:320.7734375,
”x”:-392.0703125},”9CD74305-2283-4DD0-9AE2-1E67029C2045″:{“y”:371.99762527774658,”x”:-499.38587587584868},”2BDC4008-A162-4844-B66B-917046AD5E1A”:{“y”:-455.41748046875,”x”:-533.25},”76C0B6B6-875D-41F3-BC48-57264889A919″:{“y”:-195.07578125000001,”x”:-434.4765625},”9F906D86-6952-4AA4-83F6-81126A619575″:{“y”:-269.62890625,”x”:-882.1875},”70554F0F-92D9-4FE6-A916-CAC598FAE5B6″:{“y”:-429.89515904017856,”x”:-411.23828125},”883C92FD-CD18-4EEE-8235-11309CECB7AF”:{“y”:-162.11487544769358,”x”:-1130.5},”E94D61B5-E30A-487A-BE7A-F705CC89F440″:{“y”:-55.515625,”x”:47.47265625},”FF7EB6C7-9FBD-4E39-B2E0-9D4A3C204982″:{“y”:3.08203125,”x”:-34.8125},”DB4D51DC-5679-459A-BCB4-5B84C35A45EB”:{“y”:-591.7331885148385,”x”:818},”B19187C4-C1D5-4212-8D9D-761CF775D4AB”:{“y”:-364.76718749999998,”x”:-827.5},”41C49110-060B-44B7-9624-1BE7653FB14A”:{“y”:147.046875,”x”:-843.1015625},”43F22978-F312-4294-BC8E-7103EE9BDDCD”:{“y”:-287.67930823093343,”x”:57.82421875},”E7592DF6-7394-4047-AAB1-8E4EDF9942B4″:{“y”:-252.28749999999999,”x”:-483.921875},”3DDB45C2-B97A-4309-8B31-22359118A19C”:{“y”:-521.32421875,”x”:754.76953125},”36CCCE35-0337-4BBC-946D-394AC1753C2A”:{“y”:-429.89515904017856,”x”:-672.859375},”928315D7-D4C5-4817-8254-9CAAC807D5AD”:{“y”:-504.35546875,”x”:-865.55078125},”D76988BB-6379-470B-A5FB-8157E403441F”:{“y”:-574.3828125,”x”:-583.640625},”42214B85-8C5D-4341-9285-ED03157D8DBB”:{“y”:-521.68359375,”x”:1054.53125},”A63D74D4-DCBC-4D27-9F7B-541994AECBF8″:{“y”:-565.84765625,”x”:-590.8125},”20FE51BB-6385-4313-BB6B-10B489693828″:{“y”:-634.71484375,”x”:785.62890625},”36A90E72-50FD-419D-B095-5B604E78E654″:{“y”:-5.0499961137690121,”x”:1111.5},”78C4B89B-3A13-4060-99C9-4EAD434BE5D4″:{“y”:-625.39088385329126,”x”:-1190.5},”CA166148-454F-4B31-9087-7779BE4D6EF9″:{“y”:589.77734375,”x”:549.1171875},”EED92087-48B9-431F-9488-48D4D2509BDD”:{“y”:-364.76718749999998,”x”:-604.6015625},”84DF5CB1-8BEE-4DD3-9587-619F5742CE71″:{“y”:-211.99609375,”x”:924.48046875},”AA1873CD-B4AB-4D44-90D8-9F65CBCFDEFB”:{“y”:-287.53608060160275,”x”:1118.5},”F9291D9D-F96A-44BD-AA3F-20768F792586″:{“y”:69.52312875510529,”x”:-1147.5},”C8CD5D71-7CB2-4CA6-BD75-54F4F012D38B”:{“y”:42.031017967536606,”x”:1103},”ABD5C3C3-F960-4DDA-9039-4AA274946F65″:{“y”:-302.3759765625,”x”:-654.046875},”EB6B5BA5-C3BD-4835-971F-13D53BCEAD14″:{“y”:620.7109375,”x”:572.3203125},”3A13D918-523A-45E8-A629-2A9198ABE1AB”:{“y”:-504.92578125,”x”:-796.53125},”4F45F9CB-CE66-4825-B658-6916EDCFDB0F”:{“y”:-685.3046875,”x”:700.4921875},”F0D56176-2180-4474-ACD6-2F42E11FEC44″:{“y”:4.6481173042026853,”x”:-795.3671875},”2526C1F6-CD3C-4347-A4E5-3D189D7A6C8A”:{“y”:-14.56323997559852,”x”:414.5},”CC2631DB-7B51-4BA5-B36F-7BC189E8E5E8″:{“y”:-526.82421874999989,”x”:-428.7734375},”67B9BAED-C085-436D-89F9-7147E2F100F4″:{“y”:690.66370940587717,”x”:141.48689966026791},”F6850AC6-DD9B-4C30-B6B7-2E46920A468E”:{“y”:200.938745459556,”x”:-853.62890625},”8861C148-A9CF-4E0B-A8CA-A67033E0C4BF”:{“y”:-281.41015625,”x”:-230.77734375},”F6F33E1A-DD36-47BF-80A5-6ACF10BFCC3E”:{“y”:-220.02437649839328,”x”:-1126},”C6C43F23-65E3-46FF-82C1-63BCB2740A61″:{“y”:-46.295873346294144,”x”:-1124.5},”4DEAEFE2-A7F7-4B76-B391-4939E5786F4B”:{“y”:-451.66238070119209,”x”:-1122.5},”AE11978E-3783-4333-8965-BDFCB609D81D”:{“y”:50.965087890625,”x”:-653.53125},”83F44B66-6565-48F0-9677-9657E228E5D7″:{“y”:-90.65234375,”x”:-1218.5390625},”8D278E28-758A-45CA-8E4D-EBBD8711DD2B”:{“y”:-519.37890625,”x”:523.046875},”294895B6-4F2A-4EEE-93F6-770DF9BFF485″:{“y”:-490.83526769175023,”x”:87.5546875},”9CC23C69-1BA2-486C-8E04-2F89F45274BD”:{“y”:-335.84337859979269,”x”:-1189},”5F3488
10-D413-4387-A038-FF481FA0E6EA”:{“y”:-28.890625,”x”:-798.72265625},”3184136B-7DE9-4E0C-AA40-2D3995DD1EC0″:{“y”:-393.75287965049239,”x”:-1143.5},”B10121B8-3580-4C0A-93A5-DC04BE024241″:{“y”:-381.698108764214,”x”:1117.5},”E251DF31-D589-4AE5-BBA6-028FD41FB0E0″:{“y”:-47.662782263455085,”x”:-210.52865612300843},”AA209C4F-8D3B-4DC7-8D9D-099C5FB5E377″:{“y”:-528.67057291666663,”x”:-681.51953125},”A061DE71-A13E-4045-8759-13BEEF66F226″:{“y”:113.90685657105007,”x”:-246.25},”24C33A05-6E80-4B71-A1CD-DCB1EF8204B7″:{“y”:-252.28749999999999,”x”:-707.59375},”B811F27E-D675-4566-B7E6-CE680DFC266C”:{“y”:-429.89515904017856,”x”:-782.1875},”C39F3AF8-98E4-48BC-8236-8DB730404801″:{“y”:-390.08122641144803,”x”:270.30778927413053},”C4EE6197-3D99-474F-82C3-9ECEC4C1A1DB”:{“y”:-302.3759765625,”x”:-764.27734375},”6CD5ADB9-E973-4BB1-BF28-C0ADEE83C61E”:{“y”:-302.3759765625,”x”:-552.53515625},”3FB1F3D6-FB3A-4B52-9628-E15F303F4C18″:{“y”:-146.29303835768587,”x”:1127.5},”74709DDA-0F90-4531-A643-1FF67403C903″:{“y”:-455.41748046875,”x”:-587.08984375},”E418435C-7D63-4520-91BB-B9E890638323″:{“y”:-99.212024276380248,”x”:1090.5},”079089B7-AEC2-4136-99DE-26DE2BF8D289″:{“y”:-48.009363082116238,”x”:-810.23487020569178},”3A49DC5C-2D4A-43BC-B5E7-6A3ECE7C197D”:{“y”:1.0390625,”x”:582.109375},”53D35F71-91C8-4404-B8F1-E63A97DFA100″:{“y”:86.721107505700047,”x”:-246.69140625},”DB6654B7-3CE6-4954-8CE6-A0CB31DECE0D”:{“y”:-527.74739583333326,”x”:-551.890625},”00990C0A-A5F5-4A53-B6A3-9B5FAB766420″:{“y”:-311.1640625,”x”:-347.38671875},”FCF15E78-97E9-45B2-BE54-19ADC5089794″:{“y”:127.43262980580501,”x”:-1134},”FBF9F6E3-BEB1-432D-9E40-DFAA64DF5116″:{“y”:-89.484901551726793,”x”:416.5},”21356360-F805-4CBC-839F-C48BF1DCDF6D”:{“y”:-334.61709468290837,”x”:1119.5},”80B32BBD-5BCE-4731-B45D-4C0F78D5A994″:{“y”:-626.3029192106178,”x”:-38.360914499699902},”B81D0B14-3CCB-45F0-ADF1-E071D89163FC”:{“y”:-455.41748046875,”x”:-473.86328125},”3BD5F848-DE80-47F1-A29A-B2EF1492CF07″:{“y”:-252.42421874999999,”x”:-611.5078125},”E6C92F73-6BF4-47F8-B986-8B12CDD06375″:{“y”:-609.18359375,”x”:1054.71484375},”0CCA7A6E-3515-423A-BC41-07D618D5ED6E”:{“y”:-455.41748046875,”x”:-419.46484375},”D9D73D24-1301-4C84-8ACF-42C2D609DB92″:{“y”:-428.77912284551962,”x”:1110.5},”19228E18-66AC-43DC-88AC-6113D0F65D22″:{“y”:241.26322086821153,”x”:-947.75987787812198},”E1FD6AB3-4FAD-4565-B25E-155E43A0C265″:{“y”:424.82826820686864,”x”:828.29296875},”F187D161-49D4-4F4A-BA4A-533E1317CF66″:{“y”:-206.1875,”x”:-398.03125},”79E83F27-41AF-493F-86D9-55AC1D387F4E”:{“y”:-81.87109375,”x”:-507.57421875},”EE55509C-CC21-435A-9AB0-FDF93911A523″:{“y”:-529.59375,”x”:-798.703125},”893E96BB-C0A3-43C5-A4E8-A9E131E5DDC6″:{“y”:-104.20537439699386,”x”:-1171.5},”C088A01A-3819-4378-AA21-DB39C04A92A9″:{“y”:-429.89515904017856,”x”:-499.4921875},”9CB8E99B-ABC1-48CC-8ECB-53E6FAAA8AD7″:{“y”:-567.4813828025915,”x”:-1203},”8A110A0E-5171-49ED-96E2-68E94A567A76″:{“y”:-628.60546875,”x”:-605.0859375},”DFD3B33C-BE09-4A4E-A903-EA6FB844D33E”:{“y”:-391.3359375,”x”:-1115.8203125},”F6E18EF8-1055-4894-8EAC-44D2C6EF989D”:{“y”:32.349609375,”x”:-245.9609375},”14C6BEC9-B2C3-4127-B555-4DEDACD97B72″:{“y”:-561.84037217888067,”x”:876},”A7054ED3-5CCA-412B-9B36-F0162206B5AB”:{“y”:-455.81201171875,”x”:-754.078125},”10E83E8D-BBD1-4A8A-87E9-4728C46FBC64″:{“y”:382.45793840440177,”x”:779.20703125},”74CBAC14-718B-46B8-8959-2430FFC63A4F”:{“y”:-52.58984375,”x”:-299.05078125},”16BC8E05-9A84-4DEB-97C8-D95F157308CA”:{“y”:-134.7734375,”x”:-171.55078125},”CF23B6A8-05DA-4651-B436-40D26140B263″:{“y”:15.757201765019417,”x”:94.2265625},”66B4A894-696A-49D4-813A-1B995B83E6D
B”:{“y”:-334.63671875,”x”:-1217.83984375},”18774C28-1F1A-42ED-9074-BC9F0B660E91″:{“y”:-364.76718749999998,”x”:-493.80078125},”1D0F307B-F734-423A-9A2C-D2746FE17D8C”:{“y”:-302.3759765625,”x”:-592.91015625},”EBBEB19B-E670-476E-BC77-26962F760F85″:{“y”:-544.28168682189221,”x”:-94.268656908256617}}},”nodes”:[{“isHidden”:false,”webLinkPath”:”https:\/\/futuretextlab.info”,”title”:”futuretextlab.info”,”identifier”:”6005FF0E-58A1-44E5-B2B6-CE0196D4F619″,”isStruckthrough”:false,”type”:”webLink”,”definition”:””},{“title”:”AI”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”46BDFEC5-DCE2-4363-B008-F360C21AB0FF”},{“title”:”using”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”18774C28-1F1A-42ED-9074-BC9F0B660E91″},{“title”:”Making Text More Visually Parseable”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”C6C43F23-65E3-46FF-82C1-63BCB2740A61″},{“title”:”Author localisation volunters”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”5AF9FDF2-3670-4022-875B-F36CB58AB3FB”},{“title”:”Annotations”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”F6E18EF8-1055-4894-8EAC-44D2C6EF989D”},{“title”:”Authoring for The Future of Text VI in XR”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”2C1A5B3A-C752-4001-AA77-8E8712B71E9A”},{“title”:”Augmented Text Company”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”4F45F9CB-CE66-4825-B658-6916EDCFDB0F”},{“title”:”and”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”1D0F307B-F734-423A-9A2C-D2746FE17D8C”},{“title”:”Underlying all of this work is research into “,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”A9012DCC-8425-4566-983A-ADFF811F0B6C”},{“title”:”AI Overview Fedb 2025 prompt for reader”,”isHidden”:false,”type”:”text”,”isStruckthrough”:false,”identifier”:”BF9C84CB-3342-460A-9BD8-8EF7AEB0F781″},{“title”:”Dene Grigar”,”isHidden”:false,”identifier”:”D9D73D24-1301-4C84-8ACF-42C2D609DB92″,”type”:”text”,”isStruckthrough”:false},{“title”:”Code Snippets”,”isHidden”:false,”identifier”:”AE11978E-3783-4333-8965-BDFCB609D81D”,”type”:”text”,”isStruckthrough”:false},{“title”:”Doug Engelbart”,”isHidden”:false,”identifier”:”10E83E8D-BBD1-4A8A-87E9-4728C46FBC64″,”type”:”text”,”isStruckthrough”:false},{“title”:”Demo for Authoring in XR for FoT 6″,”isHidden”:false,”identifier”:”4DFA3568-35A2-446B-8DAE-4796263B1805″,”type”:”text”,”isStruckthrough”:false},{“title”:”‘Binding’ Mechanism”,”isHidden”:false,”identifier”:”F0D56176-2180-4474-ACD6-2F42E11FEC44″,”type”:”text”,”isStruckthrough”:false},{“title”:”Cognition”,”isHidden”:false,”identifier”:”89DE9ED2-B079-40AE-950D-94350745465A”,”type”:”text”,”isStruckthrough”:false},{“title”:”JSON”,”isHidden”:false,”identifier”:”2A844FE8-2E27-4AAE-9248-D4A464CDAFFD”,”type”:”text”,”isStruckthrough”:false},{“title”:”The use case is”,”isHidden”:false,”identifier”:”B19187C4-C1D5-4212-8D9D-761CF775D4AB”,”type”:”text”,”isStruckthrough”:false},{“title”:”Goals”,”isHidden”:false,”identifier”:”1404D553-CBFB-4B29-A8AB-C2A0A3F579A9″,”type”:”text”,”isStruckthrough”:false},{“title”:”Bob Horn”,”isHidden”:false,”identifier”:”890CC805-5A4F-4030-9CAF-CF1BF651B435″,”type”:”text”,”isStruckthrough”:false},{“title”:”Foundational”,”isHidden”:false,”identifier”:”78C4B89B-3A13-4060-99C9-4EAD434BE5D4″,”type”:”text”,”isStruckthrough”:false},{“title”:”Invisible Tech Revolution – 
VR”,”isHidden”:false,”identifier”:”FBF9F6E3-BEB1-432D-9E40-DFAA64DF5116″,”type”:”text”,”isStruckthrough”:false},{“title”:”Ismail Serageldin”,”isHidden”:false,”identifier”:”CBDF999B-2DFA-4C83-9ED0-1EAD35A07D29″,”type”:”text”,”isStruckthrough”:false},{“title”:”are to augment academic authoring in”,”isHidden”:false,”identifier”:”BFF687CE-7A61-4F70-B18C-9E3316B34B8A”,”type”:”text”,”isStruckthrough”:false},{“title”:”Edgar Hegland”,”isHidden”:false,”identifier”:”23717D11-05DA-404C-833B-C4D4EEC9CD27″,”type”:”text”,”isStruckthrough”:false},{“title”:”Organizing in XR”,”isHidden”:false,”identifier”:”294895B6-4F2A-4EEE-93F6-770DF9BFF485″,”type”:”text”,”isStruckthrough”:false},{“title”:”Frames”,”isHidden”:false,”identifier”:”EBBEB19B-E670-476E-BC77-26962F760F85″,”type”:”text”,”isStruckthrough”:false},{“title”:”Ge Li notes from 26 March meeting”,”isHidden”:false,”identifier”:”592D7A0D-1346-4D3A-A96B-026F632F8FD3″,”type”:”text”,”isStruckthrough”:false},{“title”:”Gestures”,”isHidden”:false,”identifier”:”C088A01A-3819-4378-AA21-DB39C04A92A9″,”type”:”text”,”isStruckthrough”:false},{“title”:”one’s “,”isHidden”:false,”identifier”:”B81D0B14-3CCB-45F0-ADF1-E071D89163FC”,”type”:”text”,”isStruckthrough”:false},{“title”:”The core function we aim to “,”isHidden”:false,”identifier”:”A7054ED3-5CCA-412B-9B36-F0162206B5AB”,”type”:”text”,”isStruckthrough”:false},{“title”:”Agenda for 16th of June – Halfway Presentation”,”isHidden”:false,”identifier”:”80B32BBD-5BCE-4731-B45D-4C0F78D5A994″,”type”:”text”,”isStruckthrough”:false},{“title”:”Philosophical”,”isHidden”:false,”identifier”:”E7592DF6-7394-4047-AAB1-8E4EDF9942B4″,”type”:”text”,”isStruckthrough”:false},{“title”:”XR”,”isHidden”:false,”identifier”:”50F08F51-8339-45D3-85BE-A81E796AA795″,”type”:”text”,”isStruckthrough”:false},{“title”:”and”,”isHidden”:false,”identifier”:”10570052-762F-4AEE-B399-DC32269CCB4A”,”type”:”text”,”isStruckthrough”:false},{“title”:”Batch Processing import of PDF to Reader”,”isHidden”:false,”identifier”:”2B21F792-183B-4794-8515-B81103C66E8E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Frode Hegland”,”isHidden”:false,”identifier”:”B10121B8-3580-4C0A-93A5-DC04BE024241″,”type”:”text”,”isStruckthrough”:false},{“title”:”with a basis of thinking around “,”isHidden”:false,”identifier”:”DB6654B7-3CE6-4954-8CE6-A0CB31DECE0D”,”type”:”text”,”isStruckthrough”:false},{“title”:”Navigation”,”isHidden”:false,”identifier”:”36CCCE35-0337-4BBC-946D-394AC1753C2A”,”type”:”text”,”isStruckthrough”:false},{“title”:”AI Summary of First Tapestry-Knowledge Space meeting”,”isHidden”:false,”identifier”:”C39F3AF8-98E4-48BC-8236-8DB730404801″,”type”:”text”,”isStruckthrough”:false},{“title”:”augment”,”isHidden”:false,”identifier”:”8E391EEB-2D80-4A8F-9D1B-B2975C5F4E8E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Organize”,”isHidden”:false,”identifier”:”2BDC4008-A162-4844-B66B-917046AD5E1A”,”type”:”text”,”isStruckthrough”:false},{“title”:”final-narrative-sloan-revised2″,”isHidden”:false,”identifier”:”F9291D9D-F96A-44BD-AA3F-20768F792586″,”type”:”text”,”isStruckthrough”:false},{“title”:”Leon Van Kammen”,”isHidden”:false,”identifier”:”3FB1F3D6-FB3A-4B52-9628-E15F303F4C18″,”type”:”text”,”isStruckthrough”:false},{“title”:”and”,”isHidden”:false,”identifier”:”3D765011-2AD6-44A8-AF2E-D3A09273EE67″,”type”:”text”,”isStruckthrough”:false},{“title”:”Various Early 
Thoughts”,”isHidden”:false,”identifier”:”49E1E98B-A98D-4BC4-9BDB-BE587A82B42C”,”type”:”text”,”isStruckthrough”:false},{“title”:”Decisions”,”isHidden”:false,”identifier”:”9CB8E99B-ABC1-48CC-8ECB-53E6FAAA8AD7″,”type”:”text”,”isStruckthrough”:false},{“title”:”Visual-Meta”,”isHidden”:false,”identifier”:”92B01EC8-A48D-4957-9174-BD8EC8579E49″,”type”:”text”,”isStruckthrough”:false},{“title”:”Author”,”isHidden”:false,”identifier”:”EBE798DD-F20D-4E4E-9EB4-C19161A7DC0A”,”type”:”text”,”isStruckthrough”:false},{“title”:”Authoring”,”isHidden”:false,”identifier”:”53D35F71-91C8-4404-B8F1-E63A97DFA100″,”type”:”text”,”isStruckthrough”:false},{“title”:”Sloan XR”,”isHidden”:false,”identifier”:”8A110A0E-5171-49ED-96E2-68E94A567A76″,”type”:”text”,”isStruckthrough”:false},{“title”:”Authoring”,”isHidden”:false,”identifier”:”AB2C67CE-B179-42E3-BA4E-F539BE960AE6″,”type”:”text”,”isStruckthrough”:false},{“title”:”HTML”,”isHidden”:false,”identifier”:”5271A592-FD28-4A97-BE49-87B0DE240212″,”type”:”text”,”isStruckthrough”:false},{“title”:”and”,”isHidden”:false,”identifier”:”928315D7-D4C5-4817-8254-9CAAC807D5AD”,”type”:”text”,”isStruckthrough”:false},{“title”:”layout”,”isHidden”:false,”identifier”:”E6D81C20-7586-4FB9-B076-B8B258902721″,”type”:”text”,”isStruckthrough”:false},{“title”:”Authoring for Future of Text Vol 6″,”isHidden”:false,”identifier”:”FCF15E78-97E9-45B2-BE54-19ADC5089794″,”type”:”text”,”isStruckthrough”:false},{“title”:”Visual Explorations”,”isHidden”:false,”identifier”:”893E96BB-C0A3-43C5-A4E8-A9E131E5DDC6″,”type”:”text”,”isStruckthrough”:false},{“isHidden”:false,”webLinkPath”:”https:\/\/thefutureoftext.org\/symposium\/”,”title”:”thefutureoftext.org\/symposium”,”identifier”:”EB6B5BA5-C3BD-4835-971F-13D53BCEAD14″,”isStruckthrough”:false,”type”:”webLink”,”definition”:””},{“title”:”Reader User Guide 6.5″,”isHidden”:false,”identifier”:”DB4D51DC-5679-459A-BCB4-5B84C35A45EB”,”type”:”text”,”isStruckthrough”:false},{“title”:”Knowledge Object Interactions”,”isHidden”:false,”identifier”:”3184136B-7DE9-4E0C-AA40-2D3995DD1EC0″,”type”:”text”,”isStruckthrough”:false},{“title”:”Annotated Bibliography”,”isHidden”:false,”identifier”:”85627362-F096-47C1-B1E9-2E5621BA1ECA”,”type”:”text”,”isStruckthrough”:false},{“title”:”Interactions”,”isHidden”:false,”identifier”:”AA209C4F-8D3B-4DC7-8D9D-099C5FB5E377″,”type”:”text”,”isStruckthrough”:false},{“title”:”Tapestry”,”isHidden”:false,”identifier”:”1DE035AD-B309-44BD-826F-684325B3E48C”,”type”:”text”,”isStruckthrough”:false},{“title”:”Document”,”isHidden”:false,”identifier”:”189897A6-D44F-4BB7-A32E-948CFD7AAFD5″,”type”:”text”,”isStruckthrough”:false},{“title”:”Technical”,”isHidden”:false,”identifier”:”3BD5F848-DE80-47F1-A29A-B2EF1492CF07″,”type”:”text”,”isStruckthrough”:false},{“title”:”We are looking at various”,”isHidden”:false,”identifier”:”EE55509C-CC21-435A-9AB0-FDF93911A523″,”type”:”text”,”isStruckthrough”:false},{“title”:”Publishing”,”isHidden”:false,”identifier”:”A061DE71-A13E-4045-8759-13BEEF66F226″,”type”:”text”,”isStruckthrough”:false},{“title”:”Visual”,”isHidden”:false,”identifier”:”58DB00D7-865F-4BD6-8002-67962B5DFE00″,”type”:”text”,”isStruckthrough”:false},{“isHidden”:false,”webLinkPath”:”https:\/\/liquidsoftware.atlassian.net\/jira\/software\/projects\/READ\/boards\/2″,”title”:”Development Jira”,”identifier”:”3C773CE1-CC7F-46E2-8C70-0F12CC69E8E9″,”isStruckthrough”:false,”type”:”webLink”,”definition”:””},{“title”:”Reader Library to include not only 
PDF”,”isHidden”:false,”identifier”:”14C6BEC9-B2C3-4127-B555-4DEDACD97B72″,”type”:”text”,”isStruckthrough”:false},{“title”:”Reader”,”isHidden”:false,”identifier”:”20FE51BB-6385-4313-BB6B-10B489693828″,”type”:”text”,”isStruckthrough”:false},{“title”:”The Ring Menu & The Palm for Word Processing Space”,”isHidden”:false,”identifier”:”FD0A8B48-FF0D-49C1-B7F3-C09418674583″,”type”:”text”,”isStruckthrough”:false},{“title”:”to make all this possible”,”isHidden”:false,”identifier”:”1912D88E-252A-4CDA-B163-E93F500E35C3″,”type”:”text”,”isStruckthrough”:false},{“title”:”Physical”,”isHidden”:false,”identifier”:”41C49110-060B-44B7-9624-1BE7653FB14A”,”type”:”text”,”isStruckthrough”:false},{“title”:”XR Knowledge Space Walkthrough”,”isHidden”:false,”identifier”:”883C92FD-CD18-4EEE-8235-11309CECB7AF”,”type”:”text”,”isStruckthrough”:false},{“title”:”Various Notes”,”isHidden”:false,”identifier”:”E94D61B5-E30A-487A-BE7A-F705CC89F440″,”type”:”text”,”isStruckthrough”:false},{“title”:”Marked”,”isHidden”:false,”identifier”:”795587DC-84A5-4037-8126-9AA270BAAAE3″,”type”:”text”,”isStruckthrough”:false},{“title”:”with a focus on our”,”isHidden”:false,”identifier”:”B783BE11-4B4B-43AE-A9E8-C1B805275A27″,”type”:”text”,”isStruckthrough”:false},{“title”:”and”,”isHidden”:false,”identifier”:”76C0B6B6-875D-41F3-BC48-57264889A919″,”type”:”text”,”isStruckthrough”:false},{“title”:”aspects.”,”isHidden”:false,”identifier”:”80C5601A-3DB7-4C4E-8C78-5E049970F08B”,”type”:”text”,”isStruckthrough”:false},{“title”:”Our”,”isHidden”:false,”identifier”:”2F6607F0-18A5-40BD-9C04-3BB9D1B13901″,”type”:”text”,”isStruckthrough”:false},{“title”:”XR Diagrams in Traditional Documents”,”isHidden”:false,”identifier”:”B493FC9E-30B4-4C11-9A03-C3FEF0ABAA0F”,”type”:”text”,”isStruckthrough”:false},{“title”:”Posture in XR”,”isHidden”:false,”identifier”:”0C6395F0-39E3-4D3A-B20E-C323E593FE7A”,”type”:”text”,”isStruckthrough”:false},{“title”:”Peter Wasilko”,”isHidden”:false,”identifier”:”48C7278B-EA99-4386-96A5-4B487081F8C6″,”type”:”text”,”isStruckthrough”:false},{“title”:”Design Notes 2 April”,”isHidden”:false,”identifier”:”CF23B6A8-05DA-4651-B436-40D26140B263″,”type”:”text”,”isStruckthrough”:false},{“title”:”Hypertextual Elements”,”isHidden”:false,”identifier”:”5F348810-D413-4387-A038-FF481FA0E6EA”,”type”:”text”,”isStruckthrough”:false},{“title”:”Journal 2024″,”isHidden”:false,”identifier”:”67B9BAED-C085-436D-89F9-7147E2F100F4″,”type”:”text”,”isStruckthrough”:false},{“title”:”Future Text Lab”,”isHidden”:false,”identifier”:”AE6A6998-AFD5-4069-B29E-D68221D86F6A”,”type”:”text”,”isStruckthrough”:false},{“title”:”Circular Desk for Authoring”,”isHidden”:false,”identifier”:”19228E18-66AC-43DC-88AC-6113D0F65D22″,”type”:”text”,”isStruckthrough”:false},{“title”:”Hypertextuality and Virtual Reality”,”isHidden”:false,”identifier”:”D6CAC6E2-449D-45BB-BC75-035387B9B3B7″,”type”:”text”,”isStruckthrough”:false},{“title”:”legends”,”isHidden”:false,”identifier”:”B917D118-5836-4033-82ED-4FD70F74DF8E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Bob Stein”,”isHidden”:false,”identifier”:”97DA5EFA-E179-4D1E-B906-E00EB69AA320″,”type”:”text”,”isStruckthrough”:false},{“title”:”Metadata”,”isHidden”:false,”identifier”:”BD9F51B9-9EFE-4651-B551-F491B4558B31″,”type”:”text”,”isStruckthrough”:false},{“title”:”Feb 2025 Focus for Reader prompt”,”isHidden”:false,”identifier”:”EF496879-3271-43CB-8B04-66047DBA5856″,”type”:”text”,”isStruckthrough”:false},{“title”:”Journal 
2023″,”isHidden”:false,”identifier”:”38866374-1BE6-4671-82FF-AC7C1EB9DC24″,”type”:”text”,”isStruckthrough”:false},{“title”:”thoughts”,”isHidden”:false,”identifier”:”0CCA7A6E-3515-423A-BC41-07D618D5ED6E”,”type”:”text”,”isStruckthrough”:false},{“title”:”We look at the “,”isHidden”:false,”identifier”:”24C33A05-6E80-4B71-A1CD-DCB1EF8204B7″,”type”:”text”,”isStruckthrough”:false},{“title”:”Cube”,”isHidden”:false,”identifier”:”E1EA557F-8F5B-4923-B177-162637293A22″,”type”:”text”,”isStruckthrough”:false},{“title”:”approach.”,”isHidden”:false,”identifier”:”EBD599EB-A326-4EB3-88DE-87B7185AA6F8″,”type”:”text”,”isStruckthrough”:false},{“title”:”Spatial Hypertext”,”isHidden”:false,”identifier”:”3A13D918-523A-45E8-A629-2A9198ABE1AB”,”type”:”text”,”isStruckthrough”:false},{“title”:”Cube Interactions”,”isHidden”:false,”identifier”:”43F22978-F312-4294-BC8E-7103EE9BDDCD”,”type”:”text”,”isStruckthrough”:false},{“title”:”Mark Anderson”,”isHidden”:false,”identifier”:”AA1873CD-B4AB-4D44-90D8-9F65CBCFDEFB”,”type”:”text”,”isStruckthrough”:false},{“title”:”Design Notes”,”isHidden”:false,”identifier”:”0790ABB6-498B-4770-AFB4-D357D533A8C0″,”type”:”text”,”isStruckthrough”:false},{“title”:”Volume & Knowledge Object Design”,”isHidden”:false,”identifier”:”F6F33E1A-DD36-47BF-80A5-6ACF10BFCC3E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Desk and Desk-Free”,”isHidden”:false,”identifier”:”F6850AC6-DD9B-4C30-B6B7-2E46920A468E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Alan Kay”,”isHidden”:false,”identifier”:”E1FD6AB3-4FAD-4565-B25E-155E43A0C265″,”type”:”text”,”isStruckthrough”:false},{“title”:”Knowledge Space (Shanghai thoughts)”,”isHidden”:false,”identifier”:”8B77E126-B343-4856-AB5B-ADFFB584F79A”,”type”:”text”,”isStruckthrough”:false},{“title”:”Agreements”,”isHidden”:false,”identifier”:”ABD5C3C3-F960-4DDA-9039-4AA274946F65″,”type”:”text”,”isStruckthrough”:false},{“title”:”Data Processes”,”isHidden”:false,”identifier”:”70554F0F-92D9-4FE6-A916-CAC598FAE5B6″,”type”:”text”,”isStruckthrough”:false},{“title”:”We document our”,”isHidden”:false,”identifier”:”C4EE6197-3D99-474F-82C3-9ECEC4C1A1DB”,”type”:”text”,”isStruckthrough”:false},{“title”:”Image Inclusion for Author”,”isHidden”:false,”identifier”:”BC20EBCA-A66A-41BB-87A3-BD8B345D9A40″,”type”:”text”,”isStruckthrough”:false},{“title”:”To Do”,”isHidden”:false,”identifier”:”FE635BB3-9703-4E31-9722-D776F0287EE6″,”type”:”text”,”isStruckthrough”:false},{“title”:”Hypertext”,”isHidden”:false,”identifier”:”CC2631DB-7B51-4BA5-B36F-7BC189E8E5E8″,”type”:”text”,”isStruckthrough”:false},{“title”:”Organizing in Author”,”isHidden”:false,”identifier”:”A04FC419-2BF9-43D8-A783-3AD74D161BEF”,”type”:”text”,”isStruckthrough”:false},{“title”:”Authoring in XR 27th of January 2025″,”isHidden”:false,”identifier”:”4DEAEFE2-A7F7-4B76-B391-4939E5786F4B”,”type”:”text”,”isStruckthrough”:false},{“title”:”through improving;”,”isHidden”:false,”identifier”:”B811F27E-D675-4566-B7E6-CE680DFC266C”,”type”:”text”,”isStruckthrough”:false},{“title”:”References”,”isHidden”:false,”identifier”:”28F78E71-9524-4F75-B01C-765D884BE30B”,”type”:”text”,”isStruckthrough”:false},{“title”:”Notes”,”isHidden”:false,”identifier”:”6CD5ADB9-E973-4BB1-BF28-C0ADEE83C61E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Fabien Benetou”,”isHidden”:false,”identifier”:”21356360-F805-4CBC-839F-C48BF1DCDF6D”,”type”:”text”,”isStruckthrough”:false},{“title”:”Jim Strahorn”,”isHidden”:false,”identifier”:”36A90E72-50FD-419D-B095-5B604E78E654″,”type”:”text”,”isStruckthrough”:false},{“title”:”Dimensions of Gestures in 
XR”,”isHidden”:false,”identifier”:”2526C1F6-CD3C-4347-A4E5-3D189D7A6C8A”,”type”:”text”,”isStruckthrough”:false},{“title”:”is to “,”isHidden”:false,”identifier”:”74709DDA-0F90-4531-A643-1FF67403C903″,”type”:”text”,”isStruckthrough”:false},{“title”:”Symposium”,”isHidden”:false,”identifier”:”CA166148-454F-4B31-9087-7779BE4D6EF9″,”type”:”text”,”isStruckthrough”:false},{“title”:”Brandel Zachernuk”,”isHidden”:false,”identifier”:”8121F87D-A3DA-4B21-8A09-79610729701E”,”type”:”text”,”isStruckthrough”:false},{“title”:”Jack Park”,”isHidden”:false,”identifier”:”C8CD5D71-7CB2-4CA6-BD75-54F4F012D38B”,”type”:”text”,”isStruckthrough”:false},{“title”:”Rob Swigart”,”isHidden”:false,”identifier”:”AFD3BEF8-7D10-499A-BA84-B63219826A55″,”type”:”text”,”isStruckthrough”:false},{“title”:”for The Future of Text 6″,”isHidden”:false,”identifier”:”EED92087-48B9-431F-9488-48D4D2509BDD”,”type”:”text”,”isStruckthrough”:false},{“title”:”PDF”,”isHidden”:false,”identifier”:”35EC3ABD-45F3-42F6-B72D-8ACA8120381F”,”type”:”text”,”isStruckthrough”:false},{“title”:”What to author”,”isHidden”:false,”identifier”:”9CC23C69-1BA2-486C-8E04-2F89F45274BD”,”type”:”text”,”isStruckthrough”:false},{“title”:”Ge Li”,”isHidden”:false,”identifier”:”E418435C-7D63-4520-91BB-B9E890638323″,”type”:”text”,”isStruckthrough”:false},{“title”:”Vint Cerf”,”isHidden”:false,”identifier”:”D4A80089-0FFE-4702-8F8B-40C952D3D612″,”type”:”text”,”isStruckthrough”:false},{“title”:”David Millard”,”isHidden”:false,”identifier”:”B3617A7C-2A5B-4E18-A7FE-99E9DB098240″,”type”:”text”,”isStruckthrough”:false}]},”references-index”:{“indexes”:[0,1,2,3,4,5,6]}}