diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..dd84ea78
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,38 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+ - OS: [e.g. iOS]
+ - Browser: [e.g. Chrome, Safari]
+ - Version: [e.g. 22]
+
+**Smartphone (please complete the following information):**
+ - Device: [e.g. iPhone6]
+ - OS: [e.g. iOS8.1]
+ - Browser: [e.g. stock browser, Safari]
+ - Version: [e.g. 22]
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..bbcbbe7d
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.gitignore b/.gitignore
index 2952a641..bb111d62 100644
--- a/.gitignore
+++ b/.gitignore
@@ -169,4 +169,12 @@ cython_debug/
# vs code
.vscode
-*.bin
\ No newline at end of file
+*.bin
+
+.DS_Store
+
+# gpt4all-chat
+CMakeLists.txt.user
+gpt4all-chat/meta/*
+gpt4all-chat/models/*
+build_*
diff --git a/.gitmodules b/.gitmodules
index 371af62e..eb06ee48 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
-[submodule "peft"]
- path = peft
- url = https://github.com/huggingface/peft.git
+[submodule "llama.cpp"]
+ path = gpt4all-chat/llmodel/llama.cpp
+ url = https://github.com/manyoso/llama.cpp.git
diff --git a/eval_data/user_oriented_instructions.jsonl b/eval_data/user_oriented_instructions.jsonl
deleted file mode 100644
index ed6d5d98..00000000
--- a/eval_data/user_oriented_instructions.jsonl
+++ /dev/null
@@ -1,252 +0,0 @@
-{"id": "user_oriented_task_0", "motivation_app": "Grammarly", "instruction": "The sentence you are given might be too wordy, complicated, or unclear. Rewrite the sentence and make your writing clearer by keeping it concise. Whenever possible, break complex sentences into multiple sentences and eliminate unnecessary words.", "instances": [{"input": "If you have any questions about my rate or if you find it necessary to increase or decrease the scope for this project, please let me know.", "output": "If you have any questions about my rate or find it necessary to increase or decrease this project's scope, please let me know."}]}
-{"id": "user_oriented_task_1", "motivation_app": "Grammarly", "instruction": "Analyze the word choice, phrasing, punctuation, and capitalization in the given email. How may the writer of this email sound to the reader? These tones include Disheartening, Accusatory, Worried, Curious, Surprised, Disapproving, Unassuming, Formal, Assertive, Confident, Appreciative, Concerned, Sad, Informal, Regretful, Encouraging, Egocentric, Joyful, Optimistic, and Excited.", "instances": [{"input": "Hi Jen, \nI hope you're well. Can we catch up today? I'd appreciate your input on my presentation for tomorrow's meeting. I'd especially love it if you could double-check the sales numbers with me. There's a coffee in it for you!", "output": "Confident"}]}
-{"id": "user_oriented_task_2", "motivation_app": "Grammarly", "instruction": "Rewrite the given text and correct grammar, spelling, and punctuation errors.", "instances": [{"input": "If you'd told me year ago that today I would finish a marathon, I would of laughed. Your support had a huge affect on me!", "output": "If you'd told me a year ago that today I would finish a marathon, I would have laughed. Your support had a huge effect on me!"}]}
-{"id": "user_oriented_task_3", "motivation_app": "Google Scholar", "instruction": "You are given a paper citation, convert it to the requested citation style.", "instances": [{"input": "Chicago: Vaswani, Ashish, Shazeer, Noam, Parmar, Niki, Uszkoreit, Jakob, Jones, Llion, Gomez, Aidan N., Kaiser, Lukasz, and Illia Polosukhin. \"Attention Is All You Need.\" arXiv, (2017). https://doi.org/10.48550/arXiv.1706.03762.\nMLA:", "output": "Vaswani, Ashish, et al. \"Attention Is All You Need.\" arXiv, 2017, https://doi.org/10.48550/arXiv.1706.03762."}]}
-{"id": "user_oriented_task_4", "motivation_app": "Grammarly", "instruction": "Desk jobs require writing a lot of emails, so it isn't surprising we get tired of repeating ourselves. Come up with several synonyms for the given word.", "instances": [{"input": "Sincerely", "output": "Best regards, All the best, Cheers, Best"}]}
-{"id": "user_oriented_task_5", "motivation_app": "Gmail", "instruction": "If you could help me write an email to my friends inviting them to dinner on Friday, it would be greatly appreciated.", "instances": [{"input": "", "output": "Hi there,\n\nI hope you're all doing well. I'm inviting you over for dinner on Friday night. Please let me know if you can make it. I'll be cooking your favorite dishes!\n\nLooking forward to seeing you,"}]}
-{"id": "user_oriented_task_6", "motivation_app": "Gmail", "instruction": "The tone of the email should be changed and rewritten in a more professional manner.", "instances": [{"input": "Subject: Asking for Vacation Time\nHi [Employer], \nI hope all is well. I am writing to request a day off on [date]. I have already taken care of my work responsibilities for that day and will be available to answer any questions or concerns you may have. \nThank you for your time, \n[Your name]", "output": "Subject: Request for Vacation Time\nHello [Employer],\nI am writing to request vacation time from work. I would like to take [Number of Days] off, starting on [Date]. I will be sure to complete all of my work before taking my vacation, and I will be available by email if anything comes up while I'm away. Thank you for your consideration.\n[Your name]"}]}
-{"id": "user_oriented_task_7", "motivation_app": "Twitter", "instruction": "Write a social media post about the call for collaboration on a crowdsourcing project in a persuasive way.", "instances": [{"input": "", "output": "Looking for collaborators on a new crowdsourcing project! This is a great opportunity to get involved in something innovative and impactful. If you're interested, please let me know."}]}
-{"id": "user_oriented_task_8", "motivation_app": "IMDB", "instruction": "My favorite witty review of The Rings of Power series is this:", "instances": [{"input": "", "output": "The Rings of Power series is one of the best things to come out of Middle Earth since, well, the Ring itself! If you're a fan of fantasy, adventure, and intrigue, then this is definitely the series for you. The characters are richly drawn and the plot is twisty and full of surprises. I can't recommend it highly enough!"}]}
-{"id": "user_oriented_task_9", "motivation_app": "you.com (YouWrite)", "instruction": "A helpful writing exercise is writing a story without using adjectives or adverbs. This will help you learn to use other words and phrases to describe what you are trying to say. Here are some examples:", "instances": [{"input": "", "output": "The trees were silhouetted against the sky. The wind was blowing. The leaves were rustling. The grass was waving. The flowers were nodding. The birds were singing. The bees were buzzing. The squirrels were running. The rabbits were hopping. The deer were grazing. The wolves were howling. The moon was rising. The stars were shining."}]}
-{"id": "user_oriented_task_10", "motivation_app": "Google Search", "instruction": "Come up with some search queries on google about coding stuff.", "instances": [{"input": "", "output": "- binary search\n- pop in javascript\n- audio chunking algorithm"}]}
-{"id": "user_oriented_task_11", "motivation_app": "StackOverflow", "instruction": "Discover the most relevant StackOverflow questions for your search term. And output a list of their URLs.", "instances": [{"input": "remove item in array", "output": "1- https://stackoverflow.com/questions/5767325/how-can-i-remove-a-specific-item-from-an-array\n2- https://stackoverflow.com/questions/19544452/remove-last-item-from-array\n3- https://stackoverflow.com/questions/15292278/how-do-i-remove-an-array-item-in-typescript\n4- https://stackoverflow.com/questions/3954438/how-to-remove-item-from-array-by-value\n5- https://stackoverflow.com/questions/40462369/remove-item-from-stored-array-in-angular-2\n6- https://stackoverflow.com/questions/16994212/remove-an-item-from-array-using-underscorejs"}]}
-{"id": "user_oriented_task_12", "motivation_app": "GeeksforGeeks", "instruction": "You are given an algorithm or a search query about coding. Look for a code snippet related to the search query.", "instances": [{"input": "Python loop", "output": "#Print each fruit in a fruit list:\nfruits = [\"apple\", \"banana\", \"cherry\"]\nfor x in fruits:\n print(x)"}]}
-{"id": "user_oriented_task_13", "motivation_app": "GeeksforGeeks", "instruction": "You are provided with a description of the code, specifications, or prompts. Your help is needed in automating code generation to speed up the development process.", "instances": [{"input": "python function to determine whether a number is prime.", "output": "def is_prime(n):\n if n == 1:\n return False\n for i in range(2, int(n**0.5)+1):\n if n % i == 0:\n return False\n return True"}]}
-{"id": "user_oriented_task_14", "motivation_app": "Google Search", "instruction": "When you're stuck on a coding problem, the best place to look for help is in the documentation. Read the docs carefully and see if there's a section that covers your specific issue. Often, the answer will be right there in front of you. If not, try searching for key terms related to your problem. Come up with the useful links about the given search term.", "instances": [{"input": "sql find duplicate values", "output": "- https://iteration-utilities.readthedocs.io/en/latest/generated/duplicates.html\n- https://orange3-text.readthedocs.io/en/latest/widgets/duplicatedetection.html\n- https://beets.readthedocs.io/en/v1.3.14/plugins/duplicates.html\n- https://doma.readthedocs.io/en/latest/sql/\n- https://koalas.readthedocs.io/en/latest/reference/api/databricks.koalas.DataFrame.drop_duplicates.html\n- https://sqlparse.readthedocs.io/en/latest/analyzing/"}]}
-{"id": "user_oriented_task_15", "motivation_app": "MS Excel", "instruction": "We have entered the home supplies budget in the following table. Calculate the last column with the given function and rewrite the completed table.\nNote that, columns in a row are separated using a comma, and rows are separated using a \"|\".", "instances": [{"input": "Item, Qty, Price, Subtotal | mango, 5, 500, ? | apple, 4, 150, ? | oil, 2, 1500, ? \nFunction: column2 * column 3", "output": "Item, Qty, Price, Subtotal | mango, 5, 500, 2500 | apple, 4, 150, 600 | oil, 2, 1500, 3000"}]}
-{"id": "user_oriented_task_16", "motivation_app": "MS Excel", "instruction": "Please write the Excel function name associated with each description.", "instances": [{"input": "- Returns the number of days between two dates\n- Returns the starting position of a text string within another text string.\n- Returns the number in the middle of the set of given numbers", "output": "- DAYS\n- FIND\n- MEDIAN"}]}
-{"id": "user_oriented_task_17", "motivation_app": "MS Word", "instruction": "What are the steps to using random (yet coherent) text as filler content within your Word document?", "instances": [{"input": "", "output": "You can quickly add several paragraphs to your document by using =rand(). These steps will guide you through using it:\nPut your Microsoft Word cursor (not your mouse cursor) where you would like random text to appear. It is necessary to place the cursor at the beginning of a line. The cursor just needs to be at the beginning, not on a blank line.\n2- Type =rand()and press Enter. You will now see random text added to your document."}]}
-{"id": "user_oriented_task_18", "motivation_app": "MS Powerpoint", "instruction": "Design a soothing pastel color palette for your slides. Pastel colors generally come across as pretty and delicate, so you\u2019ll want to make sure your presentation calls for a similar mood. Choose up to five colors or color codes.", "instances": [{"input": "", "output": "Color codes: #FDB3AE #CAE4E2 #FBDF74"}]}
-{"id": "user_oriented_task_19", "motivation_app": "MS Excel", "instruction": "I bought two shirts from the store and each one cost me $10. I also bought a pair of pants and they cost me $20. Then, I bought a dress and it cost me $30. Also, I bought a pair of shoes and they cost me $40. Lastly, I bought a jacket and it cost me $50.\n\nMake a table chart showing items, the number, and the price of these items. Separate columns with \"|\".", "instances": [{"input": "", "output": "Item | Number | Price\n\nShirts | 2 | $10\nPants | 1 | $20\nDress | 1 | $30\nShoes | 1 | $40\nJacket | 1 | $50"}]}
-{"id": "user_oriented_task_20", "motivation_app": "ludwig.guru", "instruction": "Find sentences from reliable sources such as the Guardian or Forbes that contain the exact match for the given sentence or phrase. Also, mention the source of each sentence.", "instances": [{"input": "There are great options", "output": "- Stella McCartney's faux fur has fashionable, luxury and ethical credentials and there are great options at Dries Van Noten too. (Independent)\n- And in the capital, Funchal, there are great options for self-caterers; I stayed in a beach house in the old town, one of a collection of seafront villas (funchalcottages.co.uk). (The Guardian - Travel)\n- City by city around the world, there are great options for shoppers to bring home not just special goods, but special memories. (Forbes)\n- Yes, there are great options out there that aren't owned by any big company (yet), but Twitter has an opportunity here to act as an app Switzerland, and attract some ad business while it's at it. (TechCrunch)"}]}
-{"id": "user_oriented_task_21", "motivation_app": "ludwig.guru", "instruction": "During writing, we added an asterisk for the word that did not come to mind. You will need to provide several examples to demonstrate all the words that can be used in the sentence instead of the asterisk.", "instances": [{"input": "we * to know", "output": "1- We need to know.\n2- We want to know.\n3- we wanted to know.\n4- We deserve to know why.\n5- We ought to know."}]}
-{"id": "user_oriented_task_22", "motivation_app": "ludwig.guru", "instruction": "Think of alternatives and paraphrases for the underlined word.", "instances": [{"input": "what we have _expected", "output": "+ what we have projected \n+ what we have anticipated\n+ what we have waited\n+ what we have hoped for\n+ what we have accustomed"}]}
-{"id": "user_oriented_task_23", "motivation_app": "LinkedIn", "instruction": "Design a skill assessment\u00a0questioner for R (Programming Language).", "instances": [{"input": "", "output": "Q1. How does a matrix differ from a data frame?\nQ2. What value does this statement return?\nunclass(as.Date(\"1971-01-01\"))\nQ3. What do you use to take an object such as a data frame out of the workspace?\nQ4. What function joins two or more column vectors to form a data frame?\nQ5. What is the value of y in this code?\nx <- NA\ny <- x/1"}]}
-{"id": "user_oriented_task_24", "motivation_app": "LinkedIn", "instruction": "Write a template for First-Person LinkedIn profile summary.", "instances": [{"input": "", "output": "[Degree] in [Area of Study] from [School], I entered the [Industry] world to explore my passion for ____, ____, and ____. Currently, I work as a [Job Title] for [Company], where I'm focused on ____, ____, and ____."}]}
-{"id": "user_oriented_task_25", "motivation_app": "LinkedIn", "instruction": "Write a LinkedIn post to announce that you have accepted a new job offer.", "instances": [{"input": "", "output": "\u201cI\u2019m excited beyond words to share with you my decision to accept the role of Marketing Director at the XYZ Company!\nI couldn\u2019t have had this opportunity if not for the help of my friend, [name], who shared this job opportunity with me, and my former boss, [boss\u2019s name], for her stellar recommendation and guidance.\nI happily look forward to starting this new journey and growing professionally with my new family\u2014the marketing rock stars of XYZ Company.\u201d"}]}
-{"id": "user_oriented_task_26", "motivation_app": "LinkedIn", "instruction": "Make a list of desirable Skills for software engineers to add to LinkedIn.", "instances": [{"input": "", "output": "Test-Driven Development\nObject-Oriented Programming (OOP)\nMVC\nREST API\nTechnical Writing\nDebugging"}]}
-{"id": "user_oriented_task_27", "motivation_app": "LinkedIn", "instruction": "You will be asked to answer the following question as part of the LinkedIn Skill assessments.", "instances": [{"input": "You find that your project has a tag and branch both named push-notifications, which causes confusion when trying to print out given reference. How can you specify which branch you want to look at?", "output": "use git show refs/head/push-notifications"}]}
-{"id": "user_oriented_task_28", "motivation_app": "Amazon", "instruction": "Take the title of the product and extract its attributes. The attributes in this case refer to the characteristics of the products, such as their brand and color. Your answer must a list following the format of \"attribute: value\".", "instances": [{"input": "SAMSUNG 980 PRO SSD 2TB PCIe NVMe Gen 4 Gaming M.2 Internal Solid State Hard Drive Memory Card, Maximum Speed, Thermal Control, MZ-V8P2T0B", "output": "Brand: Samsung, Style: 980 PRO, Capacity: 2TB, Hard Disk Interface: NVMe, Hard Disk Description: Solid State Hard Drive"}]}
-{"id": "user_oriented_task_29", "motivation_app": "Amazon", "instruction": "You should write a Product Description that will persuade the buyer that the product is beneficial.", "instances": [{"input": "Flipslide Game, Electronic Handheld Game | Flip, Slide, and Match the Colors to Beat the Clock - 4 Game Modes - Multiplayer Fun", "output": "Flipside is an addictive, multi-player puzzle game of skill! Flip, slide & match the colors to beat the clock. Four thrilling game modes including, Speed Mode, Multiplayer Mode, Level Mode and Memory mode make challenging yourself or playing with friends a fun distraction for everyone! Play at home, on the road, or wherever you'd like! Recommended for ages 8+."}]}
-{"id": "user_oriented_task_30", "motivation_app": "Amazon", "instruction": "What other Amazon products might interest someone who visited the given product?", "instances": [{"input": "Zeroll Zerolon Hardcoat Anodized Commercial Ice Cream Scoop with Unique Liquid Filled Heat Conductive Handle Easy Release Made in USA, 1.5-Ounce, Black", "output": "- Ice Scoop, Fashion Ice Cream Scoop, Premium Stainless Steel Cookie Scoop, Dog Food Scoop, Sturdy Flour Scoop, Utility Candy Scoop, Dishwasher Safe (Silver/8oz/9 Inch)\n- AmazonCommercial Non-Stick Heat Resistant Silicone Spatula Set, 2 Small & 2 Large Spatulas, Multicolor, Pack of 4\n- Winco Acrylic 4-Hole Ice Cream Cone Stand,Clear,Medium\n- Zeroll 1065FS-ZT Original Zelato Tubmate Aluminum Gelato Spade for Leveling Tubs Packing and Hand-Mixing, Self-Defrosting Fluid-Filled Handle Longer-Lasting Thicker Blade , 9-Inch, Gray"}]}
-{"id": "user_oriented_task_31", "motivation_app": "Amazon", "instruction": "Compare the given item with similar products based on its properties such as price, rating, etc. For product comparison, use a table and separate the columns with \"|\".", "instances": [{"input": "Item: iBayam Journal Planner Pens Colored Pens Fine Point Markers Fine Tip Drawing Pens Porous Fineliner Pen for Journaling Writing Note Taking Calendar Coloring Art Office Back to School Supplies, 18 Color", "output": "Product | this item |\u00a0Fineliner Color Pens Set, 0.38mm Fine Tip Pens, Porous Fine Point Makers Drawing Pen, Perfect for Writing in Bullet Journal and Planner, 24 Assorted Colors |\u00a0iBayam Fineliner Pens, 24 Bright Colors Fine Point Pens Colored Pens for Journaling Note Taking Writing Drawing Coloring Planner Calendar, Office School Teacher Classroom Fine Tip Marker Pens Supplies |\u00a024 colors paint pens for journaling, Colored pens for note taking 0.4mm fineliner cute fine point smooth writing pens, Fast dry felt tip pens agenda office school teacher art markers drawing supplies\nCustomer Rating | 4.7 out of 5 stars (87557) |\u00a0 4.5 out of 5 stars (2099) | 4.7 out of 5 stars (8666) | 4.5 out of 5 stars (396) \nPrice | $6.39 | $7.66 | $8.99 | $10.99\nSold By | Qi US | ai-natebok | Qi US | MingNor\nColor | Black, Grey, Red, Blue, Magenta, Pink, Purple, Violet, Pale Yellow, Yellow, Orange, Raw Sienna, Sap Green, C Green, O Green, Lake Blue, Burnt Sienna, Crimson | 24 Color | 24 Colors | 24 colors\nItem Dimensions | 6.6 x 6.2 x 0.6 inches | 6.3 x 5.8 x 0.6 inches | 6.6 x 6.2 x 0.6 inches | 6.22 x 0.31 x 0.31 inches\nPoint Type | Fine | Extra Fine | Fine | Fine"}]}
-{"id": "user_oriented_task_32", "motivation_app": "Amazon", "instruction": "Write a review based on the given information.", "instances": [{"input": "- Product: Persil Discs Laundry Detergent Pacs, Original Scent, High Efficiency (HE) Compatible, Laundry Soap, 62 Count\n- Sentiment: Positive", "output": "I love this brand of soap it's always gotten my kids' clothes clean. When I'm done washing with the soap my kids is closed look feel and smell and feel clean and I like that that's what I want is to be able to wash my kids' clothes and not stress that it didn't come out clean or it's still kind of stinks or it just didn't seem like it washed very well. And would this pod of soap it's really easy to use I just put two pods in the wash and if it's a big wash and my kids' clothes are really dirty I just use three and it gets my kids's clothes washed the first time and I really like that. I think it's definitely worth the money. I am glad that I purchased it. It works so well. I love using it. It's a really good buy for me and for getting my family's clothes clean especially my kids' clothes clean. Great buy!"}]}
-{"id": "user_oriented_task_33", "motivation_app": "Netflix", "instruction": "Write a short description about the given movie or series.", "instances": [{"input": "The Witcher (2019)", "output": "Geralt of Rivia, a mutated monster-hunter for hire, journeys toward his destiny in a turbulent world where people often prove more wicked than beasts."}]}
-{"id": "user_oriented_task_34", "motivation_app": "Netflix", "instruction": "Think of some series or movies that would be enjoyable for someone who liked the given series.", "instances": [{"input": "Squid Game", "output": "- 3%\n- Clickbait\n- Circle\n- Hellbound\n- Sweet Home"}]}
-{"id": "user_oriented_task_35", "motivation_app": "Netflix", "instruction": "Give examples of popular shows and movies in the genre.", "instances": [{"input": "Genre: Crime TV Shows", "output": "Breaking Bad, Peaky Blinders, Arrow, Money Heist, Better Call Saul, DAHMER, The Blacklist, Lucifer, NCIS"}]}
-{"id": "user_oriented_task_36", "motivation_app": "Netflix", "instruction": "In what genres does the given movie or series fall?", "instances": [{"input": "Manifest", "output": "Mystery, Sci-Fi, Drama"}]}
-{"id": "user_oriented_task_37", "motivation_app": "Netflix", "instruction": "Give a brief description of the given category of movies and shows.", "instances": [{"input": "Period Dramas", "output": "Want to escape the contemporary world? Explore these historical dramas and shows from the time that have magnificent art and costume design, lots of drama, and a lot of history."}]}
-{"id": "user_oriented_task_38", "motivation_app": "Overleaf", "instruction": "Provide an example of how a table of contents can be generated automatically in a LATEX document. Make sure your example contains the necessary commands.", "instances": [{"input": "", "output": "To create the table of contents is straightforward, the command \\tableofcontents does the job. Sections, subsections and chapters are included in the table of contents. To manually add entries, for example when you want an unnumbered section, use the command \\addcontentsline as shown in the following example:\n\n\\documentclass{article}\n\\usepackage{blindtext}\n\\usepackage{titlesec}\n\\title{Sections and Chapters}\n\\author{Gubert Farnsworth}\n\\date{ }\n\\begin{document}\n\\maketitle\n\\tableofcontents\n\\section{Introduction}\nThis is the first section.\n\\blindtext\n\\addcontentsline{toc}{section}{Unnumbered Section}\n\\section*{Unnumbered Section}\n\\blindtext\n\\section{Second Section}\n\\blindtext\n\\end{document}"}]}
-{"id": "user_oriented_task_39", "motivation_app": "Overleaf", "instruction": "Imagine you have a FEN notation of a chessboard. How can you draw a board to show this state of the chessboard in LATEX? Create a LATEX code that includes all the required commands and packages.", "instances": [{"input": "", "output": "The following example uses the setfen key in a \\chessboard command to demonstrate a FEN example taken from the skak package documentation:\n\n\\documentclass{article}\n\\usepackage{xskak}\n\\begin{document}\n\\newchessgame\n\\chessboard[setfen=r5k1/1b1p1ppp/p7/1p1Q4/2p1r3/PP4Pq/BBP2b1P/R4R1K w - - 0 20]\n\\end{document}"}]}
-{"id": "user_oriented_task_40", "motivation_app": "Overleaf", "instruction": "What is the output of the given LATEX code?", "instances": [{"input": "\\documentclass{article}\n\\begin{document}\nFirst document. This is a simple example, with no \nextra parameters or packages included.\n\\end{document}", "output": "First document. This is a simple example, with no extra parameters or packages included."}]}
-{"id": "user_oriented_task_41", "motivation_app": "Overleaf", "instruction": "You should find a few LATEX templates appropriate for the task at hand and list the links to them.", "instances": [{"input": "Homework Assignment", "output": "- https://www.overleaf.com/latex/templates/gijss-homework-template/xrhhfgqcfbft\n- https://www.overleaf.com/latex/templates/homework-solutions/xykkzgnbzmxp\n- https://www.overleaf.com/latex/templates/ocen-cfd-article-template/mtpnnssgzjzw\n- https://www.overleaf.com/latex/templates/pitt-state-physics-homework-template/wdsxknmntnxk"}]}
-{"id": "user_oriented_task_42", "motivation_app": "Overleaf", "instruction": "Write a simple guide for uploading the bibliography database\n on Overleaf.", "instances": [{"input": "", "output": "If you already have a bibliography file that you need to use in an Overleaf project, you can upload it. Click the Upload icon located on top of the left panel. A dialog box will appear, you can either drag and drop the .bib file(s) or choose select from your computer to upload file(s) from your device. Once the upload process is finished you can include the .bib file in your main .tex document."}]}
-{"id": "user_oriented_task_43", "motivation_app": "Spotify", "instruction": "Choose Spotify playlists that match the given mood. Provide a link to these playlists for easy access.", "instances": [{"input": "Feeling Good", "output": "- https://open.spotify.com/playlist/37i9dQZF1DX9XIFQuFvzM4\n- https://open.spotify.com/playlist/37i9dQZF1DXcEKFjZJYZcc"}]}
-{"id": "user_oriented_task_44", "motivation_app": "Spotify", "instruction": "Curate a Spotify playlist based on the vibe. Publish this playlist as a song list.", "instances": [{"input": "Vibe: coding on weekend", "output": "- Candles by Jon Hopkins\n- Train from Paris by Mindtrap\n- Intro by The xx\n- Take Me Back by WYS\n- Riverline by Tor\n- The Dream by SwuM"}]}
-{"id": "user_oriented_task_45", "motivation_app": "Spotify", "instruction": "How would you describe the meaning of this lyrics? Describe some of the metaphors.", "instances": [{"input": "My lover\u2019s got humor\nShe\u2019s the giggle at a funeral\nKnows everybody\u2019s disapproval\nI should\u2019ve worshipped her sooner", "output": "Instead of using a straight-forward line like, \u201cshe is funny,\u201d the writer opts for the metaphoric version, \u201cshe\u2019s the giggle at a funeral.\u201d This is a powerful metaphor stating that the writer's lover is so full of humor that even in really sad moments, she finds a way to lighten the moment."}]}
-{"id": "user_oriented_task_46", "motivation_app": "Spotify", "instruction": "Suggest some Audiobooks for First-Time Listeners. You can attract them by choosing popular works.", "instances": [{"input": "", "output": "+ Harry Potter and the Sorcerer's Stone By J.K. Rowling\n+ The Vanishing Half by Brit Bennett\n+ The Fellowship of the Ring by J.R.R. Tolkien\n+ And Then There Were None by Agatha Christie.\n+ The Woman in Cabin 10 by Ruth Ware.\n+ The Hitchhiker's Guide to the Galaxy by Douglas Adams.\n+ Spinning Silver by Naomi Novik.\n+ Good Omens by Terry Pratchett and Neil Gaiman.\n+ Charlotte's Web by E. B."}]}
-{"id": "user_oriented_task_47", "motivation_app": "Spotify", "instruction": "Make a list of the most popular podcasts.", "instances": [{"input": "", "output": "+ Top Podcasts\n+ The Joe Rogan Experience\n+ Distractible\n+ Call Her Daddy\n+ Huberman Lab\n+ Anything Goes with Emma Chamberlain\n+ Stuff You Should Know\n+ The Psychology of your 20\u2019s\n+ On Purpose with Jay Shetty\n+ Armchair Expert\n+ Archetypes\n+ Rachel Maddow Presents: Ultra\n+ The Mel Robbins Podcast\n+ Lex Fridman Podcast\n+ The Daily\n+ Think Fast, Talk Smart: Communication Techniques\n+ TED Talks Daily"}]}
-{"id": "user_oriented_task_48", "motivation_app": "Reddit", "instruction": "You should take the time to read the situation and problem carefully and give the best advice you can.", "instances": [{"input": "I'm roadtripping down New Zealand with a friend and he is driving me insane. He hasn't enjoyed all the tours we've been on and doesn't want to chat about ANYTHING. We're a week into our trip and this is the quietest I've ever been when hanging out with another person as he rebuffs all my attempts at conversation. He wears his airpods in the car and around the accommodation - not that I've kept trying to talk to him. Honestly, he is bringing some seriously bad vibes to everything and it's ruining my trip.\nWe don't have anything booked in yet for the South Island, so I'm planning to ditch him in Wellington at the ferry. Probably book my own car in Picton and just relax alone for the rest of the trip. (Just thinking about it is such sharp relief that it makes me feel emotional. I'll pay whatever price I have to just to get away from him.)\nObviously telling him he is ruining my trip with his negative energy isn't the best way to approach this.\nAny advice for the best approach so we can split amicably? Or at least not have an argument that ends with very hurt feelings?", "output": "I would be as honest as you can while being nice. Next time you\u2019re both in the hotel or wtv just say \u201chey man, I was thinking about when we get to X\u2014it seems like we like to travel really differently and I think it\u2019s stressing us both out, so how do you feel about splitting up after that stop?\u201d"}]}
-{"id": "user_oriented_task_49", "motivation_app": "CNN News", "instruction": "You are given a description that provides a set of facts or a scenario. It is up to you to craft a story from these facts and scenarios. The missing pieces must be filled in with imaginative but logical information.", "instances": [{"input": "Ten European football teams \u2013 the Netherlands, England, Belgium, Denmark, France, Germany, Norway, Sweden, Switzerland and Wales \u2013 will participate in a season-long \u201cOneLove\u201d campaign promoting inclusion and opposing discrimination.", "output": "Every country except Sweden and Norway has qualified for the 2022 World Cup in Qatar and each captain of these eight nations will wear a distinctive OneLove armband \u2013 which features a heart containing colors from all backgrounds \u2013 during the tournament.\nThe Netherlands FA, which is spearheading the campaign, chose the colors to represent all heritages, backgrounds, genders and sexual identities; the armband will be worn in Qatar where same-sex relationships are a criminal offense.\nSweden and Norway will participate in the initiative during the upcoming Nations League matches, while England will also wear black armbands during both its UEFA Nations League matches to mark the death of Queen Elizabeth II.\n\u201cThis is an important message which suits the game of football: on the field everybody is equal and this should be the case in every place in society. With the OneLove band we express this message,\u201d said Virgil van Dijk, the Netherlands captain.\n\u201cOn behalf of the Dutch team I have been wearing this band for quite a while now. It is good to see that other countries are joining this initiative.\u201d\nOneLove was founded in the Netherlands in 2020 to emphasize that all football fans have at least one thing in common \u2013 their love of football \u2013 and to speak out against any form of discrimination.\nAs well as focusing on public messaging, the initiative has also developed to offer diversity training to grassroots club.\n\u201cOur love of football unites us all. No matter where you come from, what you look like and who you love. Football is there for everyone and our sport must stand up for the people across the world who face discrimination and exclusion,\u201d said Germany captain Manuel Neuer.\n\u201cI am proud to be sending out this message with my colleagues from the other national teams. Every single voice counts.\u201d\nIn June, England\u2019s captain Harry Kane revealed that he had discussed taking a collective stance regarding human rights in Qatar with Denmark\u2019s Christian Eriksen and France captain Hugo Lloris.\n\u201cI am honored to join my fellow national team captains in supporting the important OneLove campaign,\u201d he said on Tuesday.\n\u201cAs captains we may all be competing against each other on the pitch, but we stand together against all forms of discrimination. This is even more relevant at a time when division is common in society. Wearing the armband together on behalf of our teams will send a clear message when the world is watching.\u201d"}]}
-{"id": "user_oriented_task_50", "motivation_app": "Google Search", "instruction": "Based on the given query, suggest some related search queries.", "instances": [{"input": "learning french", "output": "learning french for beginners\nearning french online\nlearning french free\nfrench language course\nlearning french review\nhow to learn french on your own\nbest way to learn french online"}]}
-{"id": "user_oriented_task_51", "motivation_app": "Google Scholar", "instruction": "Please suggest a few papers to consider based on the search term given. The names of the papers should be listed.", "instances": [{"input": "Topic: scaling law + machine learning", "output": "- Learning to limit data collection via scaling laws: A computational interpretation for the legal principle of data minimization\n- Data and parameter scaling laws for neural machine translation\n- Explaining neural scaling laws\n- Scaling laws for autoregressive generative modeling\n- Scaling laws for neural language models"}]}
-{"id": "user_oriented_task_52", "motivation_app": "Gmail", "instruction": "Prepare an email signature template for the given person or company.", "instances": [{"input": "a graduate student of computer science", "output": "[Your name]\n[Your degree] in Computer Science\n[Your university]\n\nWebsite: [Your website]\nORCID ID: [Your ORCID ID]"}]}
-{"id": "user_oriented_task_53", "motivation_app": "yelp", "instruction": "React properly to reviews from your customers. In your responses, you can highlight your business' policies and customer service.", "instances": [{"input": "Another repeat visit and no disappointment from us. Great place for breakfast or brunch - and you won't need to eat again for awhile. The restaurant is close to the Tenderloin so be aware of your surroundings.\nWe enjoyed the Succotash Omelet (at least my wife enjoyed it), the Pulled Pork Benedict and the Hangtown Fry. Portions are large and pricing is fair. I wish we could have managed a Beignet Flight!\nDefinitely a spot to seek out. It can be crowed at times so be patient. Well worth any wait.", "output": "Hi [customer's name], thank you so much for the wonderful feedback. We\u2019re happy to hear that you enjoyed your experience with us and that you love [restauran't name] so much! We\u2019re beyond thankful for your support and can\u2019t wait to see you back again soon! Thanks!"}]}
-{"id": "user_oriented_task_54", "motivation_app": "Twitter", "instruction": "Write a good Twitter Bio. Try to cover all the provided main points in your generated bio.", "instances": [{"input": "Name: Jane\nMain points:\n- Architect\n- Interested in Tech\nWritten in first person\nUse casual tone", "output": "I like to think of myself as a kind of architect. I'm interested in the tech side of things, and I love coming up with new ideas for how to make things better."}]}
-{"id": "user_oriented_task_55", "motivation_app": "Twitter", "instruction": "We need to write a thematic tweet to share company news and facts. Please take a careful look at the facts and details provided and create a tweet based on them.", "instances": [{"input": "- news: Applications for summer internships are open now\n- Start with a thematic tie-in to Halloween", "output": "Halloween is right around the corner, and we're celebrating with some exciting news: applications are now open for our summer internships!\nIf you want to work with us this summer, we'd love to hear from you. We're looking for people who are passionate about [company name], and committed to helping us grow. If that sounds like you, check out all the details on our website: [website address]."}]}
-{"id": "user_oriented_task_56", "motivation_app": "Indeed", "instruction": "A job description is a document that clearly states the requirements, duties, responsibilities, and skills required to perform a specific role. You need to write a job description based on the given information. You should describe responsibilities and duties in your job description in a logical order. Remember that potential employees often get their first impression of your company from your job description. So, the job description should convey the attractiveness of your company.", "instances": [{"input": "Company: [Company name]\nJob: Software Engineer\nJob Responsibilities:\nReceive and perform code reviews with other engineers.\nWrite unit, integration, and end-to-end tests to verify functionality using automated testing frameworks such as Pytest.\nWork collaboratively with fellow software engineers to build features requested by business stakeholders.\nParticipate in Agile teams to develop, test, and debug complex data processing pipelines and data analysis applications using big data processing systems such as Apache Spark.\nDiagnose, debug, and perform root cause analysis of issues and defects.\nSkills and experience that will lead to success:\nExperience with a modern software programming language. We use Python, and Pyspark extensively.\nExperience with cloud systems like AWS or Azure\nExperience writing SQL queries \nStrong written and verbal communication skills\nEagerness to work in a highly-collaborative environment\nSome visualization experience such as Tableau or PowerBI is helpful", "output": "Software Engineer\n[company name] is looking for a self-motivated software engineer to join our team. We're a team of engineers and data scientists who work with large-scale data processing systems such as Apache Spark and Amazon Redshift to analyze data in the cloud.\nYou'll be responsible for receiving and performing code reviews with other engineers, writing unit, integration, and end-to-end tests to verify functionality using automated testing frameworks such as Pytest, working collaboratively with fellow engineers to build features requested by business stakeholders, participating in Agile teams to develop, test and debug complex data processing pipelines and data analysis applications using big data processing systems such as Apache Spark.\nIn addition to being able to diagnose, debug, and perform root cause analysis of issues and defects when needed, we're looking for someone with some visualization experience (Tableau or PowerBI) who can bring their own ideas about how best use these tools on a project.\nSkills that will lead to success include: Experience with Python programming languages; experience with cloud systems like AWS or Azure; experience writing SQL queries; strong written communication skills; eagerness to work in a highly collaborative environment;"}]}
-{"id": "user_oriented_task_57", "motivation_app": "Gmail", "instruction": "Considering the reason for the rejection, write a polite rejection letter for rejecting a candidate.", "instances": [{"input": "Reason: You came in second", "output": "Dear [Candidate Name],\n\nThank you for applying to [company name]! We're sorry to inform you that we're going to be passing on your application.\nWe were impressed with your initiative and would love to have had the opportunity to work with you. At this time, however, we're not able to offer any positions at our company.\nPlease accept our sincerest apologies for passing on your application, and know that we remain open to working with you in the future!"}]}
-{"id": "user_oriented_task_58", "motivation_app": "Blogger", "instruction": "We've already decided on the title and topic for our blog post. I would appreciate it if you wrote an introduction paragraph to attract the readers' attention.", "instances": [{"input": "Blog title: Healthy Apple Muffins\nBlog Topic: recipe and tips for cooking a good apple muffin", "output": "Let me tell you this, apple muffins are extremely easy to make, and they are totally worth the effort. If you've never been much of a cook or baker before, I can promise you that making these will change your mind. And if they don't get the job done, at least they'll taste good!"}]}
-{"id": "user_oriented_task_59", "motivation_app": "Blogger", "instruction": "Design an outline for a blog post based on the given information and list the sections accordingly.", "instances": [{"input": "This blog post is going to about making a list of last-minute gift ideas. Also, the tone of the post is going to be relaxed and casual.", "output": "Section: Gift card to a favorite restaurant or store\nSection: Movie tickets\nSection: A massage or mani/pedi\nSection: A fun game console\nSection: Books\nSection: Candles, flameless candles\nSection: Portable battery charger for phones/tablets\nSection: Water filter pitcher\nSection: Personalized items with photos/artwork\nSection: Favorite team gear (hat, sweatshirt)\nTakeaway: Don't stress about out running out of time to buy, make a gift."}]}
-{"id": "user_oriented_task_60", "motivation_app": "Blogger", "instruction": "Write a section for a blog post and try to cover all of the provided information about this section in your text.", "instances": [{"input": "Blog Topic: 7 Fall Fashion Trends Worth Taking on Your Trip\nSection Title: Trusty Jeans\nMain point of the section: jeans can be worn all year and look good with everything .", "output": "Jeans are one of the most iconic fashion items for a reason \u2013 they look great with everything and can be worn year-round. While this Fall, the trends lean toward a baggier fit, bringing along your favorite, trusty jeans is a must no matter where you\u2019re headed. If you can\u2019t get on board with the baggy jean trend, at least opt for a straight leg and ditch the skinnies."}]}
-{"id": "user_oriented_task_61", "motivation_app": "Instagram", "instruction": "Create a detailed caption for an Instagram post about a discount. A proper caption should explain the product, offer the discount, and tell the user how to claim it.", "instances": [{"input": "Product: Golang course\nProduct description: teaching the foundational skills for building cloud services, web applications, DevOps tools, and backend development\nDiscount: 50%\nHow to claim: Use the coupon code: GOLANG", "output": "We've just launched the Golang course. In this course you will learn the foundational skills needed for high-paying careers involving building cloud services, web applications, DevOps tools, and backend development. Upon completion of the course, you will be able to build a web server that end-users can use to interact and consume as a REST API.\nLearning outcomes\nAs a graduate you\u2019ll be able to:\n1- Articulate the advantages and disadvantages of Go compared to similar languages.\n2- Design a business case for when to develop a project in Go.\n3- Set up a Go environment.\n4- Program a basic backend using Go.\nCheck out the Golang course syllabus to view the course project.\n\nEnroll this week and save 50% with our new course special. Use the coupon code: GOLANG at checkout to save 50% on your subscription to the Golang course. Start learning soon\u2014this special promotion is available only until [date]."}]}
-{"id": "user_oriented_task_62", "motivation_app": "Instagram", "instruction": "Create a captivating Instagram caption based on the provided details. Try to think of as many captions as you can.", "instances": [{"input": "I am sharing images of the first snow of the year taken from a window in an office", "output": "- It's snowing! Here's the first snow of the year from our office windows.\n- Let the photos do the talking. The first snow of the year is just around the corner.\n- I know it's not snowing outside, but just seeing the first flakes of the year from our office window is enough to make me feel like winter has finally arrived. :D\n- First snow of the year and I'm in a good mood. Because I get to do my favorite thing: Look at pictures of snow on Instagram\n- The first snow of the year and I'm getting cozy in my office chair. \ud83d\ude0d\n- Snowpocalypse: the sweet, delicate joy of first snowfall.\n- Snow day: here's to a great start to the week!\n- Snowing in my office and I am not even mad.\n- It's a snowy day in the office. I'm wearing my long johns, drinking hot chocolate, and binge-watching Game of Thrones. What are you doing?"}]}
-{"id": "user_oriented_task_63", "motivation_app": "Instagram", "instruction": "You are given a topic for an Instagram post. Help the post reach a broader audience by suggesting hashtags related to the post.", "instances": [{"input": "Another episode of women in science is out now", "output": "#womeninSTEM #WomenScience #science #research #stempodcast"}]}
-{"id": "user_oriented_task_64", "motivation_app": "Instagram", "instruction": "Use appropriate emojis to convey the meaning in the given text.", "instances": [{"input": "Cute cat\nplayful\nSoft", "output": "\ud83d\udc31 Cute cat\n\ud83d\ude1b playful\n\ud83d\udc9b Soft"}]}
-{"id": "user_oriented_task_65", "motivation_app": "YouTube", "instruction": "The topic of a YouTube post has been described and based on the information, you need to write a hook for starting the post. A catchy hook will keep your readers interested so they keep reading. It would be great if you could come up with as many hooks as you can.", "instances": [{"input": "A video showing how to make a tasty cup of coffee", "output": "- How do you make a great cup of coffee? We asked a professional.\n- Real life stories of the greatest coffee shops in the world.\n- You'll definitely want a cup of this after watching it so be sure not to miss this.\n- Coffee is something most people drink every day. This video tells you everything about it.\n- Watch this video on 7 habits to make you a better barista."}]}
-{"id": "user_oriented_task_66", "motivation_app": "YouTube", "instruction": "Using the provided topic as a starting point, brainstorm ideas for videos that can be made about it on YouTube.", "instances": [{"input": "In spite of the fact that procrastination feels bad to you, why do you do it?", "output": "+ The reason why you procrastinate\n+ A documentary about procrastination\n+ How to quit procrastination\n+ Recording people talking about their procrastination habits\n+ Interview people while they are procrastinating\n+ People who have failed to accomplish their New Year's resolutions explain why"}]}
-{"id": "user_oriented_task_67", "motivation_app": "YouTube", "instruction": "You should choose a YouTube video title based on the video's content. A video's title tells viewers what to expect from it. It should be direct, honest, and clear. The title of the video needs to capture the attention of viewers, so do not use an unclear or ambiguous one.", "instances": [{"input": "A research study has been conducted to determine if exercise really can \"boost\" your metabolism.", "output": "You Will Not Believe The Results Of A Recent Study On Exercise And The Metabolism!"}]}
-{"id": "user_oriented_task_68", "motivation_app": "YouTube", "instruction": "Choosing a name for your product or business YouTube channel is an important part of the process. Based on the description of the product or business, you should come up with some interesting names. Take some time to brainstorm your ideas.", "instances": [{"input": "Here you will find videos and content that will help students prepare for the application process to graduate schools as well as how to apply to graduate schools", "output": "GradSchoolPrep, GoGradSchool, GradSchoolVidz, Apply2Gradschool"}]}
-{"id": "user_oriented_task_69", "motivation_app": "merriam-webster.com", "instruction": "Find the answer that best describes the underlined SAT word. Select the correct option and explain the meaning of the underlined word.", "instances": [{"input": "Despite the _cacophony, the student tried to study. \nA. Loud sounds\nB. Difficult subject\nC. Late hour\nD. Low lighting", "output": "A. Loud sounds, Although it comes from the Greek for \u201cbad sound,\u201d cacophony can now also mean \u201can incongruous or chaotic mixture\u201d of other things, like colors or smells."}]}
-{"id": "user_oriented_task_70", "motivation_app": "merriam-webster.com", "instruction": "Find synonyms for the given word. You need to write down how the provided synonyms differ from the original word in terms of meaning, usage, etc.", "instances": [{"input": "adversity", "output": "- mischance:The words mischance and adversity are synonyms, but do differ in nuance. Specifically, mischance applies especially to a situation involving no more than slight inconvenience or minor annoyance.\n- misfortune: The meanings of misfortune and adversity largely overlap; however, misfortune may apply to either the incident or conjunction of events that is the cause of an unhappy change of fortune or to the ensuing state of distress.\n- mishap: The synonyms mishap and adversity are sometimes interchangeable, but mishap applies to an often trivial instance of bad luck."}]}
-{"id": "user_oriented_task_71", "motivation_app": "merriam-webster.com", "instruction": "Discuss the origins and history of the word that has been given to you.", "instances": [{"input": "oblique", "output": "Middle English oblique, oblike, borrowed from Anglo-French oblic, oblique, borrowed from Latin obl\u012bquus \"slanting, transverse,\" from ob- \"against, facing\" + -l\u012bquus, of uncertain meaning and origin"}]}
-{"id": "user_oriented_task_72", "motivation_app": "Yelp", "instruction": "Give the provided brand a motto to use as a part of its marketing strategies.", "instances": [{"input": "The Ice cream shop is a small, quaint shop located in the heart of the historic district. They offer a variety of ice cream flavors, as well as Italian ice, gelato, and sorbet. The shop has a few tables and chairs, but most of their business is to-go. They offer a loyalty program where you can get a free scoop of ice cream after 10 purchases.", "output": "- It's a scoop of friendly!\n- Come in and have a treat!\n- For a sweet treat, stop by and see us."}]}
-{"id": "user_oriented_task_73", "motivation_app": "Gmail", "instruction": "A confirmation email should be written appropriately for the situation.", "instances": [{"input": "A meeting has been scheduled, and the sender expects the other to review the slides.", "output": "Hi [name],\n\nI hope you're having a great week so far. I wanted to follow up on our meeting and let you know that we've scheduled a time for you and your team to come in next week. I'll have some slides prepared for you to review, and if there are any questions, I'd be happy to answer them! Just email me back at [email] when you have a moment.\n\nThanks so much for all of your help with this project\u2014we're excited to get started!"}]}
-{"id": "user_oriented_task_74", "motivation_app": "Gmail", "instruction": "You need to write an email to negotiate your salary.", "instances": [{"input": "", "output": "Dear [hiring manager's name],\n\nI wanted to take a moment to thank you for the offer. I'm honored that you would consider me for this position, and I'm excited to be working with you.\nI know that it's not easy to get a sense of how much salary should be offered in a given situation, so I wanted to explain why I believe my skill set is worth more than the offered salary. \nMy experience as a [position title] will prepare me well for this role, which requires extensive project management skills, as well as strong written communication skills and a deep understanding of the company's product offerings. These are all areas where I've already built up substantial experience over the past few years and am confident that they would serve me well in your company. \n\nI hope we can reach an agreement soon!"}]}
-{"id": "user_oriented_task_75", "motivation_app": "Gmail", "instruction": "Write an email to attendees as a reminder that the event is coming up.", "instances": [{"input": "", "output": "Hello, [name]!\n\nWe're excited to announce the [event name] event that you signed up for. The location is [location], and the date and time is [date and time].\n\nIf you have any questions or concerns about the event, please let us know by emailing us at [email]."}]}
-{"id": "user_oriented_task_76", "motivation_app": "Github", "instruction": "It would be helpful if you could suggest an acronym for the repository.", "instances": [{"input": "A python framework for creating, editing, and invoking Noisy Intermediate Scale Quantum (NISQ) circuits.", "output": "Cirq"}]}
-{"id": "user_oriented_task_77", "motivation_app": "Github", "instruction": "Write a step-by-step guide for resolving GitHub merge conflicts.", "instances": [{"input": "", "output": "1- Under your repository name, click Pull requests.\n2- In the \"Pull Requests\" list, click the pull request with a merge conflict that you'd like to resolve.\n3- Near the bottom of your pull request, click Resolve conflicts.\n4- Decide if you want to keep only your branch's changes, keep only the other branch's changes, or make a brand new change, which may incorporate changes from both branches. Delete the conflict markers <<<<<<<, =======, >>>>>>> and make the changes you want in the final merge.\n5- If you have more than one merge conflict in your file, scroll down to the next set of conflict markers and repeat steps four and five to resolve your merge conflict.\n6- Once you've resolved all the conflicts in the file, click Mark as resolved.\n7- If you have more than one file with a conflict, select the next file you want to edit on the left side of the page under \"conflicting files\" and repeat steps four through seven until you've resolved all of your pull request's merge conflicts.\n8- Once you've resolved all your merge conflicts, click Commit merge. This merges the entire base branch into your head branch.\n9- If prompted, review the branch that you are committing to.\nIf the head branch is the default branch of the repository, you can choose either to update this branch with the changes you made to resolve the conflict, or to create a new branch and use this as the head branch of the pull request.\nIf you choose to create a new branch, enter a name for the branch.\nIf the head branch of your pull request is protected you must create a new branch. You won't get the option to update the protected branch.\nClick Create branch and update my pull request or I understand, continue updating BRANCH. The button text corresponds to the action you are performing.\n10- To merge your pull request, click Merge pull request. For more information about other pull request merge options, see \"Merging a pull request.\""}]}
-{"id": "user_oriented_task_78", "motivation_app": "Github", "instruction": "One or more jobs can be run automatically by a workflow. The workflows are defined in the repository as YAML files and will run if an event occurs in the repository, as well as manually, or on a certain schedule.\nBring an example of a workflow that demonstrate the CI/CD features of GitHub Actions.", "instances": [{"input": "", "output": "Using scripts to test your code on a runner is a common example. When this workflow is triggered, it automatically runs a script."}]}
-{"id": "user_oriented_task_79", "motivation_app": "IMDB", "instruction": "The movie tagline is the film's advertising slogan, usually printed on posters prior to the official release. It is a short description or comment on a movie to capture the essence of the movie, and ultimately make you watch the movie. Here is an example:", "instances": [{"input": "Movie: Joker (2019)", "output": "Put on a happy face."}]}
-{"id": "user_oriented_task_80", "motivation_app": "IMDB", "instruction": "You are given some reviews for a movie. Each video might have mentioned some negative or positive points about the movie. Read them carefully and extract the positive and negative points mentioned in all of the reviews. Then, make a list of these points and summarize them as positive points and negative points.", "instances": [{"input": "Review 1: I'm not sure if it's just the fact that Ed Norton really grates on me in this film, but I never really bought into the idea of Fight Club being some kind of cultural phenomenon. Yes it's an innovative story, told via a strong script, engaging characters and intense cinematography. But is it really worthy of such high praise? Personally, I find it starts to fall apart halfway through and actually becomes quite tedious towards the end. Everything just becomes a little bit childish and contrived as the story loses touch with its base elements and the rage against the machine vibe takes over. I have huge respect for the way this film was made but, much like The Matrix, I feel like people attach way more importance to this movie than it deserves.\nReview 2: The film tells the story of men who cannot adapt to the social order in today's world, under the leadership of Tyler Durden, to discover their natural instinct, masculinity, and to escape from their unhappy lives.\nReview 3: Despite a good theme, great acting and important messages that this movie convey in an unorthodox way, I think it fails to connect the audience with the storyline and leaves him in a world of confusion. Although, majority of reviews find this movie entertaining and interesting, yet I would choose to be a minority that believes that this movie is extremely overrated.\nReview 4: Is creating chaos, explosions and membership in a militant sect the way to get rid of material shackles ?! The visual effects are fascinating and the story is compelling until the second half. Unfortunately, it doesn't end well.", "output": "Positive points: \n- Innovative story\n- Strong script\n- Engaging characters\n- Intense cinematography\n- Good theme\n- Great acting\n- Important messages\n- Fascinating visual effects \n\nNegative points: \n- Ed Norton grates on the viewer\n- Starts to fall apart halfway through\n- Childish and contrived towards the end\n- Loses touch with its base elements\n- World of confusion\n- Overrated"}]}
-{"id": "user_oriented_task_81", "motivation_app": "IMDB", "instruction": "Give a brief summary of the movie or series' plot.", "instances": [{"input": "Breaking Bad (TV Series 2008\u20132013)", "output": "Walter H. White is a chemistry genius, but works as a chemistry teacher in an Albequerque, New Mexico high school. His life drastically changes when he's diagnosed with stage III terminal lung cancer, and given a short amount of time left to live: a mere matter of months. To ensure his handicapped son and his pregnant wife have a financial future, Walt uses his chemistry background to create and sell the world's finest crystal methamphetamine. To sell his signature \"blue meth,\" he teams up with Jesse Pinkman, a former student of his. The meth makes them very rich very quickly, but it attracts the attention of his DEA brother in law Hank. As Walt and Jesse's status in the drug world escalates, Walt becomes a dangerous criminal and Jesse becomes a hot-headed salesman. Hank is always hot on his tail, and it forces Walt to come up with new ways to cover his tracks."}]}
-{"id": "user_oriented_task_82", "motivation_app": "IMDB", "instruction": "Decide whether children can watch the given movie. Mention what makes it inappropriate if you think they can't.", "instances": [{"input": "The Dark Knight (2008)", "output": "No, children should not watch The Dark Knight. The movie is rated PG-13 for intense sequences of violence and action, some menace, and brief language."}]}
-{"id": "user_oriented_task_83", "motivation_app": "IMDB", "instruction": "Write a short analysis of the cinematography in the movie.", "instances": [{"input": "Dead Poets Society", "output": "'Dead Poets Society' uses film techniques such as camera angles and camera techniques to help emphasise certain emotions between each character. The Film techniques are used to help the audience understand the theme, character, emotion and setting of a specific scene. All movies use Film Techniques to help the audience join in the experience and share the emotions that occurring in the film. Film techniques used in this film help shape meaning and give a better understanding of the relationship between the characters in the film. Although this film does not only use film techniques to emphasize the relationship of only characters, it uses it to emphasizes the relationship of characters with the themes of the film such as, poetry, carpe diem (seize the day), The school etc."}]}
-{"id": "user_oriented_task_84", "motivation_app": "tripadvisor.com", "instruction": "Please let me know your thoughts on the given place and why you think it deserves to be visited.", "instances": [{"input": "Barcelona, Spain", "output": "There\u2019s nowhere in the world like Barcelona. From days spent strolling the Gothic Quarter to afternoons at the beach and nights partying \u2018til dawn, the city is magnetic. While tapas and bustling mercados are worth every bit of attention they get in guidebooks, Barcelona\u2019s true crown jewel is the work of Antoni Gaud\u00ed. His iconic creations, like the jaw-dropping Basilica of La Sagrada Familia, are instantly recognizable and have fueled millions of travel dreams. You won\u2019t be the only tourist here (especially during the summer), but even amid the crowds you\u2019ll find your own Mediterranean magic any time of year."}]}
-{"id": "user_oriented_task_85", "motivation_app": "tripadvisor.com", "instruction": "Consider the best time of year to visit the given city, and provide your reasons for choosing that time.", "instances": [{"input": "Sydney, Australia", "output": "March to May: The best times to visit Sydney are between March and May and September and November. You\u2019ll find more comfortable temperatures and fewer crowds \u2014 which means you can likely nab better prices on airfare, too.\nBut if you want to warm up with the masses from winter back home, the average daily temperatures in January can hit highs of 30\u00b0C/86\u00b0F and lows around 20\u00b0C/67\u00b0F."}]}
-{"id": "user_oriented_task_86", "motivation_app": "tripadvisor.com", "instruction": "Make a questionnaire to help hotel guests write hotel reviews.", "instances": [{"input": "", "output": "1- Your overall rating of this property:\n2- Title of your review:\n3- Summarize your visit or highlight an interesting detail:\n4- Your Review:\n5- What sort of trip was this? Business/Couples/Family/Friends/Solo\n6- When did you travel?\n7- Could you say a little more about it? (optional)\nWe'd love your opinion! Anything you can share will help other travelers choose their perfect hotel. Thanks.\nHotel Style & Amenities: \nDoes this hotel have laundry service? Yes/No/Not Sure\nDoes this hotel have a game room? Yes/No/Not Sure\nDoes this hotel have a sun terrace? Yes/No/Not Sure\nIs the front desk of the hotel staffed 24-hours a day? Yes/No/Not Sure\n8- How expensive is this hotel? Budget/Mid-range/Luxury\n9- Add a tip to help travelers choose a good room PE.g., best views, quieter floors, accessibility, etc.)\n10- Do you have photos to share? (optional)"}]}
-{"id": "user_oriented_task_87", "motivation_app": "tripadvisor.com", "instruction": "Make a list of the top places in the U.S. to visit at the given time.", "instances": [{"input": "November", "output": "1- For mild temperatures and local brews: San Diego\n2- For the last gasp of fall foliage: Manchester, VT\n3- For unexpected art and mountain views: Las Vegas\n4- For ice skating and family fun: Chicago\n5- For a waterfront Thanksgiving experience: Key West, FL\n6- For great food and ghost tours: Savannah, GA"}]}
-{"id": "user_oriented_task_88", "motivation_app": "tripadvisor.com", "instruction": "Name the top cities in France that should not be missed. Include the best aspects of each place as well.", "instances": [{"input": "", "output": "1- Paris, Best for: Historic sights and romantic vibes\n2- Nice, Best for: Architecture and dining\n3- Lyon, Best for: Dining and live music\n4- Bordeaux, Best for: Wine and antiques\n5- Marseille, Best for: Museums and nightlife\n6- Strasbourg, Best for: Historic architecture and dining"}]}
-{"id": "user_oriented_task_89", "motivation_app": "StackOverflow", "instruction": "Answer the following question.", "instances": [{"input": "How do I compare strings in Java?\nI've been using the == operator in my program to compare all my strings so far. However, I ran into a bug, changed one of them into .equals() instead, and it fixed the bug.\nIs == bad? When should it and should it not be used? What's the difference?", "output": "== tests for reference equality (whether they are the same object).\n.equals() tests for value equality (whether they are logically \"equal\").\nObjects.equals() checks for null before calling .equals() so you don't have to (available as of JDK7, also available in Guava).\nConsequently, if you want to test whether two strings have the same value you will probably want to use Objects.equals().\nYou almost always want to use Objects.equals(). In the rare situation where you know you're dealing with interned strings, you can use ==."}]}
-{"id": "user_oriented_task_90", "motivation_app": "StackOverflow", "instruction": "You are given a question and two possible answers. The task is to evaluate each answer and determine if it is correct. If incorrect, explain why.", "instances": [{"input": "Is there a way of reading one single character from the user input? For instance, they press one key at the terminal and it is returned (sort of like getch()). I know there's a function in Windows for it, but I'd like something that is cross-platform.\nAnswer 1: Here's a link to the ActiveState Recipes site that says how you can read a single character in Windows, Linux and OSX :\nhttps://code.activestate.com/recipes/134892/\nAnswer 2: if you just want to hold the screen so you can see the result on the terminal just write `input()` at the end of the code and it will hold the screen", "output": "Answer 1 is correct.\nAnswer 2 is incorrect because it does not answer the question. The user is asking how to read a single character."}]}
-{"id": "user_oriented_task_91", "motivation_app": "StackOverflow", "instruction": "You are given two questions. Check if they are duplicates.", "instances": [{"input": "Question1: I am trying to understand the advantages of multiprocessing over threading. I know that multiprocessing gets around the Global Interpreter Lock, but what other advantages are there, and can threading not do the same thing?\nQuestion2: I am learning how to use the threading and the multiprocessing modules in Python to run certain operations in parallel and speed up my code.\nI am finding this hard (maybe because I don't have any theoretical background about it) to understand what the difference is between a threading.Thread() object and a multiprocessing.Process() one.\nAlso, it is not entirely clear to me how to instantiate a queue of jobs and having only 4 (for example) of them running in parallel, while the other wait for resources to free before being executed.\nI find the examples in the documentation clear, but not very exhaustive; as soon as I try to complicate things a bit, I receive a lot of weird errors (like a method that can't be pickled, and so on).\nSo, when should I use the threading and multiprocessing modules?\nCan you link me to some resources that explain the concepts behind these two modules and how to use them properly for complex tasks?", "output": "Yes, they are duplicates."}]}
-{"id": "user_oriented_task_92", "motivation_app": "Leetcode", "instruction": "Design a programming problem related to the subject that has been given to you. Use some examples and constraints to improve your question.", "instances": [{"input": "Dynamic Programming", "output": "Given an integer array nums, return the length of the longest strictly increasing subsequence.\n\n Example 1:\nIn: nums = [10,9,2,5,3,7,101,18]\nOut: 4\nExplanation: The longest increasing subsequence is [2,3,7,101], therefore the length is 4.\n\nExample 2:\nIn: nums = [0,1,0,3,2,3]\nOut: 4\n\nExample 3:\nIn: nums = [7,7,7,7,7,7,7]\nOut: 1\n \nConstraints:\n1 <= nums.length <= 2500\n-104 <= nums[i] <= 104\n \nFollow up: Can you come up with an algorithm that runs in O(n log(n)) time complexity?"}]}
-{"id": "user_oriented_task_93", "motivation_app": "Leetcode", "instruction": "Provide a Java solution to the following problem.", "instances": [{"input": "Given an integer n, return true if it is a power of three. Otherwise, return false.\nAn integer n is a power of three, if there exists an integer x such that n == 3^x.\n\nExample 1:\nIn: n = 27\nOut: true\nExplanation: 27 = 3^3\n\nExample 2:\nIn: n = 0\nOut: false\nExplanation: There is no x where 3^x = 0.\n\nExample 3:\nIn: n = -1\nOut: false\nExplanation: There is no x where 3x = (-1).\n \nConstraints:\n-231 <= n <= 231 - 1\nFollow up: Could you solve it without loops/recursion?", "output": "public class Solution {\n public boolean isPowerOfThree(int n) {\n return (Math.log10(n) / Math.log10(3)) % 1 == 0;\n }\n}"}]}
-{"id": "user_oriented_task_94", "motivation_app": "Leetcode", "instruction": "Think of topics that are most common in classic interview questions for a job in computer science.", "instances": [{"input": "", "output": "+ Array\n+ Strings\n+ Linked List\n+ Trees\n+ Sorting and Searching\n+ Dynamic Programming\n+ Design\n+ Math"}]}
-{"id": "user_oriented_task_95", "motivation_app": "Leetcode", "instruction": "You will need to guide this person through the scenario they have described for the interview.", "instances": [{"input": "Finally! After a while of shooting in the dark and frantically fiddling with sample inputs on the whiteboard, you've came up with an algorithm for solving the coding question your interviewer gave you.\nWhew. Such a relief to have a clear path forward. To not be flailing anymore.\nNow you're cruising, getting ready to code up your solution.\nWhen suddenly, your interviewer throws you a curve ball.\n\"What if we thought of the problem this way?\"\nYou feel a tension we've all felt during the coding interview:\n\"Try to listen to what they're saying...but don't lose your train of thought...ugh, I can't do both!\"", "output": "Your interviewer probably won't fault you for going down the wrong path at first. They've seen really smart engineers do the same thing. They understand it's because you only have a partial view of the maze.\nThey might have let you go down the wrong path for a bit to see if you could keep your thinking organized without help. But now they want to rush you through the part where you discover the dead end and double back. Not because they don't believe you can manage it yourself. But because they want to make sure you have enough time to finish the question.\nBut here's something they will fault you for: failing to listen to them. Nobody wants to work with an engineer who doesn't listen.\nSo when you find yourself in that crucial coding interview moment, when you're torn between holding your train of thought and considering the idea your interviewer is suggesting...remember this:\nListening to your interviewer is the most important thing.\nTake what they're saying and run with it. Think of the next steps that follow from what they're saying.\nEven if it means completely leaving behind the path you were on. Trust the route your interviewer is pointing you down.\nBecause they can see the whole maze."}]}
-{"id": "user_oriented_task_96", "motivation_app": "Leetcode", "instruction": "You are given a programming problem and its implementation. Analyze the problem and implementation and explain the algorithm and approach used to solve the problem.", "instances": [{"input": "Table: Person\n| Column Name | Type |\n| personId | int |\n| lastName | varchar |\n| firstName | varchar |\npersonId is the primary key column for this table.\nThis table contains information about the ID of some persons and their first and last names.\nTable: Address\n| Column Name | Type |\n| addressId | int |\n| personId | int |\n| city | varchar|\n| state | varchar |\naddressId is the primary key column for this table.\nEach row of this table contains information about the city and state of one person with ID = PersonId.\nWrite an SQL query to report the first name, last name, city, and state of each person in the Person table. If the address of a personId is not present in the Address table, report null instead.\nReturn the result table in any order.\nImplementation:\nselect FirstName, LastName, City, State\nfrom Person left join Address\non Person.PersonId = Address.PersonId\n;", "output": "Since the PersonId in table Address is the foreign key of table Person, we can join these two tables to get the address information of a person.\nConsidering there might be no address information for every person, we should use outer join instead of the default inner join."}]}
-{"id": "user_oriented_task_97", "motivation_app": "Wikipedia", "instruction": "The given article should be checked for any scientific errors and corrected if necessary.", "instances": [{"input": "Zinc is an essential mineral that is required for good health. It is involved in many biochemical processes in the body and is necessary for the proper functioning of the immune system. Excess zink is not harmful as it is excreted in the urine.", "output": "Although zinc is an essential requirement for good health, excess zinc can be harmful. Excessive absorption of zinc suppresses copper and iron absorption. The free zinc ion in solution is highly toxic to plants, invertebrates, and even vertebrate fish. The Free Ion Activity Model is well-established in the literature, and shows that just micromolar amounts of the free ion kills some organisms. A recent example showed 6 micromolar killing 93% of all Daphnia in water.\nThe free zinc ion is a powerful Lewis acid up to the point of being corrosive. Stomach acid contains hydrochloric acid, in which metallic zinc dissolves readily to give corrosive zinc chloride. Swallowing a post-1982 American one cent piece (97.5% zinc) can cause damage to the stomach lining through the high solubility of the zinc ion in the acidic stomach."}]}
-{"id": "user_oriented_task_98", "motivation_app": "Wikipedia", "instruction": "Summarize the article you have been given in a brief manner.", "instances": [{"input": "Mathematics and art are related in a variety of ways. Mathematics has itself been described as an art motivated by beauty. Mathematics can be discerned in arts such as music, dance, painting, architecture, sculpture, and textiles. This article focuses, however, on mathematics in the visual arts.\nMathematics and art have a long historical relationship. Artists have used mathematics since the 4th century BC when the Greek sculptor Polykleitos wrote his Canon, prescribing proportions conjectured to have been based on the ratio 1:\u221a2 for the ideal male nude. Persistent popular claims have been made for the use of the golden ratio in ancient art and architecture, without reliable evidence. In the Italian Renaissance, Luca Pacioli wrote the influential treatise De divina proportione (1509), illustrated with woodcuts by Leonardo da Vinci, on the use of the golden ratio in art. Another Italian painter, Piero della Francesca, developed Euclid's ideas on perspective in treatises such as De Prospectiva Pingendi, and in his paintings. The engraver Albrecht D\u00fcrer made many references to mathematics in his work Melencolia I. In modern times, the graphic artist M. C. Escher made intensive use of tessellation and hyperbolic geometry, with the help of the mathematician H. S. M. Coxeter, while the De Stijl movement led by Theo van Doesburg and Piet Mondrian explicitly embraced geometrical forms. Mathematics has inspired textile arts such as quilting, knitting, cross-stitch, crochet, embroidery, weaving, Turkish and other carpet-making, as well as kilim. In Islamic art, symmetries are evident in forms as varied as Persian girih and Moroccan zellige tilework, Mughal jali pierced stone screens, and widespread muqarnas vaulting.", "output": "This article discusses the relationship between mathematics and art, with a focus on visual arts. It describes how artists have used mathematics since ancient times, and how mathematics has inspired various textile arts."}]}
-{"id": "user_oriented_task_99", "motivation_app": "Wikipedia", "instruction": "Improve the article using your knowledge of the topic in order to make it more informative.", "instances": [{"input": "The \"fair trade\" movement, also known as the \"trade justice\" movement, promotes the use of labour, environmental and social standards for the production of commodities, particularly those exported from the Third and Second Worlds to the First World. Such ideas have also sparked a debate on whether trade itself should be codified as a human right.", "output": "The \"fair trade\" movement, also known as the \"trade justice\" movement, promotes the use of labour, environmental and social standards for the production of commodities, particularly those exported from the Third and Second Worlds to the First World. Such ideas have also sparked a debate on whether trade itself should be codified as a human right.\nImporting firms voluntarily adhere to fair trade standards or governments may enforce them through a combination of employment and commercial law. Proposed and practiced fair trade policies vary widely, ranging from the common prohibition of goods made using slave labour to minimum price support schemes such as those for coffee in the 1980s. Non-governmental organizations also play a role in promoting fair trade standards by serving as independent monitors of compliance with labeling requirements. As such, it is a form of Protectionism."}]}
-{"id": "user_oriented_task_100", "motivation_app": "Wikipedia", "instruction": "Make the article available in a second language by translating it into it.", "instances": [{"input": "Dentistry, also known as dental medicine and oral medicine, is the branch of medicine focused on the teeth, gums, and mouth. It consists of the study, diagnosis, prevention, management, and treatment of diseases, disorders, and conditions of the mouth, most commonly focused on dentition (the development and arrangement of teeth) as well as the oral mucosa. Dentistry may also encompass other aspects of the craniofacial complex including the temporomandibular joint. The practitioner is called a dentist.\nTranslate to French:", "output": "La dentisterie, \u00e9galement connue sous le nom de m\u00e9decine dentaire et de m\u00e9decine buccale, est la branche de la m\u00e9decine ax\u00e9e sur les dents, les gencives et la bouche. Il consiste en l'\u00e9tude, le diagnostic, la pr\u00e9vention, la gestion et le traitement des maladies, troubles et affections de la bouche, le plus souvent ax\u00e9s sur la dentition (le d\u00e9veloppement et la disposition des dents) ainsi que sur la muqueuse buccale.\nLa dentisterie peut \u00e9galement englober d'autres aspects du complexe craniofacial, y compris l'articulation temporo-mandibulaire. Le praticien est appel\u00e9 dentiste."}]}
-{"id": "user_oriented_task_101", "motivation_app": "Wikipedia", "instruction": "An evaluation of the article's quality should be carried out. In order to do so, you should review the quality of the writing and the explanation of the topic.", "instances": [{"input": "The 20th century saw a revolution in music listening as the radio gained popularity worldwide and new media and technologies were developed to record, edit and distribute music. Music performances became increasingly visual with the broadcast and recording of performances.\n20th-century music brought new freedom and wide experimentation with new musical styles and forms that challenged the accepted rules of music of earlier periods. The invention of musical amplification and electronic instruments, especially the synthesizer, in the mid-20th century revolutionized classical and popular music, and accelerated the development of new forms of music.", "output": "The article is well-written and researched, and provides a good overview of the topic."}]}
-{"id": "user_oriented_task_102", "motivation_app": "Wikipedia", "instruction": "Please answer the following question based on the information provided in the article.", "instances": [{"input": "Development studies is an interdisciplinary branch of social science. Development studies is offered as a specialized master's degree in a number of reputed universities around the world. It has grown in popularity as a subject of study since the early 1990s, and has been most widely taught and researched in developing countries and countries with a colonial history, such as the UK, where the discipline originated. Students of development studies often choose careers in international organisations such as the United Nations, World Bank, non-governmental organisations (NGOs), media and journalism houses, private sector development consultancy firms, corporate social responsibility (CSR) bodies and research centers.\nWhat are some popular careers for students of development studies?", "output": "Some popular careers for students of development studies include working for international organisations such as the United Nations, World Bank, or NGOs, as well as working in media, journalism, private sector development consultancy, or corporate social responsibility."}]}
-{"id": "user_oriented_task_103", "motivation_app": "Coursera", "instruction": "Design a syllabus for the given course. Students should be given a list of the chapters with brief explanations of each chapter's purpose.", "instances": [{"input": "Programming for Everybody (Getting Started with Python)", "output": "- Chapter One - Why we Program?\nThese are the course-wide materials as well as the first part of Chapter One where we explore what it means to write programs. We finish Chapter One and have the quiz and first assignment in the third week of the class. Throughout the course you may want to come back and look at these materials. This section should not take you an entire week.\n- Installing and Using Python\nIn this module you will set things up so you can write Python programs. Not all activities in this module are required for this class so please read the \"Using Python in this Class\" material for details.\n- Chapter One: Why We Program (continued)\nIn the first chapter we try to cover the \"big picture\" of programming so you get a \"table of contents\" of the rest of the book. Don't worry if not everything makes perfect sense the first time you hear it. This chapter is quite broad and you would benefit from reading the chapter in the book in addition to watching the lectures to help it all sink in. You might want to come back and re-watch these lectures after you have funished a few more chapters.\n- Chapter Two: Variables and Expressions\nIn this chapter we cover how a program uses the computer's memory to store, retrieve and calculate information.\n- Chapter Three: Conditional Code\nIn this section we move from sequential code that simply runs one line of code after another to conditional code where some steps are skipped. It is a very simple concept - but it is how computer software makes \"choices\".\n- Chapter Four: Functions\nThis is a relatively short chapter. We will learn about what functions are and how we can use them. The programs in the first chapters of the book are not large enough to require us to develop functions, but as the book moves into more and more complex programs, functions will be an essential way for us to make sense of our code.\n- Chapter Five: Loops and Iteration\nLoops and iteration complete our four basic programming patterns. Loops are the way we tell Python to do something over and over. Loops are the way we build programs that stay with a problem until the problem is solved."}]}
-{"id": "user_oriented_task_104", "motivation_app": "Coursera", "instruction": "List the personality traits that are required to be successful in the given job.", "instances": [{"input": "Social Media Marketer", "output": "- Engaging with people\n- Thinking creatively\n- Following trends"}]}
-{"id": "user_oriented_task_105", "motivation_app": "Coursera", "instruction": "Please list the courses that someone interested in the first course might like.", "instances": [{"input": "Bitcoin and Cryptocurrency Technologies", "output": "- Blockchain 360: A State of the Art for Professionals\n- Blockchain Scalability and its Foundations in Distributed Systems\n- Blockchain Business Models\n- Financial Technology (Fintech) Innovations Specialization"}]}
-{"id": "user_oriented_task_106", "motivation_app": "Coursera", "instruction": "Come up with the courses that one is supposed to take in order to be an expert in a given field.", "instances": [{"input": "Graphic Design", "output": "1- Fundamentals of Graphic Design\n2- Introduction to Typography\n3- Introduction to Imagemaking\n4- Ideas from the History of Graphic Design\n5- Brand New Brand"}]}
-{"id": "user_oriented_task_107", "motivation_app": "National Geographic", "instruction": "Create a daily itinerary based on the given information.", "instances": [{"input": "Our family is looking for a 9-day Morocco trip that has light to moderate activity levels.", "output": "1- DAY 1: MARRAKECH\nArrive in time to meet your fellow travelers for a welcome gathering this evening. Your arrival transfer is included.\n2- DAY 2: MARRAKECH/OUARZAZATE\nWind past red slopes and green terraces as we make our way south through the stunning valleys of Morocco\u2019s High Atlas Mountains, stopping to enjoy a picnic lunch beside a sparkling river. Arrive in Ouarzazate, known as the \u201cgateway to the Sahara,\u201d and step behind the scenes at a film studio to see how the surrounding dunes have starred on the silver screen.\n3- DAY 3: OUARZAZATE/MERZOUGA\nToday, travel between jagged peaks and palm oases toward the desert town of Merzouga. Settle into our hotel near the incredible Erg Chebbi, a sweeping sea of dunes formed by wind-blown sand. As the sun sets, savour a traditional Amazigh dinner, then spread a blanket on the sand for an evening of desert stargazing.\n4- DAY 4: MERZOUGA\nThis morning, hop aboard a 4x4 and ride across the dunes to discover fossils left behind by an ancient sea, and be entranced by Gnaoua musicians in a local village. Return to the hotel for free time to cool off in the pool. Enjoy a tasty Amazigh dinner, then gather around a glowing bonfire for a storytelling session.\n5- DAY 5: MERZOUGA/DADES GORGE\nVenture into the rugged landscape of the Dades Gorge, a dramatic and colourful valley carved by the waters of the Dades River. Visit a local village for lunch and enjoy a special dish\u2014traditional Amazigh pizza! Wind through the valley on a guided walk as you learn about the amazing geology behind the weird and wonderful rock formations of the Gorge. This evening, enjoy free time to relax at the hotel.\n6- DAY 6: DADES GORGE/A\u00cfT BEN HADDOU\nDrive through the Ounila Valley to the ancient fortified village of A\u00eft Ben Haddou, a UNESCO World Heritage site where clay houses crowd together in a traditional ksar behind defensive walls. Explore alleyways twisting between historic structures, and stand guard atop the clay brick ramparts, taking in views of the green oasis around this imposing fort. Entertain your taste buds at a hands-on tagine cooking class, followed by a delicious dinner.\n7- DAY 7: A\u00cfT BEN HADDOU/MARRAKECH\nJourney through the mountains to Marrakech, known as the \u201cred city\u201d for its walls, houses, and many mosques built from red clay. After checking into your hotel and resting up, head to a local henna workshop to have your hands painted in this ancient temporary tattooing art. Then, explore the lively Djemaa el Fna, an ancient marketplace still used as the main square in Marrakech. After, enjoy free time to discover more of Marrakech.\n8- DAY 8: MARRAKECH \nWake up as the call to prayer echoes across the city. After breakfast, set out on a guided walk through the narrow, maze-like streets of the medina, lined with colorful shops brimming with rugs, spices, and much more. Spend the rest of the day exploring Marrakech on your own, or opt to explore the Marjorelle Garden. In the evening, gather for a farewell dinner in a traditional riad, a grand house with an elegant interior courtyard.\n9- DAY 9: MARRAKECH\nDepart at any time."}]}
-{"id": "user_oriented_task_108", "motivation_app": "National Geographic", "instruction": "Give students tips on how to keep their nerves under control during class presentations.", "instances": [{"input": "", "output": "Everyone gets a little nervous when speaking in front of a group. That\u2019s normal. But the more preparation you\u2019ve done\u2014meaning plenty of researching, organizing, and rehearsing\u2014the more confident you\u2019ll be. Preparation is the key. And if you make a mistake or stumble over your words, just regroup and keep going. Nobody\u2019s perfect, and nobody expects you to be."}]}
-{"id": "user_oriented_task_109", "motivation_app": "National Geographic", "instruction": "You can easily make your holidays more eco-friendly by following these simple steps:", "instances": [{"input": "", "output": "- If you prefer to buy a tree, look for local growers with advice on where to recycle. To complete the natural look, make ornaments from collected and dried foliage.\n- Another way to be environmentally friendly during the holidays is to use candles made with soy, ethical beeswax, and coconut wax instead of those made with petroleum-based paraffin wax. LED lights are also an environmentally friendly option among artificial lights.\n-Another way to reduce waste is to use brown paper grocery bags or furoshiki, which is swaddling gifts in reusable squares of cloth, rather than wrapping paper. \n- Instead of throwing away the excess food, find groups that redistribute it at epa.gov by searching for \u201creduce wasted food.\u201d What\u2019s no longer edible might be compostable."}]}
-{"id": "user_oriented_task_110", "motivation_app": "instructables", "instruction": "Think of an idea to help you win a challenge given to you. Write the steps you need to take to develop the idea as well.", "instances": [{"input": "What can you make by recycling and repurposing some old, previously used materials? We want to see it!\nProjects must incorporate some kind of used materials - take that old used thing, and turn it into something new and different!\nE-waste is a huge problem. What can you make using old electronics and other salvageable electronic materials? \nWe have a special judges prize for our favorite Instructable that incorporates or is related to reusing e-waste.\nBe smart and be safe; note that potentially deadly projects (like Lichtenberg/fractal wood burning machines) will not be allowed to be published or submitted to this contest.", "output": "This power booster recycles a 'dead' battery to power a 1.8 volt red LED light. It's mainly made from recycled parts.\nWhen a 1.5 volt battery is new it will probably have more than 1.5 volts in it. As the battery is used the voltage will decrease until the voltage is not enough to run the application ( E.g. a red LED) and will be called 'dead' . But a Dead battery still has some power remaining in it - just not enough voltage. The trick is to use some of that remaining power to increase the voltage in a circuit. The trade off being there will be a reduction in current.\nHere is the required steps:\n1- Connect a switch to a battery holder. A soldiering iron and hot glue gun assisted with this.\n2- Make a small pyramid above the battery holder using 4 bamboo skewer sticks secured with hot glue.\n3- Soldier on a 1.8 volt red LED at the top of the mini Christmas tree.\n4- Test the LED glows with a new battery (1.6 volts)\n5- Test the LED doesn't glow with a dead battery (1.2 volts)\n6- Add in the buzzer circuit, along with a switch.\n7- Add in the windings circuit - to touch the striker of the buzzer a little copper foil was added.\n8- Insert a dead battery (1.2 volt) with the buzzer circuit turned switch off - the LED will not glow but when the buzzer circuit is switched on the Red LED lights up."}]}
-{"id": "user_oriented_task_111", "motivation_app": "instructables", "instruction": "There are many creative projects we can build at home that are related to the given theme, so let's list some of them.", "instances": [{"input": "Theme: Circuits", "output": "1- Build a UV Level Monitoring Budgie - Using IoT and Weather Data APIs\n2- DIY Weather Station With ESP32\n3- Modular Display Clock\n4- Overengineered Fume Extractor Project"}]}
-{"id": "user_oriented_task_112", "motivation_app": "instructables", "instruction": "Make a list of the materials that will be required to build the given tool.", "instances": [{"input": "Coffee Darkness Meter", "output": "- Arduino Nano (or Uno)\n- 0.96\" OLED display\n- TSL2561 lux and IR sensor on a brake-out board (I used Adafruit's)\n- ON/OFF toggle switch\n- Momentary push button\n- 9V battery and battery clip\n- 2x IR LEDs\n- 2x 100ohm resistors\n- 2x 10K resistors\n- 2x 2.2K resistors\n- wire for connecting\n- case for everything (3D printed)"}]}
-{"id": "user_oriented_task_113", "motivation_app": "instructables", "instruction": "Provide a detailed explanation of the steps that are required in order to construct the given project.", "instances": [{"input": "Steam Engine Using an Empty Beer Can", "output": "Step 1: Close Drink Opening\nTo build up steam the beverage can needs to act as a pressure vessel and so needs to be completely sealed, except where the steam will come out of the jet nozzles. It also needs to rotate on itself. Bend up the outer tab into a L shape using some pilers and then bend it forward so a piece of string can hang from it over the center of the can. This will allow the can to rotate on itself. Close the pouring opening by using a knife or other thin object, bending the opening back up so it almost where it was before the beverage can was opened.\nStep 2: Nozzel Jets\nMake a couple of holes in the side of the can, about half way up, opposite each other using a small drill. The diameter of the holes should be about the same as the thin plastic tubing to be used as jet nozzles. Enlarge the holes so they are elliptical allowing the nozzle tubes to lie parallel against the cans side.\nStep 3: Making a Pressure Vessel\nSeal the top of the opened can using super glue and baking soda to make a small pressure vessel. Add some super glue to the closed opening and then sprinkle on some baking soda - they will combine to make a plastic. Keep adding super glue and baking soda until the top is sealed.\nStep 4: Attaching the Nozzle Jets\nFit in the thin plastic tubes to the holes drilled in the sides of the can and seal with super glue and baking soda. Try to have the tubes pointing horizontal in the direction you want to rotate the can when the steam exits the nozzles.\nStep 5: Creating Steam\nAdd a little water into the can though one of the nozzles. Just add maybe a teaspoon or so. \nStep 6: In Action\nThe can was suspended from a tripod over the methylated sprits flame. After a couple of minutes enough steam had built up to rotate the can."}]}
-{"id": "user_oriented_task_114", "motivation_app": "instructables", "instruction": "Estimate the cost of supplies required to create the given project and calculate the total cost.", "instances": [{"input": "DIY Water Chiller\nRequired items are: Ice-maker, Icebox/cooler, Temperature regulator, Aquarium pump x2, 12V power supply", "output": "Ice-maker ($90)\nIcebox/cooler ($17)\nTemperature regulator ($16)\nAquarium pump x2 ($13)\n12V power supply ($8)\nTotal cost = $144"}]}
-{"id": "user_oriented_task_115", "motivation_app": "Socratic by Google", "instruction": "Answer the following literature question as accurately as possible based on your knowledge of literature for high school students.", "instances": [{"input": "What does the green light symbolize at the end of the book?", "output": "The green light symbolizes several different things to different characters throughout the book, but overall, it stands for the inaccessibility of the American Dream. At the start of the novel, the light symbolizes Gatsby's unwavering love for Daisy. He stands at the end of the dock gazing across the bay, reaching for the light, waiting for Daisy to come back to him. His hope, like the eternal green light, is unfaltering. It isn't until Daisy admits later that she had never really noticed the light that the facade began to slip. Nick, the narrator, starts to recognize it as a symbol of Gatsby's unhealthy desperation rather than his love. This is parallel to Gatsby's experience of the American Dream and money. Gatsby is unbelievably rich at the beginning of the novel. This solidifies his belief in the American Dream, though he still reaches for the acceptance of those who are wealthier and more popular. Nick believes money is not an object to Gatsby, though he has no idea how he made all of it. As the story unfolds and the mystery and mystique around Gatsby fade, so does Gatsby and Nick's blind belief and trust in the American Dream. After Gatsby is murdered, the green light is symbolic of the ways that money and affluence ruin the lives of those without. It is also symbolic of the way that the American dream ruins the lives of those eternally reaching for it."}]}
-{"id": "user_oriented_task_116", "motivation_app": "Wysa", "instruction": "Develop a mental exercise that can help people manage their anxiety and explain how it works.", "instances": [{"input": "", "output": "Submodalities\nGiving a physical form to your anxiety can help your mind feel more in control of it. In this exercise, we will visualize what anxiety looks and feels like, and learn to reduce its intensity. Here are the questions that you need to answer:\n1- Everyone feels anxiety differently. Where in your body do you feel it?\n2- Now close your eyes and picture your anxiety. Try to give it color, shape, and temperature. What does it feel like?\n3- Does it move, or is it in one place? If it moves, tell how it is moving.\n4- Now for the good part, close your eyes and try changing this picture. Change how it moves. Slow it done shrink its size. Try to move it away from your body. Imagine it fading. Keep doing this until you feel like you can change how it feels. Changing which submodality helped you curb your anxiety? Was it its shape, size, or something else?\n5- Do you feel better now?"}]}
-{"id": "user_oriented_task_117", "motivation_app": "Wysa", "instruction": "Give me an idea of the mental exercises you do to inspire yourself.", "instances": [{"input": "", "output": "1- Identify your strengths\n2- Understand you motivation style\n3- Build worthiness\n4- Dream ahead"}]}
-{"id": "user_oriented_task_118", "motivation_app": "Weather", "instruction": "Make a list of outdoor activities that are suitable for the given weather condition.", "instances": [{"input": "Temperature: 57 F, Showers", "output": "- Take a walk in the rain\n- Jump in puddles\n- Put on your own musical in the rain\n- Paint with Mud\n- Build a shelter \n- Make Nature Boats \n- Create a Dam"}]}
-{"id": "user_oriented_task_119", "motivation_app": "Weather", "instruction": "Please provide us with a list of the best conservation starter questions related to the weather.", "instances": [{"input": "", "output": "1. Beautiful day, isn't it?\n2. Can you believe all of this rain we've been having?\n3. It looks like it's going to storm.\n4. We couldn't ask for a nicer day, could we?\n5. How about this weather?\n6. It's so nice out today, isn't it?\n7. The weather's nice today, right?\n8. It\u2019s freezing today! Hopefully it doesn\u2019t snow.\n9. Wow, it\u2019s really hot/cold for this time of year.\n10. It\u2019s really been pouring all day, huh?"}]}
-{"id": "user_oriented_task_120", "motivation_app": "Weather", "instruction": "In relation to the given weather scenario, give some tips on how to adjust the travel plans with it.", "instances": [{"input": "a sudden temperature change", "output": "- Make sure that you have a good variety of options, and pack for the weather of your destination.\n- Rapid temperature changes can have major impacts on your body. Make sure you are aware of your particular responses to temperature change and pack accordingly. \n- Make sure to pack ointments, creams and lotions to help keep your skin healthy while you are on the go. \n- With extreme heat, you may experience rash, fainting, headaches, and fatigue as well as nausea, sweating, and loss of coordination. Be particularly aware of heat exhaustion and heat stroke as these are serious medical conditions that can have serious consequences. In cold climates, be aware of frostbite and hypothermia, intense shivering, goosebumps, difficulty using your hands, and false sense of warmth can indicate a developing cold illness."}]}
-{"id": "user_oriented_task_121", "motivation_app": "Notion", "instruction": "Write a to-do list based on the given information.", "instances": [{"input": "Daily tasks in the pharmacy", "output": "- Police sidewalk and entrance for litter \n- Assess window displays to make sure they are not outdated or faded\n- Clean all doors and remove outdated and unnecessary signs \n- Fill bags at each checkout area \n- Clean and straighten checkout counters \n- Tidy up seating/waiting area \n- Face (pull product forward) in all departments \n- Check one department a day for short and outdated product \n- Empty all trash receptacles\n- Vacuum (when the store is closed or only if customer traffic allows for it during store hours)"}]}
-{"id": "user_oriented_task_122", "motivation_app": "Notion", "instruction": "Design a template table for keeping track of all subscriptions.", "instances": [{"input": "", "output": "Service | Category | Status | Billing | Pricing | Renewal Date | Next Renewal | Monthly Costs | Yearly Costs | Nptes"}]}
-{"id": "user_oriented_task_123", "motivation_app": "Notion", "instruction": "Plan an agenda for the daily standup meetings. Assign a time duration to each topic on the agenda.", "instances": [{"input": "", "output": "[10 min] Status check\n- Current and upcoming work\n- Have spare bandwidth\n- Any blockers?\n[5 min] Big project status + general updates\n[5 min] Discuss blockers, otherwise social time\n[2 min] Add follow Ups"}]}
-{"id": "user_oriented_task_124", "motivation_app": "Doulingo", "instruction": "Answer the following question.", "instances": [{"input": "How do you say \"good evening\" in French.", "output": "bonsoir"}]}
-{"id": "user_oriented_task_125", "motivation_app": "Doulingo", "instruction": "What sound does this make?", "instances": [{"input": "\u597d", "output": "h\u01ceo"}]}
-{"id": "user_oriented_task_126", "motivation_app": "Doulingo", "instruction": "Give a grammar tip on how to end a German verb.", "instances": [{"input": "", "output": "Verbs have different endings depending on the subject of the sentence. Lucky for you er and sie use the same ending.\nSubject | Ending\nich | e\ndu | st\ner / sie | t"}]}
-{"id": "user_oriented_task_127", "motivation_app": "Doulingo", "instruction": "This is a test for the reading ability of French. Read the given story carefully, and then answer the question in the end.", "instances": [{"input": "Lucie est \u00e0 la maison avec sa petite-fille, Lin.\nLucie: Oh, non! Nous n'avons pas de pain!\nLin: Ah! Tu vas au supermarch\u00e9?\nLucie: Oui, je veux une baguette pour mon sandwich.\nDo Lin and Lucie have a lot of bread?", "output": "No, that's wrong."}]}
-{"id": "user_oriented_task_128", "motivation_app": "Tasty", "instruction": "Come up with healthy and easy dinner ideas for weeknights.", "instances": [{"input": "", "output": "- Chicken & Veggie Stir-Fry\n- Classic Chicken Noodle Soup\n- Low-Carb Eggplant Lasagna\n- Avocado Lime Salmon\n- Zesty One-Pot Shrimp Pasta\n- Grilled Filet Mignon Street Tacos\n- Bruschetta Pasta"}]}
-{"id": "user_oriented_task_129", "motivation_app": "Tasty", "instruction": "Provide a cooking hack for improving the flavor of the given food.", "instances": [{"input": "popcorn", "output": "Everyone knows butter on popcorn is delicious. But supposedly adding some soy sauce to popcorn butter makes for a next-level popcorn topper. According to several sites, like Food52, the addition of salty soy sauce to creamy butter brings out the savory, cheesy, and nutty flavors in popcorn."}]}
-{"id": "user_oriented_task_130", "motivation_app": "Tasty", "instruction": "Make a list of snacks and foods to serve as party snacks on a game day!", "instances": [{"input": "", "output": "- Classic Chocolate Cake\n- Buffalo Chicken Sliders\n- Smoky Oven-Fried Chicken\n- Classic Hot Crab Dip For A Crowd\n- Potato Crust Breakfast Pizza\n- Muffin Tin Deviled Eggs"}]}
-{"id": "user_oriented_task_131", "motivation_app": "Tasty", "instruction": "Describe how to prepare the given food in your own words. Note down the ingredients you will need and the steps you will take to prepare them.", "instances": [{"input": "Chewy Chocolate Chip Cookies", "output": "Ingredients (for 12 cookies):\n\u00bd cup granulated sugar(100 g)\n\u00be cup brown sugar(165 g), packed\n1 teaspoon salt\n\u00bd cup unsalted butter(115 g), melted\n1 egg\n1 teaspoon vanilla extract\n1 \u00bc cups all-purpose flour(155 g)\n\u00bd teaspoon baking soda\n4 oz milk or semi-sweet chocolate chunks(110 g)\n4 oz dark chocolate chunk(110 g), or your preference\nPreparation:\nIn a large bowl, whisk together the sugars, salt, and butter until a paste forms with no lumps.\nWhisk in the egg and vanilla, beating until light ribbons fall off the whisk and remain for a short while before falling back into the mixture.\nSift in the flour and baking soda, then fold the mixture with a spatula (Be careful not to overmix, which would cause the gluten in the flour to toughen resulting in cakier cookies).\nFold in the chocolate chunks, then chill the dough for at least 30 minutes. For a more intense toffee-like flavor and deeper color, chill the dough overnight. The longer the dough rests, the more complex its flavor will be.\nPreheat oven to 350\u00b0F (180\u00b0C). Line a baking sheet with parchment paper.\nScoop the dough with an ice-cream scoop onto a parchment paper-lined baking sheet, leaving at least 4 inches (10 cm) of space between cookies and 2 inches (5 cm) of space from the edges of the pan so that the cookies can spread evenly.\nBake for 12-15 minutes, or until the edges have started to barely brown.\nCool completely before serving."}]}
-{"id": "user_oriented_task_132", "motivation_app": "Tasty", "instruction": "Suggest a recipe for a vegan dessert.", "instances": [{"input": "", "output": "Honey Lime Fruit Salad\nIngredients: (for 4 servings)\n\u00bd lb fresh strawberry(225 g), quartered\n2 kiwis, peeled and diced\n2 mangoes, diced\n2 bananas, sliced\n\u00bd lb fresh blueberry(225 g)\n2 tablespoons honey\n1 lime, juiced\n\nPreparation:\nPlace sliced fruits in a large bowl.\nIn a small bowl, mix honey and lime juice. Pour syrup over the fruit and mix."}]}
-{"id": "user_oriented_task_133", "motivation_app": "Sudoku", "instruction": "Design a medium-level sudoku puzzle.", "instances": [{"input": "", "output": "| | | | | |6|1|4|9|\n| |8| | |1|3| | | |\n| | |6|4|2| |7| | |\n|7| | |9| | |4|3| |\n|3|6| | | | |5|9|1|\n| |9|5| |8|1| | |7|\n| | | |6| |2|9| |5|\n|1| | |5|4| |8|6| |\n|6|5|2| | | | |7| |"}]}
-{"id": "user_oriented_task_134", "motivation_app": "Sudoku", "instruction": "Solve the given Sudoku puzzle.", "instances": [{"input": "The Sudoku puzzle is:\n|1| |5| | | | |8| |\n| | | | | |3| |6|4|\n|8|3|4|6|7| |9| | |\n|2|9|1| | |8|3|7|6|\n| | |6| | |9|8|1| |\n| | | | | |2| | | |\n| |2| | |9|7|6| | |\n|5|4| | | | | |9|8|\n|6|1| | | |5|4|3|7|", "output": "The solved Sudoku puzzle is:\n|1|6|5|9|2|4|7|8|9|\n|9|7|2|8|5|3|1|6|4|\n|8|3|4|6|7|1|9|2|5|\n|2|9|1|5|4|8|3|7|6|\n|4|5|6|7|3|9|8|1|2|\n|7|8|3|1|6|2|5|4|9|\n|3|2|8|4|9|7|6|5|1|\n|5|4|7|3|1|6|2|9|8|\n|6|1|9|2|8|5|4|3|7|"}]}
-{"id": "user_oriented_task_135", "motivation_app": "Goodreads", "instruction": "Think of a motivational quote that you have read in a book. Try to keep it positive and sweet.", "instances": [{"input": "", "output": "\u201cBelieve in yourself. You are braver than you think, more talented than you know, and capable of more than you imagine.\u201d\n\u2015 Roy T. Bennett, The Light in the Heart"}]}
-{"id": "user_oriented_task_136", "motivation_app": "Goodreads", "instruction": "Choose the best books from the given genre.", "instances": [{"input": "Crime & Mystery", "output": "1- The Girl with the Dragon Tattoo\n2- And Then There Were None\n3- Angels & Demons\n4- Rebecca\n5- In Cold Blood\n6- The Godfather\n7- The Lovely Bones\n8- Gone Girl\n9- The Name of the Rose\n10- Shutter Island"}]}
-{"id": "user_oriented_task_137", "motivation_app": "Goodreads", "instruction": "Give a brief description of the book that you are given.", "instances": [{"input": "Matilda", "output": "Matilda is a little girl who is far too good to be true. At age five-and-a-half she's knocking off double-digit multiplication problems and blitz-reading Dickens. Even more remarkably, her classmates love her even though she's a super-nerd and the teacher's pet. But everything is not perfect in Matilda's world...\nFor starters she has two of the most idiotic, self-centered parents who ever lived. Then there's the large, busty nightmare of a school principal, Miss (\"The\") Trunchbull, a former hammer-throwing champion who flings children at will, and is approximately as sympathetic as a bulldozer. Fortunately for Matilda, she has the inner resources to deal with such annoyances: astonishing intelligence, saintly patience, and an innate predilection for revenge."}]}
-{"id": "user_oriented_task_138", "motivation_app": "Goodreads", "instruction": "In the event that a person enjoyed the book that was given, please suggest other books that they might also like.", "instances": [{"input": "The Great Gatsby", "output": "To Kill a Mockingbird, The Catcher in the Rye, 1984, Animal Farm, Pride and Prejudice, The Hunger Games, Of Mice and Men, Lord of the Flies"}]}
-{"id": "user_oriented_task_139", "motivation_app": "Goodreads", "instruction": "Indicate the genre of the book to which it belongs.", "instances": [{"input": "Love in the Time of Cholera", "output": "Fiction"}]}
-{"id": "user_oriented_task_140", "motivation_app": "Wolfram alpha", "instruction": "Solve this equation.", "instances": [{"input": "x^3 - 4x^2 + 6x - 24 = 0", "output": "x = 4, x= -i * sqrt(6), x= i * sqrt(6)"}]}
-{"id": "user_oriented_task_141", "motivation_app": "Wolfram alpha", "instruction": "Prove the given statement using your information.", "instances": [{"input": "\"If x is a positive integer or a solution to x+3>4, then x>0 and x>12.\"", "output": "To determine if it's true, let's look first at the assumptions. We are assuming that either x is a positive integer, or that it solves the inequality x+3>4.\nNext let's consider the conclusion. We are concluding that x must satisfy both inequalities x>0 and x>12. If we look more closely, we see that once we satisfy the second inequality, the first is redundant. (If x>12, then it must already be larger than zero.)\nNow, in order for this statement to be true, we need that if x solves either of the assumptions, then it must solve x>12. Well, the first assumption is that x is a positive integer, which means that x\u22651, so in this case the conclusion holds. The second assumption is that x+3>4, or equivalently, that x>1, which means the conclusion holds as well."}]}
-{"id": "user_oriented_task_142", "motivation_app": "Wolfram alpha", "instruction": "Based on the facts that have been provided, prove the following statement.", "instances": [{"input": "Statement: \"For every integer k, k^2 + 2k + 1 \u2265 0\"\nFact: The square of any real number is non-negative.", "output": "Let k be an integer. Then (k + 1)^2 \u2265 0 because the square\nof any real number is non-negative. But (k+1)^2 = k^2+2k+1. So,\nby combining these two equations, we find that k^2 + 2k + 1 \u2265 0."}]}
-{"id": "user_oriented_task_143", "motivation_app": "Wolfram alpha", "instruction": "Verify the correctness of the given statement.", "instances": [{"input": "\"For all integers j and k, if j and k are odd, then jk is odd.\"", "output": "True"}]}
-{"id": "user_oriented_task_144", "motivation_app": "Wolfram alpha", "instruction": "Provide a formula for computing the nth term in the given sequence", "instances": [{"input": "5, 14, 23, 32, 41, ...", "output": "an = 9*n - 4"}]}
-{"id": "user_oriented_task_145", "motivation_app": "ESPN", "instruction": "Create a table listing all games that meet the specified criteria in the National Football League. Use the season, local time, game, and score as columns of the table.", "instances": [{"input": "Ravens home games in 2011", "output": "season | local time | game | score\n2011-12 | Sun, Jan 15 | Texans (11-7) at Ravens (13-4) | 13 - 20\n2011-12 | Sat, Dec 24 | Browns (4-11) at Ravens (11-4) | 14 - 20\n2011-12 | Sun, Dec 11 | Colts (0-13) at Ravens (10-3) | 10 - 24\n2011-12 | Thu, Nov 24 | 49ers (9-2) at Ravens (8-3) | 6 - 16\n2011-12 | Sun, Nov 20 | Bengals (6-4) at Ravens (7-3) | 24 - 31\n2011-12 | Sun, Oct 30 | Cardinals (1-6) at Ravens (5-2) | 27 - 30\n2011-12 | Sun, Oct 16 | Texans (3-3) at Ravens (4-1) | 14 - 29\n2011-12 | Sun, Oct 2 | Jets (2-2) at Ravens (3-1) | 17 - 34\n2011-12 | Sun, Sep 11 | Steelers (0-1) at Ravens (1-0) | 7 - 35"}]}
-{"id": "user_oriented_task_146", "motivation_app": "ESPN", "instruction": "Using a table, compare the career overviews of the given players in Major League Baseball. Use \"|\" for separating the columns in the table.", "instances": [{"input": "Derek Jeter, Albert Pujols", "output": "| D. Jeter | A. Pujols\ngames played | 2747 | 2746\ngames started | 2734 | 2704\nbatting average | .310 | .296\nhome runs | 260 | 703\nruns batted in | 1311 | 2218"}]}
-{"id": "user_oriented_task_147", "motivation_app": "(Wolfram alpha)?", "instruction": "You will be tested on your knowledge of classic witticisms and aphorisms by completing the given aphorism. Write the original quote as the answer.", "instances": [{"input": "two things are infinite", "output": "The universe and human stupidity; and I'm not sure about the universe. (attributed to Albert Einstein)"}]}
-{"id": "user_oriented_task_148", "motivation_app": "IMDB", "instruction": "A list of all movies that meet the criteria given should be compiled.", "instances": [{"input": "movies directed by Spike Lee by release date", "output": "| | release date\n1 | American Utopia | 10/09/2020\n2 | Da 5 Bloods | 12/06/2020\n3 | BlacKkKlansman | 10/08/2018\n4 | Pass Over | 20/04/2018\n5 | Rodney King | 28/04/2017"}]}
-{"id": "user_oriented_task_149", "motivation_app": "(Wolfram alpha)?", "instruction": "Using a given amount, determine an appropriate tip.", "instances": [{"input": "14% tip on $47.50", "output": "amount of tip = $6.65 \namount with tip = $54.15"}]}
-{"id": "user_oriented_task_150", "motivation_app": "Wordle", "instruction": "Identify all words that match the pattern given.", "instances": [{"input": "H_AR_", "output": "haart, heard, hears, heart, hoard, hoary"}]}
-{"id": "user_oriented_task_151", "motivation_app": "merriam-webster.com", "instruction": "Come up with words that rhyme with the given word.", "instances": [{"input": "instruct", "output": "abduct, bucked, chucked, conduct, construct, deconstruct, deduct, destruct, ducked, duct, induct, obstruct, plucked, reconstruct, shucked, sucked, trucked, tucked"}]}
-{"id": "user_oriented_task_152", "motivation_app": "(Wolfram alpha)?", "instruction": "Look for poems that mention the given object or character. The names of the poems and their poets should be written.", "instances": [{"input": "santa claus", "output": "Santeclaus by William Gilley\nA Visit From St. Nicholas by Clement Clarke Moore"}]}
-{"id": "user_oriented_task_153", "motivation_app": "Messenger", "instruction": "Respond to the last text in the chat using the correct emojis to convey your feelings.", "instances": [{"input": "+ Hey, how did your exams go?\n- They were alright. I'm just glad they're over.\n+ Yeah, I know how you feel. I'm just glad I don't have to worry about them anymore.", "output": "- \ud83d\ude0c\ud83d\ude0a"}]}
-{"id": "user_oriented_task_154", "motivation_app": "Messenger", "instruction": "For answering the chat, think of a funny response you can use.", "instances": [{"input": "+ how are you?", "output": "- I still haven\u2019t figured it out."}]}
-{"id": "user_oriented_task_155", "motivation_app": "Messenger", "instruction": "The last message in the chat is a partial response. You should complete and rewrite it.", "instances": [{"input": "+ What are going to do now?\n- I don't know. I had to give up my dream of being a mathematician.\n+ Why", "output": "+ Why is that? What happened?"}]}
-{"id": "user_oriented_task_156", "motivation_app": "Messenger", "instruction": "Change the response to have a more empathic tone in the chat.", "instances": [{"input": "+ How are you doing?\n- Fine, I had a bunch of problems today.\n+ What kind of problems?", "output": "+ Oh, Is there anything I can help you with?"}]}
-{"id": "user_oriented_task_157", "motivation_app": "Messenger", "instruction": "Currently, there is a chat in which people are arranging a meeting in the future. Retrieve the meeting information from the conversation.", "instances": [{"input": "Person 1: Hey, when are you free? I'd like to meet up.\nPerson 2: I'm free tomorrow afternoon.\nPerson 1: Great, what time?\nPerson 2: How about 3 pm?\nPerson 1: I have another meeting at that time. what about 5 pm?\nPerson 2: That works. Where should we meet?\nPerson 1: How about the coffee shop on Main Street?\nPerson 2: Sounds good. I've heard they have great coffee there. See you tomorrow!", "output": "Date: Tomorrow\nTime: 5 pm\nPlace: Coffee shop on Main Street"}]}
-{"id": "user_oriented_task_158", "motivation_app": "Messenger", "instruction": "The given text must be classified as offensive or not offensive.", "instances": [{"input": "You acted really crazy last night.", "output": "offensive"}]}
-{"id": "user_oriented_task_159", "motivation_app": "https://cohere.ai/", "instruction": "Give a brief summary of the intention of the dialogue that just happened.", "instances": [{"input": "Customer: Hi there, I'm looking for a new phone.\nAI: Hi! What type of phone are you looking for?\nCustomer: I'm not sure. Maybe something with a good camera?\nAI: We have a few phones with great cameras. Would you like to see some options?\nCustomer: Yeah, that would be great.", "output": "A customer wants to buy a new phone."}]}
-{"id": "user_oriented_task_160", "motivation_app": "https://cohere.ai/", "instruction": "Describe the content of the article in a brief manner.", "instances": [{"input": "A study published earlier this year by Zee and her team examined the role of light in sleep for healthy adults in their 20s. Sleeping for only one night with a dim light, such as a TV set with the sound off, raised the blood sugar and heart rate of the young people during the sleep lab experiment. An elevated heart rate at night has been shown in prior studies to be a risk factor for future heart disease and early death, while higher blood sugar levels are a sign of insulin resistance, which can ultimately lead to type 2 diabetes.", "output": "A study finds that sleeping with a light pollution may increase your health risks."}]}
-{"id": "user_oriented_task_161", "motivation_app": "https://cohere.ai/", "instruction": "Correct the transcription of an excerpt containing errors.", "instances": [{"input": "I got got charged interest on ly credit card but I paid my pull balance one day due date. I not missed a pavement year yet. Man you reverse the interest charge?", "output": "I was charged interest on my credit card but I paid my balance on the due date. I haven't missed a payment year yet. Can you reverse the interest charge?"}]}
-{"id": "user_oriented_task_162", "motivation_app": "https://cohere.ai/", "instruction": "Take a look at the contract and extract the parties of the agreement from it.", "instances": [{"input": "As of the 13th day of December 2021, this Music Recording Agreement (\"Agreement\") is made between Good Kid, a Toronto-based musical group (\"Artist\"), and Universal Music Group, a record label with license number 545345 (\"Record Label\"). The Artist and the Recording Label may be referred to in this Agreement individually as \"Parties\" and collectively as \"Parties.\" Work under this Agreement will begin on March 15, 2022, and will last for the duration of the Agreement.", "output": "Artist: Good Kid\nRecording Label: Universal Music Group"}]}
-{"id": "user_oriented_task_163", "motivation_app": "Gmail", "instruction": "On the basis of the subject of the email, determine whether the email should be treated as spam or not.", "instances": [{"input": "Dermatologists don't like her!", "output": "Spam"}]}
-{"id": "user_oriented_task_164", "motivation_app": "Twitter", "instruction": "Classify the tweet into one of the following three categories: event, demo, or news. Event is a happening, often social or public, that people attend or are aware of. Demo is a public showing or presentation of something, typically to promote or advertise it. And, news is information about recent events or happenings, especially as reported by news media.", "instances": [{"input": "Using this tutorial, you can analyze and classify a user\u2019s review as either positive or negative using sentiment analysis features in an existing Next.js app. We show you how to leverage language AI into your applications. Check it out \u2193", "output": "demo"}]}
-{"id": "user_oriented_task_165", "motivation_app": "CNN News", "instruction": "Give the news title a category. Pick a category from the list of News & Buzz, Travel, Style, Arts & Culture, Politics, Tech, and Science & Health.", "instances": [{"input": "The #Banksy Exhibit in Cambridge, MA is absolutely terrific.", "output": "Arts & Culture"}]}
-{"id": "user_oriented_task_166", "motivation_app": "https://cohere.ai/", "instruction": "Classify the questions in the FAQ into Finding policy details, Change account settings, Filing a claim and viewing status, or Cancelling coverage.", "instances": [{"input": "Could you deposit money into my account rather than mailing me a physical cheque?", "output": "Change account settings"}]}
-{"id": "user_oriented_task_167", "motivation_app": "Quora", "instruction": "Choose an appealing title for your post.", "instances": [{"input": "The typical avocado is over 300 calories from the oil in it. That\u2019s the amount of calories in a large candy bar. If you get enough exercise to eat a large candy bar every day without gaining weight, it wouldn\u2019t be a problem to eat an avocado every day. Other wise you should probably eat them sparingly.", "output": "What will happen if you eat an avocado everyday?"}]}
-{"id": "user_oriented_task_168", "motivation_app": "Quora", "instruction": "Give some examples of what people usually say in the given social situation.", "instances": [{"input": "when someone arrives safely", "output": "- \u201cGlad you made it safe and sound.\u201d\n- \u201cGood to see you got here unscathed.\u201d\n- \u201cSo, bullet proof vests actually work!\u201d"}]}
-{"id": "user_oriented_task_169", "motivation_app": "Indeed", "instruction": "You will be required to write a few questions from the given company's interview section.", "instances": [{"input": "Kohl's", "output": "- Why do you want to work at kohls? \n- If you were helping a customer and another interrupted you, what would you do? \n- What\u2019s your availability? \n- What do you think is the most important aspect of working in a retail store? \n- What\u2019s a reasonable reason for not showing up at work?"}]}
-{"id": "user_oriented_task_170", "motivation_app": "Indeed", "instruction": "Based on the information provided, you need to estimate the average summary for the given job.", "instances": [{"input": "Data entry clerk in United States", "output": "$17.58 Per hour"}]}
-{"id": "user_oriented_task_171", "motivation_app": "Indeed", "instruction": "Describe the responsibilities of the given job.", "instances": [{"input": "Security Officer", "output": "A security officer maintains a safe environment on behalf of their employers. They ensure the staff, customers, visitors and business premises are secure. They patrol perimeters, use surveillance equipment to monitor activity and perform security checks required for visitors to the site."}]}
-{"id": "user_oriented_task_172", "motivation_app": "Indeed", "instruction": "Provide a list of the skills that may help you find the job.", "instances": [{"input": "Crew Members", "output": "Baking\nBasic math\nBilingual\nCash handling\nCashiering\nCommunication skills\nComputer literacy\nComputer skills\nConstruction\nCooking\nCustomer service"}]}
-{"id": "user_oriented_task_173", "motivation_app": "Google Calendar", "instruction": "Schedule the meetings based on the given informqation.", "instances": [{"input": "Person1: available from 9-11 am, meeting duration: 30 min \nPerson2: available from 1-3 pm, meeting duration: 90 min\nPerson3: available from 2-3:30 pm, meeting duration: 60 min", "output": "Person1: 9-9:30 am, Person2: 1-2:30 pm, Person3: 2:30-3:30 pm"}]}
-{"id": "user_oriented_task_174", "motivation_app": "Google Map", "instruction": "Find the directions between the given locations.", "instances": [{"input": "From: Rowan Avenue Elementary School, 600 S Rowan Ave, Los Angeles, CA 90023, United States\nTo: Calvary Cemetery and Mortuary, 4201 Whittier Blvd, Los Angeles, CA 90023, United States", "output": "Head north on S Rowan Ave toward E 6th St. Turn right onto Whittier Blvd. Destination will be on the right."}]}
-{"id": "user_oriented_task_175", "motivation_app": "Explainpaper", "instruction": "Give a brief explanation of the requested part of the paper.", "instances": [{"input": "Paper: The dominant sequence transduction models are based on complex recurrent or convolutional neural networks that include an encoder and a decoder. The best performing models also connect the encoder and decoder through an attention mechanism. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely. Experiments on two machine translation tasks show these models to be superior in quality while being more parallelizable and requiring significantly less time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-to-German translation task, improving over the existing best results, including ensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task, our model establishes a new single-model state-of-the-art BLEU score of 41.8 after training for 3.5 days on eight GPUs, a small fraction of the training costs of the best models from the literature. We show that the Transformer generalizes well to other tasks by applying it successfully to English constituency parsing both with large and limited training data.\nExplain: The dominant sequence transduction models", "output": "The dominant sequence transduction models are the most common models used to translate between two languages. These models are based on complex neural networks that include an encoder and a decoder. The best performing models also connect the encoder and decoder through an attention mechanism."}]}
-{"id": "user_oriented_task_176", "motivation_app": "Explainpaper", "instruction": "Answer the question about the paper after reading it carefully.", "instances": [{"input": "To evaluate if the Transformer can generalize to other tasks we performed experiments on English constituency parsing. This task presents specific challenges: the output is subject to strong structural constraints and is significantly longer than the input. Furthermore, RNN sequence-to-sequence models have not been able to attain state-of-the-art results in small-data regimes.\nQuestion: What is English constituency parsing?", "output": "English constituency parsing is a task in natural language processing where the goal is to analyze the syntactic structure of a sentence in English. This can be used to understand the meaning of a sentence, or to generate new sentences with similar meaning."}]}
-{"id": "user_oriented_task_177", "motivation_app": "Meetup", "instruction": "Recommend some helpful activities or conversation starters to use at meetups to break the ice.", "instances": [{"input": "", "output": "- \u201cLet me tell you something very interesting\u2026\u201d (nature, history, ecology, language ,geography etc)\n- Joke (neutral not abrasive)\n- ask for help with something\n- ask open ended questions"}]}
-{"id": "user_oriented_task_178", "motivation_app": "Markdown", "instruction": "Convert the given HTML into Markdown format.", "instances": [{"input": "Use `code` in your Markdown file.", "output": "``Use `code` in your Markdown file.``"}]}
-{"id": "user_oriented_task_179", "motivation_app": "Semantic Scholar", "instruction": "Extract the method that has been used in the research from the abstract of the paper. Give the exact sentences from the abstract that discuss the method.", "instances": [{"input": "We present a state-of-the-art speech recognition system developed using end-to-end deep learning. Our architecture is significantly simpler than traditional speech systems, which rely on laboriously engineered processing pipelines; these traditional systems also tend to perform poorly when used in noisy environments. In contrast, our system does not need hand-designed components to model background noise, reverberation, or speaker variation, but instead directly learns a function that is robust to such effects. We do not need a phoneme dictionary, nor even the concept of a \"phoneme.\" Key to our approach is a well-optimized RNN training system that uses multiple GPUs, as well as a set of novel data synthesis techniques that allow us to efficiently obtain a large amount of varied data for training. Our system, called Deep Speech, outperforms previously published results on the widely studied Switchboard Hub5'00, achieving 16.0% error on the full test set. Deep Speech also handles challenging noisy environments better than widely used, state-of-the-art commercial speech systems.Collapse", "output": "We do not need a phoneme dictionary, nor even the concept of a \"phoneme.\" Key to our approach is a well-optimized RNN training system that uses multiple GPUs, as well as a set of novel data synthesis techniques that allow us to efficiently obtain a large amount of varied data for training."}]}
-{"id": "user_oriented_task_180", "motivation_app": "Semantic Scholar", "instruction": "You need to come up with the names of some of the papers that relate to the paper you are given.", "instances": [{"input": "RoMQA: A Benchmark for Robust, Multi-evidence, Multi-answer Question Answering", "output": "+ Question Answering Infused Pre-training of General-Purpose Contextualized Representations\n+ Zero-Shot Dialogue State Tracking via Cross-Task Transfer\n+ Adapting Language Models for Zero-shot Learning by Meta-tuning on Dataset and Prompt Collections\n+ HotpotQA: A Dataset for Diverse, Explainable Multi-hop Question Answering\n+ FewshotQA: A simple framework for few-shot learning of question answering tasks using pre-trained text-to-text models\n+ MKQA: A Linguistically Diverse Benchmark for Multilingual Open Domain Question Answering\n+ FewCLUE: A Chinese Few-shot Learning Evaluation Benchmark\n+ Multilingual Transfer Learning for QA Using Translation as Data Augmentation\n+ Towards Zero Shot Conditional Summarization with Adaptive Multi-task Fine-Tuning"}]}
-{"id": "user_oriented_task_181", "motivation_app": "Semantic Scholar", "instruction": "Create a short, concise summary of the paper based on its abstract.", "instances": [{"input": "Few-shot learning (FSL) is one of the key future steps in machine learning and raises a lot of attention. In this paper, we focus on the FSL problem of dialogue understanding, which contains two closely related tasks: intent detection and slot filling. Dialogue understanding has been proven to benefit a lot from jointly learning the two sub-tasks. However, such joint learning becomes challenging in the few-shot scenarios: on the one hand, the sparsity of samples greatly magnifies the difficulty of modeling the connection between the two tasks; on the other hand, how to jointly learn multiple tasks in the few-shot setting is still less investigated. In response to this, we introduce FewJoint, the first FSL benchmark for joint dialogue understanding. FewJoint provides a new corpus with 59 different dialogue domains from real industrial API and a code platform to ease FSL experiment set-up, which are expected to advance the research of this field. Further, we find that insufficient performance of the few-shot setting often leads to noisy sharing between two sub-task and disturbs joint learning. To tackle this, we guide slot with explicit intent information and propose a novel trust gating mechanism that blocks low-confidence intent information to ensure high quality sharing. Besides, we introduce a Reptile-based meta-learning strategy to achieve better generalization in unseen few-shot domains. In the experiments, the proposed method brings significant improvements on two datasets and achieve new state-of-the-art performance.", "output": "This paper introduces FewJoint, the first FSL benchmark for joint dialogue understanding, and guides slot with explicit intent information and proposes a novel trust gating mechanism that blocks low-confidence intent information to ensure high quality sharing."}]}
-{"id": "user_oriented_task_182", "motivation_app": "https://abcnotation.com/", "instruction": "You will need to compose the ABC notation for a given song.", "instances": [{"input": "The South Wind", "output": "X:435\nT:Southwind\nR:WALTZ\nM:3/4\nL:1/4\nQ:1/4=68\nK:G\n|:d/c/|\"G\" B3/2 A/2 G|Bcd|\"D\" (A3 |A2) d/c/|\"G\"B3/2 A/2 G|\"C\" EDE|\"G\"(G3|G2) d/c/|!\n|\"G\" B3/2 A/2 G|Bcd|\"D\" A3/2 (3G/F/G/|\"D\" A2 d/c/| \"G\" B3/2 A/ G| \"C\" EDE | \"G\" (G3|G) B d|!\n\"G\" g2 g| \"C\" g3/2 f/ e| \"G\" (d3|\"D\"d2) d/c/|\"G\" B3/2 A/ G| B c d| \"D\" (A3|A2) d| \"C\" g2 g|!\n\"C\" g3/2 f/ e| \"G\" d3| \"G\" d2 c| \"G\" B3/2 A/ G| \"D\" A/ c3/2 F| \"G\" (G3|G2):||"}]}
-{"id": "user_oriented_task_183", "motivation_app": "Scribd", "instruction": "Categorize the given magazine based on its content into one of the categories: Politics, Entertainment News, Tech News, Business News, Parenting & Kids, Health & Healing, Cooking, Food & Wine, and Arts & Literature,", "instances": [{"input": "Inc.", "output": "Business News"}]}
-{"id": "user_oriented_task_184", "motivation_app": "Gmail", "instruction": "Decide if the given email belongs to the Promotions or Social category.", "instances": [{"input": "Subject: You have an invitation\nEmail: Hi, I'd like to join your LinkedIn network.", "output": "Social"}]}
-{"id": "user_oriented_task_185", "motivation_app": "Netflix", "instruction": "The story of a movie has been described using many emojis. You need to guess the name of the movie based on the emojis.", "instances": [{"input": "\ud83e\uddd9\u200d\u2642\ufe0f\ud83d\udeaa\ud83d\udcda\ud83e\udd85\u26a1", "output": "Harry Potter"}]}
-{"id": "user_oriented_task_186", "motivation_app": "Netflix", "instruction": "Come up with an interesting idea for a new movie plot. Your plot should be described with a title and a summary.", "instances": [{"input": "", "output": "The Cat burglars. \nA group of criminals specializes in stealing from high-end residences. They use their knowledge of cats to help them break in and avoid detection. However, one member of the group is a police officer working undercover to bring them down."}]}
-{"id": "user_oriented_task_187", "motivation_app": "Grammarly", "instruction": "Change the first person to the third person in the given sentence. The meaning should be kept, but you can paraphrase it or expand it in order to have a better pose.", "instances": [{"input": "We were recently able to increase the amount of stock we hold with the same supplier thereby reducing our risk.", "output": "The company was recently able to increase the amount of stock it holds with the same supplier thereby reducing its risk."}]}
-{"id": "user_oriented_task_188", "motivation_app": "Grammarly", "instruction": "You should capitalize the sentence according to the guide.", "instances": [{"input": "Guide: Every other letter alternates between lower case and upper case.\nSentence: A giant spider blocks your path.", "output": "a gIaNt SpIdEr BlOcKs YoUr PaTh."}]}
-{"id": "user_oriented_task_189", "motivation_app": "merriam-webster.com", "instruction": "Create alliterations by finding synonyms for words in the given sentence.", "instances": [{"input": "David wears a hat everyday.", "output": "David dons a derby daily."}]}
-{"id": "user_oriented_task_190", "motivation_app": "GeeksforGeeks", "instruction": "Convert the given description to a bash command.", "instances": [{"input": "read the first 10 lines from f", "output": "f.readlines(10)"}]}
-{"id": "user_oriented_task_191", "motivation_app": "Notion", "instruction": "Please prepare a table containing a summary of the information of the participants at the conference.", "instances": [{"input": "The conference was called to discuss how to improve the quality of education in schools. The main speaker was Professor Crew Purdie (cpurdie@email.com), who spoke about the importance of setting high standards for students and teachers. He said that schools should be places where students can learn and be challenged. Other speakers included Teacher Oliver Malachi (oliver@email.com) and Parent Kolby Reese (kolbyreese82@email.com). The teacher talked about the need for teachers to be well-prepared and to have high expectations for their students. The parent spoke about the need for parents to be involved in their children's education. There was also a discussion about the importance of funding for schools.", "output": "Name | Email | Occupation\nCrew Purdie | cpurdie@email.com | Professor\nOliver Malachi | oliver@email.com | Teacher\nKolby Reese | kolbyreese82@email.com | Parent"}]}
-{"id": "user_oriented_task_192", "motivation_app": "Coursera", "instruction": "List the concepts that should be learned before approaching the given complex concept.", "instances": [{"input": "Deep Learning", "output": "- Linear Algebra\n- Calculus\n- Statistics\n- Probability\n- Programming\n- Machine Learning"}]}
-{"id": "user_oriented_task_193", "motivation_app": "Grammarly", "instruction": "Given a sentence that may be ambiguous, rewrite the sentence using less ambiguous words.", "instances": [{"input": "The chicken is ready to eat.", "output": "The chicken is going to eat."}]}
-{"id": "user_oriented_task_194", "motivation_app": "Twitter", "instruction": "Assess the sentiment of the given tweet and categorize it as Positive, Negative, or Neutral.", "instances": [{"input": "\"This is the best cheat-sheet paper that I've found in a LONG time.\"", "output": "Positive"}]}
-{"id": "user_oriented_task_195", "motivation_app": "GeeksforGeeks", "instruction": "You are given a Python command and are required to convert it into Ruby.", "instances": [{"input": "print(\"Hello World!\")", "output": "puts \"Hello World!\""}]}
-{"id": "user_oriented_task_196", "motivation_app": "StackOverflow", "instruction": "You are given a sentence in first-order logic. Translate it into natural language.", "instances": [{"input": "\u2203x\u2200y(soldier(x)\u2192general(x,y))", "output": "There is someone (x) such that if if he is a soldier, then he is general of everyone."}]}
-{"id": "user_oriented_task_197", "motivation_app": "Google Search", "instruction": "You are given a search query and a document. Classify whether the document is relevant to the search query or not relevant.", "instances": [{"input": "Search: why sky is blue\nDocument: The Short Answer: Sunlight reaches Earth's atmosphere and is scattered in all directions by all the gases and particles in the air. Blue light is scattered more than the other colors because it travels as shorter, smaller waves. This is why we see a blue sky most of the time.", "output": "relevant"}]}
-{"id": "user_oriented_task_198", "motivation_app": "Quora", "instruction": "Create a list of subtopics for the given topic.", "instances": [{"input": "Music theory", "output": "melody, rhythm, counterpoint, harmony, form, tonal systems, scales, tuning, intervals, consonance, dissonance, durational proportions, the acoustics of pitch systems, composition, performance, orchestration, ornamentation, improvisation, electronic sound production"}]}
-{"id": "user_oriented_task_199", "motivation_app": "Netflix", "instruction": "Summarize the movie in a snarky way. Try to explain the movie in just one sentence.", "instances": [{"input": "The Shining", "output": "A family's first Airbnb experience goes very wrong."}]}
-{"id": "user_oriented_task_200", "motivation_app": "merriam-webster.com", "instruction": "Enter the words that satisfy the given condition.", "instances": [{"input": "5 Countries that Start with S", "output": "Switzerland, Spain, Senegal, Sweden, Sudan"}]}
-{"id": "user_oriented_task_201", "motivation_app": "Tasty", "instruction": "Provide a name for the dish given the ingredients and instructions.", "instances": [{"input": "INGREDIENTS:\n2 (5 oz) cans Bumble Bee\u00ae Solid White Albacore Tuna, drained\n1 avocado\n2 Tbsp Sriracha\n1 Tbsp Dijon mustard\n2 to 3 Tbsp celery, chopped\n2 Tbsp red onion, chopped\n2 green onions, chopped\n1 Tbsp fresh cilantro, chopped\nSalt and pepper, to taste\n2 heaping cups leafy green lettuce\n1 cup matchstick carrots\n4 (10 inch) whole wheat tortillas\nINSTRUCTIONS:\nIn a medium bowl, mash together tuna and avocado until combined. Add in the rest of the ingredients through the salt and pepper, mixing well.\nTo assemble, top each tortilla with a 1/2 cup leafy greens, 1/4 cup matchstick carrots and divide the tuna mixture evenly among the wraps. Tightly roll up the tortilla, slice and enjoy!", "output": "Tuna & Avocado Wrap"}]}
-{"id": "user_oriented_task_202", "motivation_app": "Google Meet", "instruction": "Summarize a meeting from the given list of bullet points. Be sure to convert shorthand into a first-hand account.", "instances": [{"input": "Rose: Analyze data and presents findings\nJohn: propose new idea\nJane: appointed to head project\nTom: need more time to fix software bug", "output": "At the meeting, Rose analyzed data and presented her findings. Then, John proposed a new idea. Moreover, Jane was appointed to head the project. And, Tom said he needs more time to fix a software bug."}]}
-{"id": "user_oriented_task_203", "motivation_app": "Spotify", "instruction": "List a few popular songs from the given album.", "instances": [{"input": "Back To Black", "output": "- Rehab\n- Just Friends\n- Tears Dry on Their Own\n- Wake Up Alone\n- Addicted"}]}
-{"id": "user_oriented_task_204", "motivation_app": "merriam-webster.com", "instruction": "Invent a new word based on a description of the word.", "instances": [{"input": "The feeling of knowing, while you are still in the moment, that something occurring will be remembered forever.", "output": "mnemonty"}]}
-{"id": "user_oriented_task_205", "motivation_app": "w3schools", "instruction": "Given a programming language and the name of a function, write a command to show how to use the function.", "instances": [{"input": "Language: Python\nFunction: input", "output": "username = input(\"Enter username:\")"}]}
-{"id": "user_oriented_task_206", "motivation_app": "Reddit", "instruction": "Provide a pleasant compliment based on the quality you have been given.", "instances": [{"input": "Truthfulness", "output": "Everything would be better if more people were like you."}]}
-{"id": "user_oriented_task_207", "motivation_app": "Overleaf", "instruction": "Create a brief resume introduction using the following information.", "instances": [{"input": "name: Mark\nnationality: Italian\nlanguage: English\nattributes: creative, goal-oriented, motivated\nskills: software engineering, content marketing", "output": "Mark is a creative and goal-oriented software engineer and content marketer. He has a strong background in software engineering and is skilled in content marketing. He is a motivated individual who always looks for new opportunities to learn and grow."}]}
-{"id": "user_oriented_task_208", "motivation_app": "Amazon", "instruction": "Make a list of adjectives that can be used to describe the given brand.", "instances": [{"input": "a creative tech startup", "output": "1. Innovative\n2. Creative\n3. Disruptive\n4. cutting-edge\n5. revolutionary\n6. game-changing\n7. industry-leading\n8. trailblazing\n9. groundbreaking\n10. world-changing"}]}
-{"id": "user_oriented_task_209", "motivation_app": "tripadvisor.com", "instruction": "Someone gives you some information about where they want to go and what they enjoy seeing and doing. Describe a proper road trip for them and find the best stops and sctivities in the way.", "instances": [{"input": "I'm looking for a west coast road trip. My goal is to start near San Diego and travel to Los Angeles.", "output": "Begin near the city of San Diego and make the first stop in coastal La Jolla. This upscale little village is a great a place for an overnight, a day of sunbathing at La Jolla Shores Beach, or a cocktail with a view.\nJust 20 minutes north, stop into beachy Encinitas to window shop the boutiques or sample fish tacos. Walk through the coastal meditation garden at the Self-Realization Fellowship or join surfers in the line-up at one of the fantastic local beaches here\nJust up the coast is cute little San Juan Capistrano, home to one of California\u2019s favorite missions. In spring, more than 6,000 swallows migrate here from Argentina! As you approach LA, don\u2019t miss swank Laguna Hills. Home to the one-of-a-kind Pageant of the Masters art event, Laguna is brimming with artists and charm. It\u2019s a great place for an overnight. Plus, Crystal Cove Beach in nearby Newport Beach is one of California\u2019s finest!\nAfter that, the choice is yours: Disney lovers should head directly to Disneyland in Anaheim to meet Mickey. To join the beautiful people and snap a photo of an iconic California scene, head to Santa Monica Pier and walk the boardwalk or ride the world\u2019s only solar-powered Ferris wheel. A San Diego to LA road trip is the perfect way to enjoy all things Southern California."}]}
-{"id": "user_oriented_task_210", "motivation_app": "StackOverflow", "instruction": "Regex is a set of characters that define a search pattern. This kind of pattern is usually used by string-searching algorithms to find or replace strings or to validate inputs. Convert the natural english text to regex.", "instances": [{"input": "Match a valid ip address", "output": "^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$"}]}
-{"id": "user_oriented_task_211", "motivation_app": "sth related to real estate?", "instruction": "Write an engaging and well-written property listing description for selling a house. Address of the house and some of the details are given to you. Fill in the information gap with hallucinations if needed.", "instances": [{"input": "Property Address: 412 Monterey Ave, Capitola, CA 95010\nParking: 3 cars + a finished single car garage\nDetails:\n- open floorplan\n- window with views to park/mountains\n- Kitchen with shaker cabinetry and white Calcatta quartz counters", "output": "Amazing designer updates to this beach chich home that is just a quick 3-minute stroll to Capitola Village! As you enter this light-filled home you will love the open floorplan and huge picture window with views to park/mountains! Kitchen with shaker cabinetry and white Calcatta quartz counters new stainless appliances (gas stove). Master bedroom with own access to yard! Beautiful bathrooms w/designer finishes and custom, subway & picket tile! New composition roof and gutters, new indoor and outdoor paint, new dual pane windows, new wide plank washed oak laminate floors, new doors, new fencing, forced air heat + LOTS more! Home sits high above Monterey Ave with alley access and parking for 3 cars + a finished single car garage! The indoor/outdoor living is ideal from this sunny locale with front, side and backyards that are super sunny with pathways and perfect for gardening or entertaining! Stroll to beach, Gayles bakery, parks, schools & churches from this adorable beach home!"}]}
-{"id": "user_oriented_task_212", "motivation_app": "Redfin", "instruction": "Given a real estate listing description, extract the information and details of the property from the text. The details consist of address, basic house attributes, etc.", "instances": [{"input": "WALK TO THE BEACH FROM CAPITOLA KNOLLS! Single-level upstairs condo with your own large private deck overlooking nature. Adorable home that is move-in ready and waiting for you to put your own personal touches on. Gorgeous and well-maintained landscaped park-like setting with lush green grasses with a community pool and spa to enjoy. Desirable location...walk to Capitola Village, Gayle's Bakery, restaurants, shopping, schools or jump on Hwy 1 right around the corner.", "output": "-Address: \"Capitola\"\n-Type of home: Condo\n-Location: \"walk to Capitola Village, Gayle's Bakery, restaurants, shopping, and schools or jump on Hwy 1 right around the corner.\"\n-House attributes: \"Single-level upstairs, large private deck overlooking nature, move-in ready home\"\n-Outdoor details: \"well-maintained landscaped park-like setting with lush green grasses with a Community pool and spa\""}]}
-{"id": "user_oriented_task_213", "motivation_app": "Redfin", "instruction": "Review the property description and tell us what is good and what is bad about it and list its key takeaways.", "instances": [{"input": "There's real magic in combining inherently compelling Victorian architecture with a modern aesthetic. Add in an extraordinary multi-level garden & the result is the undeniable elegance of 444 29th St. The front of the home is light-filled w/ a seamless connection between living, dining & kitchen. Newly updated kitchen w/ quartz counters & upgraded appliances. A lg dining rm has a bay of windows &, when paired w/ the adjacent living rm, provides an ideal space to entertain. 2 lg bdrms on this level are at the rear. Both w/ lg closets & walk out to the garden. Stylishly remodeled full bath on this level. 3rd bdrm/2nd full bath are located on the ground floor - perfect as a guest rm, home office, fitness area, etc. The enormous garden occupies an extra deep lot & is beautifully landscaped & terraced. Laundry/storage rm, 1-car garage pkg, Tesla solar panels. Prime, flat part of Noe Valley - short walk to all that Noe has to offer. Steps from the J-Church & convenient to shuttles/freeways.", "output": "1- Really engaging opening hook \u2013 very unique wording used here that captures the buyers' attention\n2- Good use of search engine optimization (SEO) keywords, for example using the address within the description, as the address is often what a buyer will search\n3- Use of too many short forms may make it harder for buyers to understand and read. It\u2019s often better to improve the quality of the content vs maximizing the quantity \u2013 in this case it may have made more sense to remove some pieces and reduce the use of acronyms"}]}
-{"id": "user_oriented_task_214", "motivation_app": "Redfin", "instruction": "List some of the top real estate marketing words to add value to the listing and engage more potential buyers.", "instances": [{"input": "", "output": "1. Luxurious\n2. New\n3. Spacious\n4. Captivating\n5. Move-In-Ready\n6. Landscaped\n7. Beautiful\n8. Character\n9. Custom\n10. Pet-Friendly"}]}
-{"id": "user_oriented_task_215", "motivation_app": "Amazon", "instruction": "Suggest some product bundles that are often purchased with a given product.", "instances": [{"input": "MacBook Air", "output": "MacBook Air, MacBook Air sleeve, MacBook Air case, MacBook Air external drive"}]}
-{"id": "user_oriented_task_216", "motivation_app": "Jira", "instruction": "Create a template in markdown to create scope for Jira tickets. Members should use this template as a checklist to ensure they have included all the necessary information when creating a ticket.", "instances": [{"input": "", "output": "# TO SCOPE THIS TICKET:\n[] create a descriptive title\n[] complete TBDs below\n[] assign epic to this ticket\n[] create relevant linking relations\n[] delete this section\n# DELETE ME AND ABOVE!\n\n## \ud83e\uddd1 Story\nWe need to TBDTASK from TBDRESOURCE in order for TBDUSER to TBDACTION\n\n## \ud83d\udd28 Acceptance Criteria\n- AC1\n- AC2\n\n## \ud83d\udcda Resources\n- [PLANNINGDOC1](WWWDOTEXAMPLEDOTCOM)\n- [FIGMADOC1](WWWDOTEXAMPLEDOTCOM)\n- [TECHDOCUMENTATION1](WWWDOTEXAMPLEDOTCOM)\n- [TECHDOCUMENTATION2](WWWDOTEXAMPLEDOTCOM)"}]}
-{"id": "user_oriented_task_217", "motivation_app": "Jira", "instruction": "As part of software engineering, user stories describe what the user wants the system to do, while acceptance criteria describe the conditions a specific user story must meet. Create basic acceptance criteria based on the given user story.", "instances": [{"input": "User story: As a user, I want to use a search field to type a city, name, or street, so that I could find matching hotel options.", "output": "- The search field is placed on the top bar\n- Search starts once the user clicks \u201cSearch\u201d\n- The field contains a placeholder with a grey-colored text: \u201cWhere are you going?\u201d\n- The placeholder disappears once the user starts typing\n- Search is performed if a user types in a city, hotel name, street, or all combined\nSearch is in English, French, German, and Ukrainian\nThe user can\u2019t type more than 200 symbols\nThe search doesn\u2019t support special symbols (characters). If the user has typed a special symbol, show the warning message: \u201cSearch input cannot contain special symbols.\u201d"}]}
-{"id": "user_oriented_task_218", "motivation_app": "Jira", "instruction": "A user story is an informal, general explanation of a software feature written from the perspective of the end user or customer. Write a user story for a given software.", "instances": [{"input": "Gmail", "output": "As a user, I want to be able to recover the password to my account, so that I will be able to access my account in case I forgot the password."}]}
-{"id": "user_oriented_task_219", "motivation_app": "Jira", "instruction": "Write a Jira ticket for the given task.", "instances": [{"input": "New Employee onboarding", "output": "Title: Please help our new employee get settled.\nPrepare a desk, chair, and computer for them to use.\nCreate their company account, email, and temporary password.\nGrant access to company documents."}]}
-{"id": "user_oriented_task_220", "motivation_app": "Google Docs", "instruction": "Paraphrase the given text in an academic style.", "instances": [{"input": "Lots of papers have been published on this topic.", "output": "A considerable amount of literature has been published on this topic."}]}
-{"id": "user_oriented_task_221", "motivation_app": "Google Sheet", "instruction": "Make a list of the pros and cons of the given decision.", "instances": [{"input": "Implementing a Remote Working Policy", "output": "Pros: \n- Allowing employees to work from home can reduce their stressful commutes and increase their productivity. \n- Remote working provides greater flexibility, particularly for working parents and caregivers who have other responsibilities. \n- It's possible to hire highly creative people who live further away if they don't have to come into the office every day.\n- People who aren't distracted by office noises and regular interruptions will perform better. \n- Team members' communication skills will improve, as they will learn to communicate more effectively using email and IM, for example. \n\nCons:\n- It may be more difficult for team members to build meaningful, productive, creative relationships with one another.\n- The organization would have to budget for equipment that team members can take home.\n- The organization may not be able to keep track of how people spend their time. \n- The company could lose control of its data, because equipment and documents are removed from the office.\n- It will be harder to schedule face-to-face meetings and provide constructive feedback."}]}
-{"id": "user_oriented_task_222", "motivation_app": "Google Docs", "instruction": "Include important study notes and key points that someone should know about the given subject.", "instances": [{"input": "history of the USA", "output": "1. The United States of America was founded in 1776.\n2. The United States Constitution was ratified in 1788.\n3. The United States Declaration of Independence was signed in 1776.\n4. The United States Bill of Rights was ratified in 1791.\n5. The United States of America is a federal republic."}]}
-{"id": "user_oriented_task_223", "motivation_app": "Yelp", "instruction": "Based on the given keywords, write a review of a restaurant.", "instances": [{"input": "Name: Pizzeria\ngood prices, quiet place, tasty, nice staff", "output": "I had a great experience at Pizzeria. The prices were good, it was a quiet place, and the food was tasty. The staff was also very nice."}]}
-{"id": "user_oriented_task_224", "motivation_app": "Google Map", "instruction": "Convert natural language into a list of turn-by-turn directions.", "instances": [{"input": "Go west on I-10 until you hit the 101 then take it north. You will eventually see the Hollywood sign.", "output": "1. Go west on I-10.\n2. Take the 101 north.\n3. You will eventually see the Hollywood sign."}]}
-{"id": "user_oriented_task_225", "motivation_app": "Play Store", "instruction": "Provide an ESRB rating for the following game.", "instances": [{"input": "This is an action-adventure game in which players help Kratos and his son on a dangerous quest. Players explore various realms and engage in frenetic hand-to-hand combat against human-like raiders and fantastical creatures (e.g., centaurs, trolls, dragons). Players use axes and chained blades to battle enemies, often resulting in large blood-splatter effects and dismemberment. Players can perform finishing attacks that depict close-up impalement via bladed and hand-held weapons; repeated axe strikes to a creature's neck results in decapitation. The words \u201cf**k\u201d and \u201csh*t\u201d are heard in the game.", "output": "Mature 17+"}]}
-{"id": "user_oriented_task_226", "motivation_app": "MS Powerpoint", "instruction": "A color description has been provided. Find the CSS code associated with that color.", "instances": [{"input": "A light red color with a medium light shade of pink", "output": "#DB7093"}]}
-{"id": "user_oriented_task_227", "motivation_app": "Leetcode", "instruction": "Identify and fix bugs in the given code and rewrite it", "instances": [{"input": "for i in range(10)\n print(Answer is:)\n print(i)", "output": "for i in range(10):\n print(\"Answer is:\")\n print(i)"}]}
-{"id": "user_oriented_task_228", "motivation_app": "Google Docs", "instruction": "Make a list of common phrases for the given section of the paper.", "instances": [{"input": "Introduction", "output": "1- ... is an important component of ...\n2- Central to the discipline of ... is ...\n3- Recent developments in ... have shown the need for ...\n4- The study build on ... and contributes to ...\n5- Until now, has only been applied to ..."}]}
-{"id": "user_oriented_task_229", "motivation_app": "Netflix", "instruction": "We have described a scene from a movie that we have seen in the past. Find out what movie it is.", "instances": [{"input": "A choice has to be made between picking the red pill or the blue pill", "output": "Matrix"}]}
-{"id": "user_oriented_task_230", "motivation_app": "Facebook", "instruction": "Take the product description and write a creative ad for it.", "instances": [{"input": "Document Cameras allow teachers to display documents, books, or other materials for their students to see.", "output": "Looking for a way to make your lessons more engaging? Check out our document cameras! With our cameras, you can display documents, books, or other materials for your students to see. This is a great way to add more visual elements to your lessons and make them more exciting for your students."}]}
-{"id": "user_oriented_task_231", "motivation_app": "Google Sheets", "instruction": "Translate the natural language description into a Google Sheets formula.", "instances": [{"input": "if C7 is equal to \"U.S.A.\" or \"Canada\" or \"Europe\" or \"The Americas\"", "output": "IF (OR(C7 = \"U.S.A.\"; C7 = \"Canada\"); \"The Americas\"; \"Europe\")"}]}
-{"id": "user_oriented_task_232", "motivation_app": "Twitter", "instruction": "You are given a tweet and you should decide whether it's offensive or not.", "instances": [{"input": "She has certainly taken some heat for being such an....well idiot.", "output": "Offensive"}]}
-{"id": "user_oriented_task_233", "motivation_app": "Workout", "instruction": "For a quick and efficient office workout, suggest a short routine.", "instances": [{"input": "", "output": "Desk pushups: 20 reps.\nSeated knee-to-chest: 10 reps.\nSquats: 10 reps.\nDesk plank: 30 seconds.\nSeated flutter kicks: 10 reps on each side.\nRomanian deadlifts: 10 reps.\nJumping jacks: 20 reps."}]}
-{"id": "user_oriented_task_234", "motivation_app": "Yelp", "instruction": "Predict how many stars the author will give to the restaurant from a Yelp review.", "instances": [{"input": "The reviews were great, but honestly i felt it was just ok. Seemed like a typical tourist spot were the food was mediocre. The service was ok, not great, but not the worst.\nThe bread was very good. I ordered the muscle fries. They had a sweet Vidalia onion flavor. No need for fries bc they were soggy wo much flavor. My boyfriend ordered a pesto chicken pasta and it was again mediocre.", "output": "3 stars"}]}
-{"id": "user_oriented_task_235", "motivation_app": "Yelp", "instruction": "Categorize the Business into one of the Restaurants, Home Services, Auto Services and Miscellaneous based on its Specialties.", "instances": [{"input": "Call us at 650-636-4884 or visit our website to receive a quote. This shop specializes in New Tires and General Auto Repair. We carry all tires in-house and have a wide range to fit any budget or car specifics. If you are unsure what tires you need there are experts on hand and available to help you pick the best tire for your needs. We also carry commercial vehicle tires and can serve a wide array of fleets.", "output": "Auto Services"}]}
-{"id": "user_oriented_task_236", "motivation_app": "Grammarly", "instruction": "Notify me of any suggestions you may have for making the text more grammatically correct.", "instances": [{"input": "This is the second time I've been here, and the vibes, and the food were exquisite.", "output": "It appears that you have an unnecessary comma in a compound subject. Consider removing it."}]}
-{"id": "user_oriented_task_237", "motivation_app": "Tasty", "instruction": "Using the ingredients that are available, create a dinner recipe.", "instances": [{"input": "Kale + almond butter + apple cider vinegar + agave nectar + tamari", "output": "Gently pat the kale leaves dry.\nDe-stem the kale and tear into pieces of desired size. Add the kale to a large bowl.\nAdd the almond butter, apple cider vinegar, liquid aminos, and agave nectar to a small bowl and whisk together for 30 seconds.\nPour the dressing over the kale leaves, roll up your sleeves, and massage the kale with your hands for 2-3 minutes. The more you massage the kale, the more tender and flavorful the leaves will become.*\nTransfer the kale to one large or two small salad bowls and top with desired garnishes."}]}
-{"id": "user_oriented_task_238", "motivation_app": "Amazon", "instruction": "Categorize the given product into one of Electronics, Computers, Smart Home, or Arts & Crafts departments.", "instances": [{"input": "Google Nest Learning Thermostat", "output": "Smart Home"}]}
-{"id": "user_oriented_task_239", "motivation_app": "Workout", "instruction": "Provide instructions for the given exercise.", "instances": [{"input": "Leg Raises", "output": "To start toning your abs by hitting up the lower abdominal muscles, Riggins suggests 30 seconds of leg raises. Here's how: Turn on your back with legs straight and your feet and ankles together. Raise your legs up and down in a vertical position from your body, while keeping your belly button. Slowly bring legs back down, but if that is too difficult, tuck your knees. (And be careful not to strain your lower back!)"}]}
-{"id": "user_oriented_task_240", "motivation_app": "Grammarly", "instruction": "Rewrite the text and correct the spelling errors.", "instances": [{"input": "It solves problems comon and uniqe to every team.", "output": "It solves problems common and unique to every team."}]}
-{"id": "user_oriented_task_241", "motivation_app": "merriam-webster.com", "instruction": "Write down antonyms for the given word.", "instances": [{"input": "laureating", "output": "discrediting, humiliating, dishonoring, shaming"}]}
-{"id": "user_oriented_task_242", "motivation_app": "w3schools", "instruction": "Write the HTML syntax for the required action.", "instances": [{"input": "adding an image to the page", "output": ""}]}
-{"id": "user_oriented_task_243", "motivation_app": "w3schools", "instruction": "Identify the programming language used to write the given code.", "instances": [{"input": "if (20 > 18) {\n printf(\"20 is greater than 18\");\n}", "output": "C"}]}
-{"id": "user_oriented_task_244", "motivation_app": "merriam-webster.com", "instruction": "Decide which part of speech the underlined word belongs to.", "instances": [{"input": "school will _keep through the winter", "output": "verb"}]}
-{"id": "user_oriented_task_245", "motivation_app": "merriam-webster.com", "instruction": "Define what the underlined word means for kids.", "instances": [{"input": "_keep a promise", "output": "to be faithful to : Fulfill"}]}
-{"id": "user_oriented_task_246", "motivation_app": "Grammarly", "instruction": "Expand the given sentence and give it more details and depth.", "instances": [{"input": "It changed my life.", "output": "The experience changed the course of my life forever."}]}
-{"id": "user_oriented_task_247", "motivation_app": "Airbnb", "instruction": "Extract the address of the property from the \"About this space\" section of Airbnb.", "instances": [{"input": "Tucked in the foothills of the quaint historic mining town of Grass Valley, CA this funky and spacious chalet invites you to experience the great outdoors with family, friends and pets. Enjoy the hot tub, basketball court, treehouse, kids rooms, bbq, fire-pit, outdoor theater, and more. One hour from Tahoe and 10 minutes to town, you\u2019re never short of things to explore.", "output": "Grass Valley, California, United States"}]}
-{"id": "user_oriented_task_248", "motivation_app": "Strava", "instruction": "Design a simple training challenge on Strava that everyone could attend.", "instances": [{"input": "", "output": "This month, push yourself to log 30 hours of whatever activities get you stoked to move. Carve out time for yourself and your health and get out there for less than one hour a day \u2013 or go long on the weekends to mix it up. It doesn\u2019t always have to be your most all-out effort, yoga or walks count for just as much. Commit to consistency to earn this time-intensive trophy."}]}
-{"id": "user_oriented_task_249", "motivation_app": "Telegram", "instruction": "Suggest some names for a friendly group in telegram.", "instances": [{"input": "", "output": "1. The Conversation Club \n2. The Chatterbox \n3. F.R.I.E.N.D.S \n4. The Chatty Collective \n5. The Tea Talkers \n6. The Support Squad \n7. The Cheerful Chums"}]}
-{"id": "user_oriented_task_250", "motivation_app": "Google Search", "instruction": "Write what the pronunciation of the given word sounds like. Follow the \"Google pronunciation dictionary\" scheme for phonetic spelling.", "instances": [{"input": "interpretations", "output": "in\u00b7tr\u00b7pruh\u00b7tay\u00b7shnz"}]}
-{"id": "user_oriented_task_251", "motivation_app": "Reddit", "instruction": "Explain the meaning of the given phrase in simple terms. Use an example if possible. It would be helpful if you could give an example.", "instances": [{"input": "\"With a little give in them\"", "output": "It means with some space to move. It's a metaphor. \"Give\" is a noun here. It's meaning is similar to\"flexibility\". If something has no \"give\" it's tight and rigid. It's the opposite of flexible and loose. For example, you might use it to talk about clothes: \"These jeans are so tight. There's no give in them. I can't move.\""}]}
diff --git a/gpt4all-api/README.md b/gpt4all-api/README.md
new file mode 100644
index 00000000..f4d6b7cf
--- /dev/null
+++ b/gpt4all-api/README.md
@@ -0,0 +1,2 @@
+# GPT4All API
+This directory will contain code to build out a RESTful API for GPT4All models. Exact details are TBD, but as an MVP, a user should be able to send requests to list, download, and generate text with different models.
\ No newline at end of file
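A rough sketch of what a client for the MVP described above might look like, using Qt's network classes. The host, port, and `/models` endpoint are assumptions for illustration only, since the exact API is still TBD.

```cpp
// Sketch only: queries a hypothetical /models endpoint of the planned API.
// The host, port, and route are assumptions, not a published interface.
#include <QCoreApplication>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QUrl>
#include <QDebug>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    QNetworkAccessManager manager;
    QNetworkRequest request(QUrl(QStringLiteral("http://localhost:4891/models")));
    QNetworkReply *reply = manager.get(request);

    // Print whatever the service returns (e.g. a JSON list of models) and quit.
    QObject::connect(reply, &QNetworkReply::finished, [&]() {
        qDebug().noquote() << reply->readAll();
        reply->deleteLater();
        app.quit();
    });

    return app.exec();
}
```

Download and generation endpoints would presumably follow the same request/response pattern once the API is specified.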
diff --git a/gpt4all-backend/README.md b/gpt4all-backend/README.md
new file mode 100644
index 00000000..743b4bc2
--- /dev/null
+++ b/gpt4all-backend/README.md
@@ -0,0 +1,2 @@
+# GPT4ALL Backend
+This directory will contain the C/C++ model backends. We will want a subdirectory for each model we build out (e.g. gptj, llama). Ideally, there will be a universal library/wrapper for all models. Language bindings will be built on top of the universal library.
\ No newline at end of file
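To make the "universal library/wrapper" idea concrete, here is a minimal sketch of a shared backend interface with one implementation per model family. The class and method names are illustrative assumptions, not the project's actual llmodel API.

```cpp
// Illustrative sketch of a shared backend interface; names are assumptions.
#include <functional>
#include <iostream>
#include <string>

class ModelBackend {
public:
    virtual ~ModelBackend() = default;
    virtual bool loadModel(const std::string &modelPath) = 0;
    virtual bool isModelLoaded() const = 0;
    // Streams generated text pieces to the callback; return false to stop early.
    virtual void prompt(const std::string &input,
                        const std::function<bool(const std::string &)> &onToken) = 0;
};

// One subdirectory/backend per model family (e.g. gptj, llama) would provide
// a concrete implementation like this stub.
class GptJBackend : public ModelBackend {
public:
    bool loadModel(const std::string &modelPath) override { m_loaded = !modelPath.empty(); return m_loaded; }
    bool isModelLoaded() const override { return m_loaded; }
    void prompt(const std::string &input,
                const std::function<bool(const std::string &)> &onToken) override {
        // Placeholder: a real backend would run ggml inference here.
        onToken("echo: " + input);
    }
private:
    bool m_loaded = false;
};

int main() {
    GptJBackend backend;
    backend.loadModel("ggml-example.bin"); // hypothetical file name
    backend.prompt("Hello", [](const std::string &piece) {
        std::cout << piece << std::endl;
        return true;
    });
    return 0;
}
```

Language bindings would then only need to target this one interface (or a thin C shim over it) rather than each backend separately.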
diff --git a/gpt4all-backend/gptj/placeholder b/gpt4all-backend/gptj/placeholder
new file mode 100644
index 00000000..e69de29b
diff --git a/gpt4all-backend/llama/placeholder b/gpt4all-backend/llama/placeholder
new file mode 100644
index 00000000..e69de29b
diff --git a/gpt4all-bindings/README.md b/gpt4all-bindings/README.md
new file mode 100644
index 00000000..1969027e
--- /dev/null
+++ b/gpt4all-bindings/README.md
@@ -0,0 +1,3 @@
+# GPT4All Bindings
+This directory will contain language specific bindings on top of the C/C++ model backends.
+We will have one directory per language binding (e.g. Python, TypeScript, Golang).
\ No newline at end of file
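Since most languages call a C ABI more easily than C++, a thin extern "C" shim over the backend library is one plausible shape for these bindings to target. The symbol names below are assumptions for illustration; they are not an existing binding interface.

```cpp
// Illustrative C ABI shim that a Python (ctypes), Go (cgo), or TypeScript (FFI)
// binding could load; names and behavior are assumptions, not the real API.
#include <cstddef>
#include <cstring>
#include <string>

extern "C" {

struct llmodel_handle {        // opaque to foreign callers
    std::string modelPath;
};

llmodel_handle *llmodel_create(const char *model_path) {
    return new llmodel_handle{model_path ? model_path : ""};
}

void llmodel_destroy(llmodel_handle *handle) {
    delete handle;
}

// Stub "generation" that just copies the prompt back; a real shim would call
// into the C/C++ backend. Returns bytes written, excluding the terminating NUL.
int llmodel_generate(llmodel_handle *handle, const char *prompt,
                     char *buffer, size_t buffer_size) {
    (void)handle;
    if (!prompt || !buffer || buffer_size == 0)
        return 0;
    std::strncpy(buffer, prompt, buffer_size - 1);
    buffer[buffer_size - 1] = '\0';
    return static_cast<int>(std::strlen(buffer));
}

} // extern "C"
```

A Python binding, for instance, could load this shared library with ctypes and wrap these three calls in a small class; other languages would do the equivalent with their own FFI.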
diff --git a/gpt4all-bindings/golang/placeholder b/gpt4all-bindings/golang/placeholder
new file mode 100644
index 00000000..e69de29b
diff --git a/gpt4all-bindings/python/placeholder b/gpt4all-bindings/python/placeholder
new file mode 100644
index 00000000..e69de29b
diff --git a/gpt4all-bindings/typescript/placeholder b/gpt4all-bindings/typescript/placeholder
new file mode 100644
index 00000000..e69de29b
diff --git a/gpt4all-chat/CMakeLists.txt b/gpt4all-chat/CMakeLists.txt
new file mode 100644
index 00000000..fbe018b9
--- /dev/null
+++ b/gpt4all-chat/CMakeLists.txt
@@ -0,0 +1,225 @@
+cmake_minimum_required(VERSION 3.16)
+
+if(APPLE)
+ option(BUILD_UNIVERSAL "Build a Universal binary on macOS" OFF)
+ if(BUILD_UNIVERSAL)
+ # Build a Universal binary on macOS
+ # This requires that the found Qt library is compiled as Universal binaries.
+ set(CMAKE_OSX_ARCHITECTURES "arm64;x86_64" CACHE STRING "" FORCE)
+ else()
+ # Build for the host architecture on macOS
+ set(CMAKE_OSX_ARCHITECTURES "${CMAKE_HOST_SYSTEM_PROCESSOR}" CACHE STRING "" FORCE)
+ endif()
+endif()
+
+set(APP_VERSION_MAJOR 2)
+set(APP_VERSION_MINOR 4)
+set(APP_VERSION_PATCH 2)
+set(APP_VERSION "${APP_VERSION_MAJOR}.${APP_VERSION_MINOR}.${APP_VERSION_PATCH}")
+
+# Include the binary directory for the generated header file
+include_directories("${CMAKE_CURRENT_BINARY_DIR}")
+
+project(gpt4all VERSION ${APP_VERSION} LANGUAGES CXX C)
+
+set(CMAKE_AUTOMOC ON)
+set(CMAKE_AUTORCC ON)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+option(GPT4ALL_LOCALHOST "Build installer for localhost repo" OFF)
+option(GPT4ALL_AVX_ONLY "Build for avx only" OFF)
+option(GPT4ALL_OFFLINE_INSTALLER "Build an offline installer" OFF)
+
+# Generate a header file with the version number
+configure_file(
+ "${CMAKE_CURRENT_SOURCE_DIR}/cmake/config.h.in"
+ "${CMAKE_CURRENT_BINARY_DIR}/config.h"
+)
+
+find_package(Qt6 6.2 COMPONENTS Core Quick QuickDialogs2 Svg REQUIRED)
+
+# Get the Qt6Core target properties
+get_target_property(Qt6Core_INCLUDE_DIRS Qt6::Core INTERFACE_INCLUDE_DIRECTORIES)
+get_target_property(Qt6Core_LIBRARY_RELEASE Qt6::Core LOCATION_RELEASE)
+
+# Find the qmake binary
+find_program(QMAKE_EXECUTABLE NAMES qmake qmake6 PATHS ${Qt6Core_INCLUDE_DIRS}/../.. NO_DEFAULT_PATH)
+
+# Get the Qt 6 root directory
+get_filename_component(Qt6_ROOT_DIR "${Qt6Core_LIBRARY_RELEASE}" DIRECTORY)
+get_filename_component(Qt6_ROOT_DIR "${Qt6_ROOT_DIR}/.." ABSOLUTE)
+
+message(STATUS "qmake binary: ${QMAKE_EXECUTABLE}")
+message(STATUS "Qt 6 root directory: ${Qt6_ROOT_DIR}")
+
+add_subdirectory(llmodel)
+
+set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
+
+qt_add_executable(chat
+ main.cpp
+ chat.h chat.cpp
+ chatllm.h chatllm.cpp
+ chatmodel.h chatlistmodel.h chatlistmodel.cpp
+ download.h download.cpp
+ network.h network.cpp
+ llm.h llm.cpp
+ sysinfo.h
+)
+
+qt_add_qml_module(chat
+ URI gpt4all
+ VERSION 1.0
+ QML_FILES
+ main.qml
+ qml/ChatDrawer.qml
+ qml/ModelDownloaderDialog.qml
+ qml/NetworkDialog.qml
+ qml/NewVersionDialog.qml
+ qml/ThumbsDownDialog.qml
+ qml/SettingsDialog.qml
+ qml/StartupDialog.qml
+ qml/PopupDialog.qml
+ qml/AboutDialog.qml
+ qml/Theme.qml
+ RESOURCES
+ icons/send_message.svg
+ icons/stop_generating.svg
+ icons/regenerate.svg
+ icons/copy.svg
+ icons/settings.svg
+ icons/edit.svg
+ icons/trash.svg
+ icons/network.svg
+ icons/thumbs_up.svg
+ icons/thumbs_down.svg
+ icons/logo.svg
+ icons/logo-32.png
+ icons/logo-48.png
+ icons/favicon.ico
+ icons/favicon.icns
+)
+
+set_target_properties(chat PROPERTIES
+ MACOSX_BUNDLE_GUI_IDENTIFIER gpt4all
+ MACOSX_BUNDLE_BUNDLE_VERSION ${PROJECT_VERSION}
+ MACOSX_BUNDLE_SHORT_VERSION_STRING ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}
+ MACOSX_BUNDLE TRUE
+ WIN32_EXECUTABLE TRUE
+ MACOSX_BUNDLE_ICON_FILE "favicon.icns"
+)
+
+if(${CMAKE_SYSTEM_NAME} MATCHES Darwin)
+ set_target_properties(chat PROPERTIES
+ OUTPUT_NAME gpt4all
+ )
+endif()
+
+target_compile_definitions(chat
+ PRIVATE $<$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>:QT_QML_DEBUG>)
+target_link_libraries(chat
+ PRIVATE Qt6::Quick Qt6::Svg)
+target_link_libraries(chat
+ PRIVATE llmodel)
+
+set(COMPONENT_NAME_MAIN ${PROJECT_NAME})
+set(CMAKE_INSTALL_PREFIX ${CMAKE_BINARY_DIR}/install)
+
+if(NOT (CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin" AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL "arm64"))
+ add_executable(test_hw test_hw.cpp)
+ install(TARGETS test_hw DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN})
+endif()
+
+install(TARGETS chat DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN})
+install(TARGETS llmodel DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN})
+install(TARGETS llama DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN})
+
+set(CPACK_GENERATOR "IFW")
+set(CPACK_VERBATIM_VARIABLES YES)
+set(CPACK_IFW_VERBOSE ON)
+
+if(${CMAKE_SYSTEM_NAME} MATCHES Linux)
+ set(LINUXDEPLOYQT "$ENV{HOME}/dev/linuxdeployqt/build/tools/linuxdeployqt/linuxdeployqt")
+ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/deploy-qt-linux.cmake.in"
+ "${CMAKE_BINARY_DIR}/cmake/deploy-qt-linux.cmake" @ONLY)
+ set(CPACK_PRE_BUILD_SCRIPTS ${CMAKE_BINARY_DIR}/cmake/deploy-qt-linux.cmake)
+ set(CPACK_IFW_ROOT "~/Qt/Tools/QtInstallerFramework/4.5")
+ set(CPACK_PACKAGE_FILE_NAME "${COMPONENT_NAME_MAIN}-installer-linux")
+ set(CPACK_IFW_TARGET_DIRECTORY "@HomeDir@/${COMPONENT_NAME_MAIN}")
+elseif(${CMAKE_SYSTEM_NAME} MATCHES Windows)
+ find_program(WINDEPLOYQT windeployqt HINTS ${_qt_bin_dir})
+ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/deploy-qt-windows.cmake.in"
+ "${CMAKE_BINARY_DIR}/cmake/deploy-qt-windows.cmake" @ONLY)
+ set(CPACK_PRE_BUILD_SCRIPTS ${CMAKE_BINARY_DIR}/cmake/deploy-qt-windows.cmake)
+ set(CPACK_IFW_ROOT "C:/Qt/Tools/QtInstallerFramework/4.5")
+ set(CPACK_IFW_PACKAGE_ICON "${CMAKE_CURRENT_SOURCE_DIR}/icons/favicon.ico")
+ set(CPACK_PACKAGE_FILE_NAME "${COMPONENT_NAME_MAIN}-installer-win64")
+ set(CPACK_IFW_TARGET_DIRECTORY "@HomeDir@\\${COMPONENT_NAME_MAIN}")
+elseif(${CMAKE_SYSTEM_NAME} MATCHES Darwin)
+ find_program(MACDEPLOYQT macdeployqt HINTS ${_qt_bin_dir})
+ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/deploy-qt-mac.cmake.in"
+ "${CMAKE_BINARY_DIR}/cmake/deploy-qt-mac.cmake" @ONLY)
+ set(CPACK_PRE_BUILD_SCRIPTS ${CMAKE_BINARY_DIR}/cmake/deploy-qt-mac.cmake)
+ set(CPACK_IFW_ROOT "~/Qt/Tools/QtInstallerFramework/4.5")
+ set(CPACK_IFW_PACKAGE_ICON "${CMAKE_CURRENT_SOURCE_DIR}/icons/favicon.icns")
+ set(CPACK_PACKAGE_FILE_NAME "${COMPONENT_NAME_MAIN}-installer-darwin")
+ set(CPACK_IFW_TARGET_DIRECTORY "@ApplicationsDir@/${COMPONENT_NAME_MAIN}")
+ set(CPACK_BUNDLE_NAME ${COMPONENT_NAME_MAIN})
+ set(CPACK_BUNDLE_ICON "${CMAKE_CURRENT_SOURCE_DIR}/icons/favicon.icns")
+endif()
+
+set(CPACK_PACKAGE_INSTALL_DIRECTORY ${COMPONENT_NAME_MAIN})
+set(CPACK_PACKAGE_VERSION_MAJOR ${PROJECT_VERSION_MAJOR})
+set(CPACK_PACKAGE_VERSION_MINOR ${PROJECT_VERSION_MINOR})
+set(CPACK_PACKAGE_VERSION_PATCH ${PROJECT_VERSION_PATCH})
+set(CPACK_PACKAGE_HOMEPAGE_URL "https://gpt4all.io")
+set(CPACK_PACKAGE_ICON "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-48.png")
+set(CPACK_RESOURCE_FILE_LICENSE ${CMAKE_CURRENT_SOURCE_DIR}/LICENSE)
+set(CPACK_RESOURCE_FILE_README ${CMAKE_CURRENT_SOURCE_DIR}/README.md)
+set(CPACK_PACKAGE_EXECUTABLES "GPT4All")
+set(CPACK_CREATE_DESKTOP_LINKS "GPT4All")
+set(CPACK_IFW_PACKAGE_NAME "GPT4All")
+set(CPACK_IFW_PACKAGE_TITLE "GPT4All Installer")
+set(CPACK_IFW_PACKAGE_PUBLISHER "Nomic, Inc.")
+set(CPACK_IFW_PRODUCT_URL "https://gpt4all.io")
+set(CPACK_IFW_PACKAGE_WIZARD_STYLE "Aero")
+set(CPACK_IFW_PACKAGE_LOGO "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-48.png")
+set(CPACK_IFW_PACKAGE_WINDOW_ICON "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-32.png")
+set(CPACK_IFW_PACKAGE_WIZARD_SHOW_PAGE_LIST OFF)
+
+include(InstallRequiredSystemLibraries)
+include(CPack)
+include(CPackIFW)
+cpack_add_component(${COMPONENT_NAME_MAIN} DOWNLOADED)
+cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} ESSENTIAL FORCED_INSTALLATION)
+cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} VERSION ${APP_VERSION})
+cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} LICENSES "MIT LICENSE" ${CPACK_RESOURCE_FILE_LICENSE})
+cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} SCRIPT "${CMAKE_CURRENT_SOURCE_DIR}/cmake/installerscript.qs")
+cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} REPLACES "gpt4all-chat") # Was used in the very earliest prototypes
+
+if (GPT4ALL_LOCALHOST)
+ cpack_ifw_add_repository("GPT4AllRepository" URL "http://localhost/repository")
+elseif(GPT4ALL_OFFLINE_INSTALLER)
+ cpack_ifw_add_repository("GPT4AllRepository" URL "file://${CMAKE_BINARY_DIR}/packages")
+else()
+ if(${CMAKE_SYSTEM_NAME} MATCHES Linux)
+ if (GPT4ALL_AVX_ONLY)
+ cpack_ifw_add_repository("GPT4AllRepository" URL "https://gpt4all.io/installer_repos/avx_only/linux/repository")
+ else()
+ cpack_ifw_add_repository("GPT4AllRepository" URL "https://gpt4all.io/installer_repos/linux/repository")
+ endif()
+ elseif(${CMAKE_SYSTEM_NAME} MATCHES Windows)
+ # To sign the target on Windows, create a batch script, add it as a custom target, and then use CPACK_IFW_EXTRA_TARGETS to set this extra target
+ if (GPT4ALL_AVX_ONLY)
+ cpack_ifw_add_repository("GPT4AllRepository" URL "https://gpt4all.io/installer_repos/avx_only/windows/repository")
+ else()
+ cpack_ifw_add_repository("GPT4AllRepository" URL "https://gpt4all.io/installer_repos/windows/repository")
+ endif()
+ elseif(${CMAKE_SYSTEM_NAME} MATCHES Darwin)
+ if (GPT4ALL_AVX_ONLY)
+ cpack_ifw_add_repository("GPT4AllRepository" URL "https://gpt4all.io/installer_repos/avx_only/mac/repository")
+ else()
+ cpack_ifw_add_repository("GPT4AllRepository" URL "https://gpt4all.io/installer_repos/mac/repository")
+ endif()
+ endif()
+endif()
diff --git a/gpt4all-chat/LICENSE b/gpt4all-chat/LICENSE
new file mode 100644
index 00000000..09ca4546
--- /dev/null
+++ b/gpt4all-chat/LICENSE
@@ -0,0 +1,15 @@
+Copyright 2023 Nomic, Inc., Aaron Miller
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ADDENDUM:
+
+Any LLM models that are loaded and used by the application are not themselves
+subject to this license if indeed they are even copyrightable. The terms of
+this license apply only to the application software and its accompanying
+documentation and do not extend to any LLM models, whether created by the
+author of the application or obtained from third-party sources.
diff --git a/gpt4all-chat/README.md b/gpt4all-chat/README.md
new file mode 100644
index 00000000..4bbe6016
--- /dev/null
+++ b/gpt4all-chat/README.md
@@ -0,0 +1,78 @@
+# gpt4all-chat
+
+Cross-platform, Qt-based GUI for GPT4All versions that use GPT-J as the base
+model. NOTE: The model shown in the screenshot is actually a preview of a
+new training run for GPT4All based on GPT-J. The GPT4All project is hard at
+work getting ready to release this model, including installers for all
+three major operating systems. In the meantime, you can try this UI out with the original
+GPT-J model by following the build instructions below.
+
+![image](https://user-images.githubusercontent.com/50458173/231464085-da9edff6-a593-410e-8f38-7513f75c8aab.png)
+
+## Install
+
+One click installers for macOS, Linux, and Windows at https://gpt4all.io
+
+## Features
+
+* Cross-platform (Linux, Windows, macOS)
+* Fast CPU based inference using ggml for GPT-J based models
+* A UI designed to look and feel like what you've come to expect from a chat-style GPT
+* Check for updates so you can always stay current with the latest models
+* Easy to install with precompiled binaries available for all three major desktop platforms
+* Multi-modal - Ability to load more than one model and switch between them
+* Supports both llama.cpp and gptj.cpp style models
+* Model downloader in GUI featuring many popular open source models
+* Settings dialog to change temp, top_p, top_k, threads, etc
+* Copy your conversation to clipboard
+* Check for updates to get the very latest GUI
+
+## Feature wishlist
+
+* Multi-chat - a list of current and past chats and the ability to save/delete/export and switch between
+* Text to speech - have the AI respond with voice
+* Speech to text - give the prompt with your voice
+* Python bindings
+* Typescript bindings
+* Plugin support for langchain and other developer tools
+* Save your prompt/responses to disk
+* Upload prompts/responses manually/automatically to nomic.ai to aid future training runs
+* Syntax highlighting support for programming languages, etc.
+* REST API with a built-in web server in the chat GUI itself, with a headless operation mode as well
+* Advanced settings for changing temperature, topk, etc. (DONE)
+* YOUR IDEA HERE
+
+## Building and running
+
+* Follow the visual instructions on the [build_and_run](build_and_run.md) page
+
+## Getting the latest
+
+If you've already checked out the source code and/or built the program, make sure that you do a git fetch to get the latest changes, and that you also run ```git submodule update --init --recursive``` to update the submodules.
+
+## Manual download of models
+* https://gpt4all.io/models/ggml-mpt-7b-chat.bin (default) (md5sum 756249d3d6abe23bde3b1ae272628640) Current best non-commercially licensable chat model based on MPT and trained by Mosaic ML.
+* https://gpt4all.io/models/ggml-gpt4all-j-v1.3-groovy.bin (default) (md5sum 81a09a0ddf89690372fc296ff7f625af) Current best commercially licensable model based on GPT-J and trained by Nomic AI on the latest curated GPT4All dataset.
+* https://gpt4all.io/models/ggml-gpt4all-l13b-snoozy.bin (md5sum 91f886b68fbce697e9a3cd501951e455) Current best non-commercially licensable model based on Llama 13b and trained by Nomic AI on the latest curated GPT4All dataset.
+* https://gpt4all.io/models/ggml-gpt4all-j-v1.2-jazzy.bin (md5sum 879344aaa9d62fdccbda0be7a09e7976) A commercially licensable model based on GPT-J and trained by Nomic AI on the v2 GPT4All dataset.
+* https://gpt4all.io/models/ggml-gpt4all-j-v1.1-breezy.bin (md5sum 61d48a82cb188cceb14ebb8082bfec37) A commercially licensable model based on GPT-J and trained by Nomic AI on the v1 GPT4All dataset.
+* https://gpt4all.io/models/ggml-gpt4all-j.bin (md5sum 5b5a3f9b858d33b29b52b89692415595) A commercially licensable model based on GPT-J and trained by Nomic AI on the v0 GPT4All dataset.
+* https://gpt4all.io/models/ggml-vicuna-7b-1.1-q4_2.bin (md5sum 29119f8fa11712704c6b22ac5ab792ea) A non-commercially licensable model based on Llama 7b and trained by teams from UC Berkeley, CMU, Stanford, MBZUAI, and UC San Diego.
+* https://gpt4all.io/models/ggml-vicuna-13b-1.1-q4_2.bin (md5sum 95999b7b0699e2070af63bf5d34101a8) A non-commercially licensable model based on Llama 13b and trained by teams from UC Berkeley, CMU, Stanford, MBZUAI, and UC San Diego.
+* https://gpt4all.io/models/ggml-wizardLM-7B.q4_2.bin (md5sum 99e6d129745a3f1fb1121abed747b05a) A non-commercially licensable model based on Llama 7b and trained by Microsoft and Peking University.
+* https://gpt4all.io/models/ggml-stable-vicuna-13B.q4_2.bin (md5sum 6cb4ee297537c9133bddab9692879de0) A non-commercially licensable model based on Llama 13b and RLHF-trained by Stability AI.
+* https://gpt4all.io/models/ggml-mpt-7b-base.bin (md5sum 120c32a51d020066288df045ef5d52b9) A commercially licensable model base pre-trained by Mosaic ML.
+
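If you download a model manually, you can check it against the md5sum listed above before using it. A minimal Qt sketch for doing so follows; the file name is just an example, substitute whichever model you downloaded.

```cpp
// Computes the md5sum of a locally downloaded model file for comparison
// against the checksums listed above. The file name is an example only.
#include <QCoreApplication>
#include <QCryptographicHash>
#include <QDebug>
#include <QFile>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    QFile file(QStringLiteral("ggml-gpt4all-j-v1.3-groovy.bin"));
    if (!file.open(QIODevice::ReadOnly)) {
        qWarning() << "Could not open model file:" << file.fileName();
        return 1;
    }

    QCryptographicHash hash(QCryptographicHash::Md5);
    if (!hash.addData(&file)) {
        qWarning() << "Could not read model file:" << file.fileName();
        return 1;
    }

    qDebug() << "md5sum:" << hash.result().toHex();
    return 0;
}
```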
+## Terminal Only Interface with no Qt dependency
+
+Check out https://github.com/kuvaus/LlamaGPTJ-chat, which uses the llmodel backend and is therefore compatible with our ecosystem; all of the models listed above should work with it.
+
+## Contributing
+
+* Pull requests welcome. See the feature wish list for ideas :)
+
+
+## License
+The source code of this chat interface is currently under an MIT license. The underlying GPT4All-J model is released under the non-restrictive, open-source Apache 2 license.
+
+The GPT4All-J license allows users to use generated outputs as they see fit. Users take responsibility for ensuring their content meets applicable requirements for publication in a given context or region.
diff --git a/gpt4all-chat/build_and_run.md b/gpt4all-chat/build_and_run.md
new file mode 100644
index 00000000..e111d0b4
--- /dev/null
+++ b/gpt4all-chat/build_and_run.md
@@ -0,0 +1,57 @@
+# Install Qt 6.x and setup/build gpt4all-chat from source
+
+Depending upon your operating system, there are many ways that Qt is distributed.
+Here is the recommended method for getting the Qt dependency installed to set up and build
+gpt4all-chat from source.
+
+## Create a [Qt account](https://login.qt.io/register)
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/d1e44cab-4245-4144-a91c-7b02267df2b2)
+
+## Go to the Qt open source [download page](https://www.qt.io/download-qt-installer-oss)
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/d68f5f45-cca3-4fe9-acf4-cabdcb95f669)
+
+## Start the installer and sign in
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/899b1422-51ae-4bb5-acc9-b9027a8e9b19)
+
+## After the license screens, select Custom
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/2290031a-fdb0-4f47-a7f1-d77ad5451068)
+
+## Select the following
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/c6e999e5-cc8a-4dfc-8065-b59139e8c7ae)
+
+NOTE: This is for macOS. For Linux it is similar, but for Windows you need the MinGW 64-bit toolchain, not the MSVC install
+
+## Open up QtCreator
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/a34978f4-a220-459c-af66-e901d7ccd7bb)
+
+## Clone the git repo for gpt4all-chat
+
+```
+git clone --recurse-submodules https://github.com/nomic-ai/gpt4all-chat
+```
+
+## Open the gpt4all-chat project in QtCreator
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/3d3e2743-2a1d-43d6-9e55-62f7f4306de7)
+
+NOTE: Use File->Open File or Project, navigate to the gpt4all-chat repo, and choose the CMakeLists.txt
+
+## Configure project
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/44d5aafb-a95d-434b-ba2a-a3138c0e49a0)
+
+## Build project
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/43cd7b42-32f0-4efa-9612-d51f85637103)
+
+## Run project
+
+![image](https://github.com/nomic-ai/gpt4all-chat/assets/10168/611ea795-bdcd-4feb-a466-eb1c2e936e7e)
+
+
diff --git a/gpt4all-chat/chat.cpp b/gpt4all-chat/chat.cpp
new file mode 100644
index 00000000..2a6b941f
--- /dev/null
+++ b/gpt4all-chat/chat.cpp
@@ -0,0 +1,274 @@
+#include "chat.h"
+#include "llm.h"
+#include "network.h"
+#include "download.h"
+
+Chat::Chat(QObject *parent)
+ : QObject(parent)
+ , m_id(Network::globalInstance()->generateUniqueId())
+ , m_name(tr("New Chat"))
+ , m_chatModel(new ChatModel(this))
+ , m_responseInProgress(false)
+ , m_creationDate(QDateTime::currentSecsSinceEpoch())
+ , m_llmodel(new ChatLLM(this))
+{
+ // Should be in same thread
+ connect(Download::globalInstance(), &Download::modelListChanged, this, &Chat::modelListChanged, Qt::DirectConnection);
+ connect(this, &Chat::modelNameChanged, this, &Chat::modelListChanged, Qt::DirectConnection);
+
+ // Should be in different threads
+ connect(m_llmodel, &ChatLLM::isModelLoadedChanged, this, &Chat::isModelLoadedChanged, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::responseChanged, this, &Chat::handleResponseChanged, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::responseStarted, this, &Chat::responseStarted, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::responseStopped, this, &Chat::responseStopped, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::modelNameChanged, this, &Chat::handleModelNameChanged, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::modelLoadingError, this, &Chat::modelLoadingError, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::recalcChanged, this, &Chat::handleRecalculating, Qt::QueuedConnection);
+ connect(m_llmodel, &ChatLLM::generatedNameChanged, this, &Chat::generatedNameChanged, Qt::QueuedConnection);
+
+ connect(this, &Chat::promptRequested, m_llmodel, &ChatLLM::prompt, Qt::QueuedConnection);
+ connect(this, &Chat::modelNameChangeRequested, m_llmodel, &ChatLLM::modelNameChangeRequested, Qt::QueuedConnection);
+ connect(this, &Chat::loadDefaultModelRequested, m_llmodel, &ChatLLM::loadDefaultModel, Qt::QueuedConnection);
+ connect(this, &Chat::loadModelRequested, m_llmodel, &ChatLLM::loadModel, Qt::QueuedConnection);
+ connect(this, &Chat::unloadModelRequested, m_llmodel, &ChatLLM::unloadModel, Qt::QueuedConnection);
+ connect(this, &Chat::reloadModelRequested, m_llmodel, &ChatLLM::reloadModel, Qt::QueuedConnection);
+ connect(this, &Chat::generateNameRequested, m_llmodel, &ChatLLM::generateName, Qt::QueuedConnection);
+
+ // The following are blocking operations and will block the gui thread, therefore must be fast
+ // to respond to
+ connect(this, &Chat::regenerateResponseRequested, m_llmodel, &ChatLLM::regenerateResponse, Qt::BlockingQueuedConnection);
+ connect(this, &Chat::resetResponseRequested, m_llmodel, &ChatLLM::resetResponse, Qt::BlockingQueuedConnection);
+ connect(this, &Chat::resetContextRequested, m_llmodel, &ChatLLM::resetContext, Qt::BlockingQueuedConnection);
+}
+
+void Chat::reset()
+{
+ stopGenerating();
+ // Erase our current on disk representation as we're completely resetting the chat along with id
+ LLM::globalInstance()->chatListModel()->removeChatFile(this);
+ emit resetContextRequested(); // blocking queued connection
+ m_id = Network::globalInstance()->generateUniqueId();
+ emit idChanged();
+ // NOTE: We deliberately do not reset the name or creation date to indicate that this was originally
+ // an older chat that was reset for another purpose. Resetting this data will lead to the chat
+ // name label changing back to 'New Chat' and showing up in the chat model list as a 'New Chat'
+ // further down in the list. This might surprise the user. In the future, we might get rid of
+ // the "reset context" button in the UI. Right now, by changing the model in the combobox dropdown
+ // we effectively do a reset context. We *have* to do this right now when switching between different
+ // types of models. The only way to get rid of that would be a very long recalculation where we rebuild
+ // the context if we switch between different types of models. Probably the right way to fix this
+ // is to allow switching models but throw up a dialog warning users that if we switch between types
+ // of models a long recalculation will ensue.
+ m_chatModel->clear();
+}
+
+bool Chat::isModelLoaded() const
+{
+ return m_llmodel->isModelLoaded();
+}
+
+void Chat::prompt(const QString &prompt, const QString &prompt_template, int32_t n_predict,
+ int32_t top_k, float top_p, float temp, int32_t n_batch, float repeat_penalty,
+ int32_t repeat_penalty_tokens)
+{
+ emit promptRequested(prompt, prompt_template, n_predict, top_k, top_p, temp, n_batch,
+ repeat_penalty, repeat_penalty_tokens, LLM::globalInstance()->threadCount());
+}
+
+void Chat::regenerateResponse()
+{
+ emit regenerateResponseRequested(); // blocking queued connection
+}
+
+void Chat::stopGenerating()
+{
+ m_llmodel->stopGenerating();
+}
+
+QString Chat::response() const
+{
+ return m_llmodel->response();
+}
+
+void Chat::handleResponseChanged()
+{
+ const int index = m_chatModel->count() - 1;
+ m_chatModel->updateValue(index, response());
+ emit responseChanged();
+}
+
+void Chat::responseStarted()
+{
+ m_responseInProgress = true;
+ emit responseInProgressChanged();
+}
+
+void Chat::responseStopped()
+{
+ m_responseInProgress = false;
+ emit responseInProgressChanged();
+ if (m_llmodel->generatedName().isEmpty())
+ emit generateNameRequested();
+ if (chatModel()->count() < 3)
+ Network::globalInstance()->sendChatStarted();
+}
+
+QString Chat::modelName() const
+{
+ return m_llmodel->modelName();
+}
+
+void Chat::setModelName(const QString &modelName)
+{
+ // doesn't block but will unload old model and load new one which the gui can see through changes
+ // to the isModelLoaded property
+ emit modelNameChangeRequested(modelName);
+}
+
+void Chat::newPromptResponsePair(const QString &prompt)
+{
+ m_chatModel->appendPrompt(tr("Prompt: "), prompt);
+ m_chatModel->appendResponse(tr("Response: "), prompt);
+ emit resetResponseRequested(); // blocking queued connection
+}
+
+bool Chat::isRecalc() const
+{
+ return m_llmodel->isRecalc();
+}
+
+void Chat::loadDefaultModel()
+{
+ emit loadDefaultModelRequested();
+}
+
+void Chat::loadModel(const QString &modelName)
+{
+ emit loadModelRequested(modelName);
+}
+
+void Chat::unloadModel()
+{
+ stopGenerating();
+ emit unloadModelRequested();
+}
+
+void Chat::reloadModel()
+{
+ emit reloadModelRequested(m_savedModelName);
+}
+
+void Chat::generatedNameChanged()
+{
+ // Only use the first three words maximum and remove newlines and extra spaces
+ QString gen = m_llmodel->generatedName().simplified();
+ QStringList words = gen.split(' ', Qt::SkipEmptyParts);
+ int wordCount = qMin(3, words.size());
+ m_name = words.mid(0, wordCount).join(' ');
+ emit nameChanged();
+}
+
+void Chat::handleRecalculating()
+{
+ Network::globalInstance()->sendRecalculatingContext(m_chatModel->count());
+ emit recalcChanged();
+}
+
+void Chat::handleModelNameChanged()
+{
+ m_savedModelName = modelName();
+ emit modelNameChanged();
+}
+
+bool Chat::serialize(QDataStream &stream, int version) const
+{
+ stream << m_creationDate;
+ stream << m_id;
+ stream << m_name;
+ stream << m_userName;
+ stream << m_savedModelName;
+ if (!m_llmodel->serialize(stream, version))
+ return false;
+ if (!m_chatModel->serialize(stream, version))
+ return false;
+ return stream.status() == QDataStream::Ok;
+}
+
+bool Chat::deserialize(QDataStream &stream, int version)
+{
+ stream >> m_creationDate;
+ stream >> m_id;
+ emit idChanged();
+ stream >> m_name;
+ stream >> m_userName;
+ emit nameChanged();
+ stream >> m_savedModelName;
+
+ // Prior to version 2 gptj models had a bug that fixed the kv_cache to F32 instead of F16 so
+ // unfortunately, we cannot deserialize these
+ if (version < 2 && m_savedModelName.contains("gpt4all-j"))
+ return false;
+
+ if (!m_llmodel->deserialize(stream, version))
+ return false;
+ if (!m_chatModel->deserialize(stream, version))
+ return false;
+ emit chatModelChanged();
+ return stream.status() == QDataStream::Ok;
+}
+
+QList<QString> Chat::modelList() const
+{
+ // Build a model list from exepath and from the localpath
+ QList<QString> list;
+
+ QString exePath = QCoreApplication::applicationDirPath() + QDir::separator();
+ QString localPath = Download::globalInstance()->downloadLocalModelsPath();
+
+ {
+ QDir dir(exePath);
+ dir.setNameFilters(QStringList() << "ggml-*.bin");
+ QStringList fileNames = dir.entryList();
+ for (QString f : fileNames) {
+ QString filePath = exePath + f;
+ QFileInfo info(filePath);
+ QString name = info.completeBaseName().remove(0, 5);
+ if (info.exists()) {
+ if (name == modelName())
+ list.prepend(name);
+ else
+ list.append(name);
+ }
+ }
+ }
+
+ if (localPath != exePath) {
+ QDir dir(localPath);
+ dir.setNameFilters(QStringList() << "ggml-*.bin");
+ QStringList fileNames = dir.entryList();
+ for (QString f : fileNames) {
+ QString filePath = localPath + f;
+ QFileInfo info(filePath);
+ QString name = info.completeBaseName().remove(0, 5);
+ if (info.exists() && !list.contains(name)) { // don't allow duplicates
+ if (name == modelName())
+ list.prepend(name);
+ else
+ list.append(name);
+ }
+ }
+ }
+
+ if (list.isEmpty()) {
+ if (exePath != localPath) {
+ qWarning() << "ERROR: Could not find any applicable models in"
+ << exePath << "nor" << localPath;
+ } else {
+ qWarning() << "ERROR: Could not find any applicable models in"
+ << exePath;
+ }
+ return QList<QString>();
+ }
+
+ return list;
+}
diff --git a/gpt4all-chat/chat.h b/gpt4all-chat/chat.h
new file mode 100644
index 00000000..4ec97ee6
--- /dev/null
+++ b/gpt4all-chat/chat.h
@@ -0,0 +1,106 @@
+#ifndef CHAT_H
+#define CHAT_H
+
+#include <QObject>
+#include <QtQml>
+#include <QDataStream>
+
+#include "chatllm.h"
+#include "chatmodel.h"
+
+class Chat : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(QString id READ id NOTIFY idChanged)
+ Q_PROPERTY(QString name READ name WRITE setName NOTIFY nameChanged)
+ Q_PROPERTY(ChatModel *chatModel READ chatModel NOTIFY chatModelChanged)
+ Q_PROPERTY(bool isModelLoaded READ isModelLoaded NOTIFY isModelLoadedChanged)
+ Q_PROPERTY(QString response READ response NOTIFY responseChanged)
+ Q_PROPERTY(QString modelName READ modelName WRITE setModelName NOTIFY modelNameChanged)
+ Q_PROPERTY(bool responseInProgress READ responseInProgress NOTIFY responseInProgressChanged)
+ Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
+ Q_PROPERTY(QList<QString> modelList READ modelList NOTIFY modelListChanged)
+ QML_ELEMENT
+ QML_UNCREATABLE("Only creatable from c++!")
+
+public:
+ explicit Chat(QObject *parent = nullptr);
+
+ QString id() const { return m_id; }
+ QString name() const { return m_userName.isEmpty() ? m_name : m_userName; }
+ void setName(const QString &name)
+ {
+ m_userName = name;
+ emit nameChanged();
+ }
+ ChatModel *chatModel() { return m_chatModel; }
+
+ Q_INVOKABLE void reset();
+ Q_INVOKABLE bool isModelLoaded() const;
+ Q_INVOKABLE void prompt(const QString &prompt, const QString &prompt_template, int32_t n_predict,
+ int32_t top_k, float top_p, float temp, int32_t n_batch, float repeat_penalty, int32_t repeat_penalty_tokens);
+ Q_INVOKABLE void regenerateResponse();
+ Q_INVOKABLE void stopGenerating();
+ Q_INVOKABLE void newPromptResponsePair(const QString &prompt);
+
+ QString response() const;
+ bool responseInProgress() const { return m_responseInProgress; }
+ QString modelName() const;
+ void setModelName(const QString &modelName);
+ bool isRecalc() const;
+
+ void loadDefaultModel();
+ void loadModel(const QString &modelName);
+ void unloadModel();
+ void reloadModel();
+
+ qint64 creationDate() const { return m_creationDate; }
+ bool serialize(QDataStream &stream, int version) const;
+ bool deserialize(QDataStream &stream, int version);
+
+ QList<QString> modelList() const;
+
+Q_SIGNALS:
+ void idChanged();
+ void nameChanged();
+ void chatModelChanged();
+ void isModelLoadedChanged();
+ void responseChanged();
+ void responseInProgressChanged();
+ void promptRequested(const QString &prompt, const QString &prompt_template, int32_t n_predict,
+ int32_t top_k, float top_p, float temp, int32_t n_batch, float repeat_penalty, int32_t repeat_penalty_tokens,
+ int32_t n_threads);
+ void regenerateResponseRequested();
+ void resetResponseRequested();
+ void resetContextRequested();
+ void modelNameChangeRequested(const QString &modelName);
+ void modelNameChanged();
+ void recalcChanged();
+ void loadDefaultModelRequested();
+ void loadModelRequested(const QString &modelName);
+ void unloadModelRequested();
+ void reloadModelRequested(const QString &modelName);
+ void generateNameRequested();
+ void modelListChanged();
+ void modelLoadingError(const QString &error);
+
+private Q_SLOTS:
+ void handleResponseChanged();
+ void responseStarted();
+ void responseStopped();
+ void generatedNameChanged();
+ void handleRecalculating();
+ void handleModelNameChanged();
+
+private:
+ QString m_id;
+ QString m_name;
+ QString m_userName;
+ QString m_savedModelName;
+ ChatModel *m_chatModel;
+ bool m_responseInProgress;
+ qint64 m_creationDate;
+ ChatLLM *m_llmodel;
+};
+
+#endif // CHAT_H
diff --git a/gpt4all-chat/chatlistmodel.cpp b/gpt4all-chat/chatlistmodel.cpp
new file mode 100644
index 00000000..3fd2246f
--- /dev/null
+++ b/gpt4all-chat/chatlistmodel.cpp
@@ -0,0 +1,246 @@
+#include "chatlistmodel.h"
+#include "download.h"
+
+#include <QFile>
+#include <QDataStream>
+
+#define CHAT_FORMAT_MAGIC 0xF5D553CC
+#define CHAT_FORMAT_VERSION 2
+
+ChatListModel::ChatListModel(QObject *parent)
+ : QAbstractListModel(parent)
+ , m_newChat(nullptr)
+ , m_dummyChat(nullptr)
+ , m_currentChat(nullptr)
+ , m_shouldSaveChats(false)
+{
+ addDummyChat();
+
+ ChatsRestoreThread *thread = new ChatsRestoreThread;
+ connect(thread, &ChatsRestoreThread::chatRestored, this, &ChatListModel::restoreChat);
+ connect(thread, &ChatsRestoreThread::finished, this, &ChatListModel::chatsRestoredFinished);
+ connect(thread, &ChatsRestoreThread::finished, thread, &QObject::deleteLater);
+ thread->start();
+}
+
+bool ChatListModel::shouldSaveChats() const
+{
+ return m_shouldSaveChats;
+}
+
+void ChatListModel::setShouldSaveChats(bool b)
+{
+ if (m_shouldSaveChats == b)
+ return;
+ m_shouldSaveChats = b;
+ emit shouldSaveChatsChanged();
+}
+
+void ChatListModel::removeChatFile(Chat *chat) const
+{
+ const QString savePath = Download::globalInstance()->downloadLocalModelsPath();
+ QFile file(savePath + "/gpt4all-" + chat->id() + ".chat");
+ if (!file.exists())
+ return;
+ bool success = file.remove();
+ if (!success)
+ qWarning() << "ERROR: Couldn't remove chat file:" << file.fileName();
+}
+
+void ChatListModel::saveChats() const
+{
+ if (!m_shouldSaveChats)
+ return;
+
+ QElapsedTimer timer;
+ timer.start();
+ const QString savePath = Download::globalInstance()->downloadLocalModelsPath();
+ for (Chat *chat : m_chats) {
+ QString fileName = "gpt4all-" + chat->id() + ".chat";
+ QFile file(savePath + "/" + fileName);
+ bool success = file.open(QIODevice::WriteOnly);
+ if (!success) {
+ qWarning() << "ERROR: Couldn't save chat to file:" << file.fileName();
+ continue;
+ }
+ QDataStream out(&file);
+
+ out << (quint32)CHAT_FORMAT_MAGIC;
+ out << (qint32)CHAT_FORMAT_VERSION;
+ out.setVersion(QDataStream::Qt_6_2);
+
+ qDebug() << "serializing chat" << fileName;
+ if (!chat->serialize(out, CHAT_FORMAT_VERSION)) {
+ qWarning() << "ERROR: Couldn't serialize chat to file:" << file.fileName();
+ file.remove();
+ }
+ file.close();
+ }
+ qint64 elapsedTime = timer.elapsed();
+ qDebug() << "serializing chats took:" << elapsedTime << "ms";
+}
+
+void ChatsRestoreThread::run()
+{
+ QElapsedTimer timer;
+ timer.start();
+ struct FileInfo {
+ bool oldFile;
+ qint64 creationDate;
+ QString file;
+ };
+ QList<FileInfo> files;
+ {
+ // Look for any files in the original spot which was the settings config directory
+ QSettings settings;
+ QFileInfo settingsInfo(settings.fileName());
+ QString settingsPath = settingsInfo.absolutePath();
+ QDir dir(settingsPath);
+ dir.setNameFilters(QStringList() << "gpt4all-*.chat");
+ QStringList fileNames = dir.entryList();
+ for (QString f : fileNames) {
+ QString filePath = settingsPath + "/" + f;
+ QFile file(filePath);
+ bool success = file.open(QIODevice::ReadOnly);
+ if (!success) {
+ qWarning() << "ERROR: Couldn't restore chat from file:" << file.fileName();
+ continue;
+ }
+ QDataStream in(&file);
+ FileInfo info;
+ info.oldFile = true;
+ info.file = filePath;
+ in >> info.creationDate;
+ files.append(info);
+ file.close();
+ }
+ }
+ {
+ const QString savePath = Download::globalInstance()->downloadLocalModelsPath();
+ QDir dir(savePath);
+ dir.setNameFilters(QStringList() << "gpt4all-*.chat");
+ QStringList fileNames = dir.entryList();
+ for (QString f : fileNames) {
+ QString filePath = savePath + "/" + f;
+ QFile file(filePath);
+ bool success = file.open(QIODevice::ReadOnly);
+ if (!success) {
+ qWarning() << "ERROR: Couldn't restore chat from file:" << file.fileName();
+ continue;
+ }
+ QDataStream in(&file);
+ // Read and check the header
+ quint32 magic;
+ in >> magic;
+ if (magic != CHAT_FORMAT_MAGIC) {
+ qWarning() << "ERROR: Chat file has bad magic:" << file.fileName();
+ continue;
+ }
+
+ // Read the version
+ qint32 version;
+ in >> version;
+ if (version < 1) {
+ qWarning() << "ERROR: Chat file has non supported version:" << file.fileName();
+ continue;
+ }
+
+ if (version <= 1)
+ in.setVersion(QDataStream::Qt_6_2);
+
+ FileInfo info;
+ info.oldFile = false;
+ info.file = filePath;
+ in >> info.creationDate;
+ files.append(info);
+ file.close();
+ }
+ }
+ std::sort(files.begin(), files.end(), [](const FileInfo &a, const FileInfo &b) {
+ return a.creationDate > b.creationDate;
+ });
+
+ for (FileInfo &f : files) {
+ QFile file(f.file);
+ bool success = file.open(QIODevice::ReadOnly);
+ if (!success) {
+ qWarning() << "ERROR: Couldn't restore chat from file:" << file.fileName();
+ continue;
+ }
+ QDataStream in(&file);
+
+ qint32 version = 0;
+ if (!f.oldFile) {
+ // Read and check the header
+ quint32 magic;
+ in >> magic;
+ if (magic != CHAT_FORMAT_MAGIC) {
+ qWarning() << "ERROR: Chat file has bad magic:" << file.fileName();
+ continue;
+ }
+
+ // Read the version
+ in >> version;
+ if (version < 1) {
+ qWarning() << "ERROR: Chat file has non supported version:" << file.fileName();
+ continue;
+ }
+
+ if (version <= 1)
+ in.setVersion(QDataStream::Qt_6_2);
+ }
+
+ qDebug() << "deserializing chat" << f.file;
+
+ Chat *chat = new Chat;
+ chat->moveToThread(qApp->thread());
+ if (!chat->deserialize(in, version)) {
+ qWarning() << "ERROR: Couldn't deserialize chat from file:" << file.fileName();
+ file.remove();
+ } else {
+ emit chatRestored(chat);
+ }
+ if (f.oldFile)
+ file.remove(); // No longer storing in this directory
+ file.close();
+ }
+
+ qint64 elapsedTime = timer.elapsed();
+ qDebug() << "deserializing chats took:" << elapsedTime << "ms";
+}
+
+void ChatListModel::restoreChat(Chat *chat)
+{
+ chat->setParent(this);
+ connect(chat, &Chat::nameChanged, this, &ChatListModel::nameChanged);
+ connect(chat, &Chat::modelLoadingError, this, &ChatListModel::handleModelLoadingError);
+
+ if (m_dummyChat) {
+ beginResetModel();
+ m_chats = QList<Chat*>({chat});
+ setCurrentChat(chat);
+ delete m_dummyChat;
+ m_dummyChat = nullptr;
+ endResetModel();
+ } else {
+ beginInsertRows(QModelIndex(), m_chats.size(), m_chats.size());
+ m_chats.append(chat);
+ endInsertRows();
+ }
+}
+
+void ChatListModel::chatsRestoredFinished()
+{
+ if (m_dummyChat) {
+ beginResetModel();
+ Chat *dummy = m_dummyChat;
+ m_dummyChat = nullptr;
+ m_chats.clear();
+ addChat();
+ delete dummy;
+ endResetModel();
+ }
+
+ if (m_chats.isEmpty())
+ addChat();
+}
diff --git a/gpt4all-chat/chatlistmodel.h b/gpt4all-chat/chatlistmodel.h
new file mode 100644
index 00000000..c695e05d
--- /dev/null
+++ b/gpt4all-chat/chatlistmodel.h
@@ -0,0 +1,233 @@
+#ifndef CHATLISTMODEL_H
+#define CHATLISTMODEL_H
+
+#include <QAbstractListModel>
+#include "chat.h"
+
+class ChatsRestoreThread : public QThread
+{
+ Q_OBJECT
+public:
+ void run() override;
+
+Q_SIGNALS:
+ void chatRestored(Chat *chat);
+};
+
+class ChatListModel : public QAbstractListModel
+{
+ Q_OBJECT
+ Q_PROPERTY(int count READ count NOTIFY countChanged)
+ Q_PROPERTY(Chat *currentChat READ currentChat WRITE setCurrentChat NOTIFY currentChatChanged)
+ Q_PROPERTY(bool shouldSaveChats READ shouldSaveChats WRITE setShouldSaveChats NOTIFY shouldSaveChatsChanged)
+
+public:
+ explicit ChatListModel(QObject *parent = nullptr);
+
+ enum Roles {
+ IdRole = Qt::UserRole + 1,
+ NameRole
+ };
+
+ int rowCount(const QModelIndex &parent = QModelIndex()) const override
+ {
+ Q_UNUSED(parent)
+ return m_chats.size();
+ }
+
+ QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override
+ {
+ if (!index.isValid() || index.row() < 0 || index.row() >= m_chats.size())
+ return QVariant();
+
+ const Chat *item = m_chats.at(index.row());
+ switch (role) {
+ case IdRole:
+ return item->id();
+ case NameRole:
+ return item->name();
+ }
+
+ return QVariant();
+ }
+
+ QHash<int, QByteArray> roleNames() const override
+ {
+ QHash<int, QByteArray> roles;
+ roles[IdRole] = "id";
+ roles[NameRole] = "name";
+ return roles;
+ }
+
+ bool shouldSaveChats() const;
+ void setShouldSaveChats(bool b);
+
+ Q_INVOKABLE void addChat()
+ {
+ // Don't add a new chat if we already have one
+ if (m_newChat || m_dummyChat)
+ return;
+
+ // Create a new chat pointer and connect it to determine when it is populated
+ m_newChat = new Chat(this);
+ connect(m_newChat->chatModel(), &ChatModel::countChanged,
+ this, &ChatListModel::newChatCountChanged);
+ connect(m_newChat, &Chat::nameChanged,
+ this, &ChatListModel::nameChanged);
+
+ beginInsertRows(QModelIndex(), 0, 0);
+ m_chats.prepend(m_newChat);
+ endInsertRows();
+ emit countChanged();
+ setCurrentChat(m_newChat);
+ }
+
+ Q_INVOKABLE void addDummyChat()
+ {
+ // Create a new dummy chat pointer and don't connect it
+ m_dummyChat = new Chat(this);
+ beginInsertRows(QModelIndex(), 0, 0);
+ m_chats.prepend(m_dummyChat);
+ endInsertRows();
+ emit countChanged();
+ m_currentChat = m_dummyChat;
+ emit currentChatChanged();
+ }
+
+ void setNewChat(Chat* chat)
+ {
+ // Don't add a new chat if we already have one
+ if (m_newChat)
+ return;
+
+ m_newChat = chat;
+ connect(m_newChat->chatModel(), &ChatModel::countChanged,
+ this, &ChatListModel::newChatCountChanged);
+ connect(m_newChat, &Chat::nameChanged,
+ this, &ChatListModel::nameChanged);
+ connect(m_newChat, &Chat::modelLoadingError,
+ this, &ChatListModel::handleModelLoadingError);
+ setCurrentChat(m_newChat);
+ }
+
+ Q_INVOKABLE void removeChat(Chat* chat)
+ {
+ if (!m_chats.contains(chat)) {
+ qWarning() << "WARNING: Removing chat failed with id" << chat->id();
+ return;
+ }
+
+ removeChatFile(chat);
+
+ if (chat == m_newChat) {
+ m_newChat->disconnect(this);
+ m_newChat = nullptr;
+ }
+
+ const int index = m_chats.indexOf(chat);
+ if (m_chats.count() < 2) {
+ addChat();
+ } else {
+ int nextIndex;
+ if (index == m_chats.count() - 1)
+ nextIndex = index - 1;
+ else
+ nextIndex = index + 1;
+ Chat *nextChat = get(nextIndex);
+ Q_ASSERT(nextChat);
+ setCurrentChat(nextChat);
+ }
+
+ const int newIndex = m_chats.indexOf(chat);
+ beginRemoveRows(QModelIndex(), newIndex, newIndex);
+ m_chats.removeAll(chat);
+ endRemoveRows();
+ delete chat;
+ }
+
+ Chat *currentChat() const
+ {
+ return m_currentChat;
+ }
+
+ void setCurrentChat(Chat *chat)
+ {
+ if (!m_chats.contains(chat)) {
+ qWarning() << "ERROR: Setting current chat failed with id" << chat->id();
+ return;
+ }
+
+ if (m_currentChat && m_currentChat->isModelLoaded())
+ m_currentChat->unloadModel();
+
+ m_currentChat = chat;
+ if (!m_currentChat->isModelLoaded())
+ m_currentChat->reloadModel();
+ emit currentChatChanged();
+ }
+
+ Q_INVOKABLE Chat* get(int index)
+ {
+ if (index < 0 || index >= m_chats.size()) return nullptr;
+ return m_chats.at(index);
+ }
+
+ int count() const { return m_chats.size(); }
+
+ void removeChatFile(Chat *chat) const;
+ void saveChats() const;
+ void restoreChat(Chat *chat);
+ void chatsRestoredFinished();
+
+Q_SIGNALS:
+ void countChanged();
+ void currentChatChanged();
+ void shouldSaveChatsChanged();
+
+private Q_SLOTS:
+ void newChatCountChanged()
+ {
+ Q_ASSERT(m_newChat && m_newChat->chatModel()->count());
+ m_newChat->chatModel()->disconnect(this);
+ m_newChat = nullptr;
+ }
+
+ void nameChanged()
+ {
+ Chat *chat = qobject_cast<Chat*>(sender());
+ if (!chat)
+ return;
+
+ int row = m_chats.indexOf(chat);
+ if (row < 0 || row >= m_chats.size())
+ return;
+
+ QModelIndex index = createIndex(row, 0);
+ emit dataChanged(index, index, {NameRole});
+ }
+
+ void handleModelLoadingError(const QString &error)
+ {
+ Chat *chat = qobject_cast<Chat*>(sender());
+ qWarning() << "ERROR:" << qPrintable(error) << "id" << chat->id();
+ removeChat(chat);
+ }
+
+ void printChats()
+ {
+ for (auto c : m_chats) {
+ qDebug() << c->name()
+ << (c == m_currentChat ? "currentChat: true" : "currentChat: false")
+ << (c == m_newChat ? "newChat: true" : "newChat: false");
+ }
+ }
+
+private:
+ bool m_shouldSaveChats;
+ Chat* m_newChat;
+ Chat* m_dummyChat;
+ Chat* m_currentChat;
+ QList<Chat*> m_chats;
+};
+
+#endif // CHATLISTMODEL_H
diff --git a/gpt4all-chat/chatllm.cpp b/gpt4all-chat/chatllm.cpp
new file mode 100644
index 00000000..2ffbc3c7
--- /dev/null
+++ b/gpt4all-chat/chatllm.cpp
@@ -0,0 +1,483 @@
+#include "chatllm.h"
+#include "chat.h"
+#include "download.h"
+#include "network.h"
+#include "llmodel/gptj.h"
+#include "llmodel/llamamodel.h"
+#include "llmodel/mpt.h"
+
+#include <QCoreApplication>
+#include <QDebug>
+#include <QFile>
+#include <QFileInfo>
+#include <QSettings>
+#include <fstream>
+#include <functional>
+
+//#define DEBUG
+
+#define MPT_INTERNAL_STATE_VERSION 0
+#define GPTJ_INTERNAL_STATE_VERSION 0
+#define LLAMA_INTERNAL_STATE_VERSION 0
+
+static QString modelFilePath(const QString &modelName)
+{
+ QString appPath = QCoreApplication::applicationDirPath()
+ + "/ggml-" + modelName + ".bin";
+ QFileInfo infoAppPath(appPath);
+ if (infoAppPath.exists())
+ return appPath;
+
+ QString downloadPath = Download::globalInstance()->downloadLocalModelsPath()
+ + "/ggml-" + modelName + ".bin";
+
+ QFileInfo infoLocalPath(downloadPath);
+ if (infoLocalPath.exists())
+ return downloadPath;
+ return QString();
+}
+
+ChatLLM::ChatLLM(Chat *parent)
+ : QObject{nullptr}
+ , m_llmodel(nullptr)
+ , m_promptResponseTokens(0)
+ , m_responseLogits(0)
+ , m_isRecalc(false)
+ , m_chat(parent)
+{
+ moveToThread(&m_llmThread);
+ connect(this, &ChatLLM::sendStartup, Network::globalInstance(), &Network::sendStartup);
+ connect(this, &ChatLLM::sendModelLoaded, Network::globalInstance(), &Network::sendModelLoaded);
+ connect(m_chat, &Chat::idChanged, this, &ChatLLM::handleChatIdChanged);
+ m_llmThread.setObjectName(m_chat->id());
+ m_llmThread.start();
+}
+
+bool ChatLLM::loadDefaultModel()
+{
+ const QList models = m_chat->modelList();
+ if (models.isEmpty()) {
+ // try again when we get a list of models
+ connect(Download::globalInstance(), &Download::modelListChanged, this,
+ &ChatLLM::loadDefaultModel, Qt::SingleShotConnection);
+ return false;
+ }
+
+ QSettings settings;
+ settings.sync();
+ // The user default model can be set by the user in the settings dialog. The "default" user
+ // default model is "Application default" which signals we should use the default model that was
+ // specified by the models.json file.
+ QString defaultModel = settings.value("userDefaultModel").toString();
+ if (defaultModel.isEmpty() || !models.contains(defaultModel) || defaultModel == "Application default")
+ defaultModel = settings.value("defaultModel").toString();
+ if (defaultModel.isEmpty() || !models.contains(defaultModel))
+ defaultModel = models.first();
+ return loadModel(defaultModel);
+}
+
+bool ChatLLM::loadModel(const QString &modelName)
+{
+ if (isModelLoaded() && m_modelName == modelName)
+ return true;
+
+ if (isModelLoaded()) {
+ resetContextPrivate();
+ delete m_llmodel;
+ m_llmodel = nullptr;
+ emit isModelLoadedChanged();
+ }
+
+ bool isGPTJ = false;
+ bool isMPT = false;
+ QString filePath = modelFilePath(modelName);
+ QFileInfo info(filePath);
+ if (info.exists()) {
+
+ auto fin = std::ifstream(filePath.toStdString(), std::ios::binary);
+ uint32_t magic;
+ fin.read((char *) &magic, sizeof(magic));
+ fin.seekg(0);
+ fin.close();
+ isGPTJ = magic == 0x67676d6c;
+ isMPT = magic == 0x67676d6d;
+ if (isGPTJ) {
+ m_modelType = ModelType::GPTJ_;
+ m_llmodel = new GPTJ;
+ m_llmodel->loadModel(filePath.toStdString());
+ } else if (isMPT) {
+ m_modelType = ModelType::MPT_;
+ m_llmodel = new MPT;
+ m_llmodel->loadModel(filePath.toStdString());
+ } else {
+ m_modelType = ModelType::LLAMA_;
+ m_llmodel = new LLamaModel;
+ m_llmodel->loadModel(filePath.toStdString());
+ }
+
+ restoreState();
+
+#if defined(DEBUG)
+ qDebug() << "chatllm modelLoadedChanged" << m_chat->id();
+ fflush(stdout);
+#endif
+
+ emit isModelLoadedChanged();
+
+ static bool isFirstLoad = true;
+ if (isFirstLoad) {
+ emit sendStartup();
+ isFirstLoad = false;
+ } else
+ emit sendModelLoaded();
+ } else {
+ const QString error = QString("Could not find model %1").arg(modelName);
+ emit modelLoadingError(error);
+ }
+
+ if (m_llmodel)
+ setModelName(info.completeBaseName().remove(0, 5)); // remove the ggml- prefix
+
+ return m_llmodel;
+}
+
+bool ChatLLM::isModelLoaded() const
+{
+ return m_llmodel && m_llmodel->isModelLoaded();
+}
+
+void ChatLLM::regenerateResponse()
+{
+ m_ctx.n_past -= m_promptResponseTokens;
+ m_ctx.n_past = std::max(0, m_ctx.n_past);
+ // FIXME: This does not seem to be needed in my testing and llama models don't do it. Remove?
+ m_ctx.logits.erase(m_ctx.logits.end() -= m_responseLogits, m_ctx.logits.end());
+ m_ctx.tokens.erase(m_ctx.tokens.end() -= m_promptResponseTokens, m_ctx.tokens.end());
+ m_promptResponseTokens = 0;
+ m_responseLogits = 0;
+ m_response = std::string();
+ emit responseChanged();
+}
+
+void ChatLLM::resetResponse()
+{
+ m_promptResponseTokens = 0;
+ m_responseLogits = 0;
+ m_response = std::string();
+ emit responseChanged();
+}
+
+void ChatLLM::resetContext()
+{
+ resetContextPrivate();
+ emit sendResetContext();
+}
+
+void ChatLLM::resetContextPrivate()
+{
+ regenerateResponse();
+ m_ctx = LLModel::PromptContext();
+}
+
+std::string remove_leading_whitespace(const std::string& input) {
+ auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
+ return !std::isspace(c);
+ });
+
+ return std::string(first_non_whitespace, input.end());
+}
+
+std::string trim_whitespace(const std::string& input) {
+ auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
+ return !std::isspace(c);
+ });
+
+ auto last_non_whitespace = std::find_if(input.rbegin(), input.rend(), [](unsigned char c) {
+ return !std::isspace(c);
+ }).base();
+
+ return std::string(first_non_whitespace, last_non_whitespace);
+}
+
+QString ChatLLM::response() const
+{
+ return QString::fromStdString(remove_leading_whitespace(m_response));
+}
+
+QString ChatLLM::modelName() const
+{
+ return m_modelName;
+}
+
+void ChatLLM::setModelName(const QString &modelName)
+{
+ m_modelName = modelName;
+ emit modelNameChanged();
+}
+
+void ChatLLM::modelNameChangeRequested(const QString &modelName)
+{
+ if (!loadModel(modelName))
+ qWarning() << "ERROR: Could not load model" << modelName;
+}
+
+bool ChatLLM::handlePrompt(int32_t token)
+{
+ // m_promptResponseTokens and m_responseLogits are related to last prompt/response not
+ // the entire context window which we can reset on regenerate prompt
+#if defined(DEBUG)
+ qDebug() << "chatllm prompt process" << m_chat->id() << token;
+#endif
+ ++m_promptResponseTokens;
+ return !m_stopGenerating;
+}
+
+bool ChatLLM::handleResponse(int32_t token, const std::string &response)
+{
+#if defined(DEBUG)
+ printf("%s", response.c_str());
+ fflush(stdout);
+#endif
+
+ // check for error
+ if (token < 0) {
+ m_response.append(response);
+ emit responseChanged();
+ return false;
+ }
+
+ // m_promptResponseTokens and m_responseLogits are related to last prompt/response not
+ // the entire context window which we can reset on regenerate prompt
+ ++m_promptResponseTokens;
+ Q_ASSERT(!response.empty());
+ m_response.append(response);
+ emit responseChanged();
+ return !m_stopGenerating;
+}
+
+bool ChatLLM::handleRecalculate(bool isRecalc)
+{
+ if (m_isRecalc != isRecalc) {
+ m_isRecalc = isRecalc;
+ emit recalcChanged();
+ }
+ return !m_stopGenerating;
+}
+
+bool ChatLLM::prompt(const QString &prompt, const QString &prompt_template, int32_t n_predict, int32_t top_k,
+ float top_p, float temp, int32_t n_batch, float repeat_penalty, int32_t repeat_penalty_tokens, int n_threads)
+{
+ if (!isModelLoaded())
+ return false;
+
+ QString instructPrompt = prompt_template.arg(prompt);
+
+ m_stopGenerating = false;
+ auto promptFunc = std::bind(&ChatLLM::handlePrompt, this, std::placeholders::_1);
+ auto responseFunc = std::bind(&ChatLLM::handleResponse, this, std::placeholders::_1,
+ std::placeholders::_2);
+ auto recalcFunc = std::bind(&ChatLLM::handleRecalculate, this, std::placeholders::_1);
+ emit responseStarted();
+ qint32 logitsBefore = m_ctx.logits.size();
+ m_ctx.n_predict = n_predict;
+ m_ctx.top_k = top_k;
+ m_ctx.top_p = top_p;
+ m_ctx.temp = temp;
+ m_ctx.n_batch = n_batch;
+ m_ctx.repeat_penalty = repeat_penalty;
+ m_ctx.repeat_last_n = repeat_penalty_tokens;
+ m_llmodel->setThreadCount(n_threads);
+#if defined(DEBUG)
+ printf("%s", qPrintable(instructPrompt));
+ fflush(stdout);
+#endif
+ m_llmodel->prompt(instructPrompt.toStdString(), promptFunc, responseFunc, recalcFunc, m_ctx);
+#if defined(DEBUG)
+ printf("\n");
+ fflush(stdout);
+#endif
+ m_responseLogits += m_ctx.logits.size() - logitsBefore;
+ std::string trimmed = trim_whitespace(m_response);
+ if (trimmed != m_response) {
+ m_response = trimmed;
+ emit responseChanged();
+ }
+ emit responseStopped();
+ return true;
+}
+
+void ChatLLM::unloadModel()
+{
+#if defined(DEBUG)
+ qDebug() << "chatllm unloadModel" << m_chat->id();
+#endif
+ saveState();
+ delete m_llmodel;
+ m_llmodel = nullptr;
+ emit isModelLoadedChanged();
+}
+
+void ChatLLM::reloadModel(const QString &modelName)
+{
+#if defined(DEBUG)
+ qDebug() << "chatllm reloadModel" << m_chat->id();
+#endif
+ if (modelName.isEmpty()) {
+ loadDefaultModel();
+ } else {
+ loadModel(modelName);
+ }
+}
+
+void ChatLLM::generateName()
+{
+ Q_ASSERT(isModelLoaded());
+ if (!isModelLoaded())
+ return;
+
+ QString instructPrompt("### Instruction:\n"
+ "Describe response above in three words.\n"
+ "### Response:\n");
+ auto promptFunc = std::bind(&ChatLLM::handleNamePrompt, this, std::placeholders::_1);
+ auto responseFunc = std::bind(&ChatLLM::handleNameResponse, this, std::placeholders::_1,
+ std::placeholders::_2);
+ auto recalcFunc = std::bind(&ChatLLM::handleNameRecalculate, this, std::placeholders::_1);
+ LLModel::PromptContext ctx = m_ctx;
+#if defined(DEBUG)
+ printf("%s", qPrintable(instructPrompt));
+ fflush(stdout);
+#endif
+ m_llmodel->prompt(instructPrompt.toStdString(), promptFunc, responseFunc, recalcFunc, ctx);
+#if defined(DEBUG)
+ printf("\n");
+ fflush(stdout);
+#endif
+ std::string trimmed = trim_whitespace(m_nameResponse);
+ if (trimmed != m_nameResponse) {
+ m_nameResponse = trimmed;
+ emit generatedNameChanged();
+ }
+}
+
+void ChatLLM::handleChatIdChanged()
+{
+ m_llmThread.setObjectName(m_chat->id());
+}
+
+bool ChatLLM::handleNamePrompt(int32_t token)
+{
+ Q_UNUSED(token);
+ qt_noop();
+ return true;
+}
+
+bool ChatLLM::handleNameResponse(int32_t token, const std::string &response)
+{
+ Q_UNUSED(token);
+
+ m_nameResponse.append(response);
+ emit generatedNameChanged();
+ QString gen = QString::fromStdString(m_nameResponse).simplified();
+ QStringList words = gen.split(' ', Qt::SkipEmptyParts);
+ return words.size() <= 3;
+}
+
+bool ChatLLM::handleNameRecalculate(bool isRecalc)
+{
+ Q_UNUSED(isRecalc);
+ Q_UNREACHABLE();
+ return true;
+}
+
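+// Writes the model type (for version > 1), the response and generated name, the prompt context,
+// and the compressed backend state to the stream so deserialize() can restore the chat later.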
+bool ChatLLM::serialize(QDataStream &stream, int version)
+{
+ if (version > 1) {
+ stream << m_modelType;
+ switch (m_modelType) {
+ case MPT_: stream << MPT_INTERNAL_STATE_VERSION; break;
+ case GPTJ_: stream << GPTJ_INTERNAL_STATE_VERSION; break;
+ case LLAMA_: stream << LLAMA_INTERNAL_STATE_VERSION; break;
+ default: Q_UNREACHABLE();
+ }
+ }
+ stream << response();
+ stream << generatedName();
+ stream << m_promptResponseTokens;
+ stream << m_responseLogits;
+ stream << m_ctx.n_past;
+ stream << quint64(m_ctx.logits.size());
+ stream.writeRawData(reinterpret_cast<const char*>(m_ctx.logits.data()), m_ctx.logits.size() * sizeof(float));
+ stream << quint64(m_ctx.tokens.size());
+ stream.writeRawData(reinterpret_cast<const char*>(m_ctx.tokens.data()), m_ctx.tokens.size() * sizeof(int));
+ saveState();
+ QByteArray compressed = qCompress(m_state);
+ stream << compressed;
+#if defined(DEBUG)
+ qDebug() << "chatllm serialize" << m_chat->id() << m_state.size();
+#endif
+ return stream.status() == QDataStream::Ok;
+}
+
+bool ChatLLM::deserialize(QDataStream &stream, int version)
+{
+ if (version > 1) {
+ int internalStateVersion;
+ stream >> m_modelType;
+ stream >> internalStateVersion; // for future use
+ }
+ QString response;
+ stream >> response;
+ m_response = response.toStdString();
+ QString nameResponse;
+ stream >> nameResponse;
+ m_nameResponse = nameResponse.toStdString();
+ stream >> m_promptResponseTokens;
+ stream >> m_responseLogits;
+ stream >> m_ctx.n_past;
+ quint64 logitsSize;
+ stream >> logitsSize;
+ m_ctx.logits.resize(logitsSize);
+ stream.readRawData(reinterpret_cast<char*>(m_ctx.logits.data()), logitsSize * sizeof(float));
+ quint64 tokensSize;
+ stream >> tokensSize;
+ m_ctx.tokens.resize(tokensSize);
+ stream.readRawData(reinterpret_cast<char*>(m_ctx.tokens.data()), tokensSize * sizeof(int));
+ if (version > 0) {
+ QByteArray compressed;
+ stream >> compressed;
+ m_state = qUncompress(compressed);
+ } else {
+ stream >> m_state;
+ }
+#if defined(DEBUG)
+ qDebug() << "chatllm deserialize" << m_chat->id();
+#endif
+ return stream.status() == QDataStream::Ok;
+}
+
+void ChatLLM::saveState()
+{
+ if (!isModelLoaded())
+ return;
+
+ const size_t stateSize = m_llmodel->stateSize();
+ m_state.resize(stateSize);
+#if defined(DEBUG)
+ qDebug() << "chatllm saveState" << m_chat->id() << "size:" << m_state.size();
+#endif
+ m_llmodel->saveState(static_cast<uint8_t*>(reinterpret_cast<void*>(m_state.data())));
+}
+
+void ChatLLM::restoreState()
+{
+ if (!isModelLoaded() || m_state.isEmpty())
+ return;
+
+#if defined(DEBUG)
+ qDebug() << "chatllm restoreState" << m_chat->id() << "size:" << m_state.size();
+#endif
+ m_llmodel->restoreState(static_cast<const uint8_t*>(reinterpret_cast<void*>(m_state.data())));
+ m_state.clear();
+ m_state.resize(0);
+}
diff --git a/gpt4all-chat/chatllm.h b/gpt4all-chat/chatllm.h
new file mode 100644
index 00000000..bb488b16
--- /dev/null
+++ b/gpt4all-chat/chatllm.h
@@ -0,0 +1,100 @@
+#ifndef CHATLLM_H
+#define CHATLLM_H
+
+#include <QObject>
+#include <QThread>
+
+#include "llmodel/llmodel.h"
+
+class Chat;
+class ChatLLM : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(bool isModelLoaded READ isModelLoaded NOTIFY isModelLoadedChanged)
+ Q_PROPERTY(QString response READ response NOTIFY responseChanged)
+ Q_PROPERTY(QString modelName READ modelName WRITE setModelName NOTIFY modelNameChanged)
+ Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
+ Q_PROPERTY(QString generatedName READ generatedName NOTIFY generatedNameChanged)
+
+public:
+ enum ModelType {
+ MPT_,
+ GPTJ_,
+ LLAMA_
+ };
+
+ ChatLLM(Chat *parent);
+
+ bool isModelLoaded() const;
+ void regenerateResponse();
+ void resetResponse();
+ void resetContext();
+
+ void stopGenerating() { m_stopGenerating = true; }
+
+ QString response() const;
+ QString modelName() const;
+
+ void setModelName(const QString &modelName);
+
+ bool isRecalc() const { return m_isRecalc; }
+
+ QString generatedName() const { return QString::fromStdString(m_nameResponse); }
+
+ bool serialize(QDataStream &stream, int version);
+ bool deserialize(QDataStream &stream, int version);
+
+public Q_SLOTS:
+ bool prompt(const QString &prompt, const QString &prompt_template, int32_t n_predict,
+ int32_t top_k, float top_p, float temp, int32_t n_batch, float repeat_penalty, int32_t repeat_penalty_tokens,
+ int32_t n_threads);
+ bool loadDefaultModel();
+ bool loadModel(const QString &modelName);
+ void modelNameChangeRequested(const QString &modelName);
+ void unloadModel();
+ void reloadModel(const QString &modelName);
+ void generateName();
+ void handleChatIdChanged();
+
+Q_SIGNALS:
+ void isModelLoadedChanged();
+ void modelLoadingError(const QString &error);
+ void responseChanged();
+ void responseStarted();
+ void responseStopped();
+ void modelNameChanged();
+ void recalcChanged();
+ void sendStartup();
+ void sendModelLoaded();
+ void sendResetContext();
+ void generatedNameChanged();
+ void stateChanged();
+
+private:
+ void resetContextPrivate();
+ bool handlePrompt(int32_t token);
+ bool handleResponse(int32_t token, const std::string &response);
+ bool handleRecalculate(bool isRecalc);
+ bool handleNamePrompt(int32_t token);
+ bool handleNameResponse(int32_t token, const std::string &response);
+ bool handleNameRecalculate(bool isRecalc);
+ void saveState();
+ void restoreState();
+
+private:
+ LLModel::PromptContext m_ctx;
+ LLModel *m_llmodel;
+ std::string m_response;
+ std::string m_nameResponse;
+ quint32 m_promptResponseTokens;
+ quint32 m_responseLogits;
+ QString m_modelName;
+ ModelType m_modelType;
+ Chat *m_chat;
+ QByteArray m_state;
+ QThread m_llmThread;
+ std::atomic<bool> m_stopGenerating;
+ bool m_isRecalc;
+};
+
+#endif // CHATLLM_H
diff --git a/gpt4all-chat/chatmodel.h b/gpt4all-chat/chatmodel.h
new file mode 100644
index 00000000..e3c01a9a
--- /dev/null
+++ b/gpt4all-chat/chatmodel.h
@@ -0,0 +1,261 @@
+#ifndef CHATMODEL_H
+#define CHATMODEL_H
+
+#include
+#include
+#include
+
+struct ChatItem
+{
+ Q_GADGET
+ Q_PROPERTY(int id MEMBER id)
+ Q_PROPERTY(QString name MEMBER name)
+ Q_PROPERTY(QString value MEMBER value)
+ Q_PROPERTY(QString prompt MEMBER prompt)
+ Q_PROPERTY(QString newResponse MEMBER newResponse)
+ Q_PROPERTY(bool currentResponse MEMBER currentResponse)
+ Q_PROPERTY(bool stopped MEMBER stopped)
+ Q_PROPERTY(bool thumbsUpState MEMBER thumbsUpState)
+ Q_PROPERTY(bool thumbsDownState MEMBER thumbsDownState)
+
+public:
+ int id = 0;
+ QString name;
+ QString value;
+ QString prompt;
+ QString newResponse;
+ bool currentResponse = false;
+ bool stopped = false;
+ bool thumbsUpState = false;
+ bool thumbsDownState = false;
+};
+Q_DECLARE_METATYPE(ChatItem)
+
+class ChatModel : public QAbstractListModel
+{
+ Q_OBJECT
+ Q_PROPERTY(int count READ count NOTIFY countChanged)
+
+public:
+ explicit ChatModel(QObject *parent = nullptr) : QAbstractListModel(parent) {}
+
+ enum Roles {
+ IdRole = Qt::UserRole + 1,
+ NameRole,
+ ValueRole,
+ PromptRole,
+ NewResponseRole,
+ CurrentResponseRole,
+ StoppedRole,
+ ThumbsUpStateRole,
+ ThumbsDownStateRole
+ };
+
+ int rowCount(const QModelIndex &parent = QModelIndex()) const override
+ {
+ Q_UNUSED(parent)
+ return m_chatItems.size();
+ }
+
+ QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override
+ {
+ if (!index.isValid() || index.row() < 0 || index.row() >= m_chatItems.size())
+ return QVariant();
+
+ const ChatItem &item = m_chatItems.at(index.row());
+ switch (role) {
+ case IdRole:
+ return item.id;
+ case NameRole:
+ return item.name;
+ case ValueRole:
+ return item.value;
+ case PromptRole:
+ return item.prompt;
+ case NewResponseRole:
+ return item.newResponse;
+ case CurrentResponseRole:
+ return item.currentResponse;
+ case StoppedRole:
+ return item.stopped;
+ case ThumbsUpStateRole:
+ return item.thumbsUpState;
+ case ThumbsDownStateRole:
+ return item.thumbsDownState;
+ }
+
+ return QVariant();
+ }
+
+ QHash<int, QByteArray> roleNames() const override
+ {
+ QHash<int, QByteArray> roles;
+ roles[IdRole] = "id";
+ roles[NameRole] = "name";
+ roles[ValueRole] = "value";
+ roles[PromptRole] = "prompt";
+ roles[NewResponseRole] = "newResponse";
+ roles[CurrentResponseRole] = "currentResponse";
+ roles[StoppedRole] = "stopped";
+ roles[ThumbsUpStateRole] = "thumbsUpState";
+ roles[ThumbsDownStateRole] = "thumbsDownState";
+ return roles;
+ }
+
+ void appendPrompt(const QString &name, const QString &value)
+ {
+ ChatItem item;
+ item.name = name;
+ item.value = value;
+ beginInsertRows(QModelIndex(), m_chatItems.size(), m_chatItems.size());
+ m_chatItems.append(item);
+ endInsertRows();
+ emit countChanged();
+ }
+
+ void appendResponse(const QString &name, const QString &prompt)
+ {
+ ChatItem item;
+ item.id = m_chatItems.count(); // This is only relevant for responses
+ item.name = name;
+ item.prompt = prompt;
+ item.currentResponse = true;
+ beginInsertRows(QModelIndex(), m_chatItems.size(), m_chatItems.size());
+ m_chatItems.append(item);
+ endInsertRows();
+ emit countChanged();
+ }
+
+ Q_INVOKABLE void clear()
+ {
+ if (m_chatItems.isEmpty()) return;
+
+ beginResetModel();
+ m_chatItems.clear();
+ endResetModel();
+ emit countChanged();
+ }
+
+ Q_INVOKABLE ChatItem get(int index)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return ChatItem();
+ return m_chatItems.at(index);
+ }
+
+ Q_INVOKABLE void updateCurrentResponse(int index, bool b)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return;
+
+ ChatItem &item = m_chatItems[index];
+ if (item.currentResponse != b) {
+ item.currentResponse = b;
+ emit dataChanged(createIndex(index, 0), createIndex(index, 0), {CurrentResponseRole});
+ }
+ }
+
+ Q_INVOKABLE void updateStopped(int index, bool b)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return;
+
+ ChatItem &item = m_chatItems[index];
+ if (item.stopped != b) {
+ item.stopped = b;
+ emit dataChanged(createIndex(index, 0), createIndex(index, 0), {StoppedRole});
+ }
+ }
+
+ Q_INVOKABLE void updateValue(int index, const QString &value)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return;
+
+ ChatItem &item = m_chatItems[index];
+ if (item.value != value) {
+ item.value = value;
+ emit dataChanged(createIndex(index, 0), createIndex(index, 0), {ValueRole});
+ }
+ }
+
+ Q_INVOKABLE void updateThumbsUpState(int index, bool b)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return;
+
+ ChatItem &item = m_chatItems[index];
+ if (item.thumbsUpState != b) {
+ item.thumbsUpState = b;
+ emit dataChanged(createIndex(index, 0), createIndex(index, 0), {ThumbsUpStateRole});
+ }
+ }
+
+ Q_INVOKABLE void updateThumbsDownState(int index, bool b)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return;
+
+ ChatItem &item = m_chatItems[index];
+ if (item.thumbsDownState != b) {
+ item.thumbsDownState = b;
+ emit dataChanged(createIndex(index, 0), createIndex(index, 0), {ThumbsDownStateRole});
+ }
+ }
+
+ Q_INVOKABLE void updateNewResponse(int index, const QString &newResponse)
+ {
+ if (index < 0 || index >= m_chatItems.size()) return;
+
+ ChatItem &item = m_chatItems[index];
+ if (item.newResponse != newResponse) {
+ item.newResponse = newResponse;
+ emit dataChanged(createIndex(index, 0), createIndex(index, 0), {NewResponseRole});
+ }
+ }
+
+ int count() const { return m_chatItems.size(); }
+
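+ // Streams every chat item field in declaration order; deserialize() below reads them back in the same order.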
+ bool serialize(QDataStream &stream, int version) const
+ {
+ stream << count();
+ for (auto c : m_chatItems) {
+ stream << c.id;
+ stream << c.name;
+ stream << c.value;
+ stream << c.prompt;
+ stream << c.newResponse;
+ stream << c.currentResponse;
+ stream << c.stopped;
+ stream << c.thumbsUpState;
+ stream << c.thumbsDownState;
+ }
+ return stream.status() == QDataStream::Ok;
+ }
+
+ bool deserialize(QDataStream &stream, int version)
+ {
+ int size;
+ stream >> size;
+ for (int i = 0; i < size; ++i) {
+ ChatItem c;
+ stream >> c.id;
+ stream >> c.name;
+ stream >> c.value;
+ stream >> c.prompt;
+ stream >> c.newResponse;
+ stream >> c.currentResponse;
+ stream >> c.stopped;
+ stream >> c.thumbsUpState;
+ stream >> c.thumbsDownState;
+ beginInsertRows(QModelIndex(), m_chatItems.size(), m_chatItems.size());
+ m_chatItems.append(c);
+ endInsertRows();
+ }
+ emit countChanged();
+ return stream.status() == QDataStream::Ok;
+ }
+
+Q_SIGNALS:
+ void countChanged();
+
+private:
+
+ QList<ChatItem> m_chatItems;
+};
+
+#endif // CHATMODEL_H
diff --git a/gpt4all-chat/cmake/config.h.in b/gpt4all-chat/cmake/config.h.in
new file mode 100644
index 00000000..e578a82d
--- /dev/null
+++ b/gpt4all-chat/cmake/config.h.in
@@ -0,0 +1,7 @@
+#ifndef CONFIG_H
+#define CONFIG_H
+
+#define APP_VERSION "@APP_VERSION@"
+#define GPT4ALL_AVX_ONLY "@GPT4ALL_AVX_ONLY@"
+
+#endif // CONFIG_H
diff --git a/gpt4all-chat/cmake/deploy-qt-linux.cmake.in b/gpt4all-chat/cmake/deploy-qt-linux.cmake.in
new file mode 100644
index 00000000..8c4240f1
--- /dev/null
+++ b/gpt4all-chat/cmake/deploy-qt-linux.cmake.in
@@ -0,0 +1,12 @@
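+# Runs linuxdeployqt on the installed chat binary to bundle Qt and non-Qt libraries,
+# then copies the application icons into the package data directory.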
+set(LINUXDEPLOYQT "@LINUXDEPLOYQT@")
+set(COMPONENT_NAME_MAIN "@COMPONENT_NAME_MAIN@")
+set(CMAKE_CURRENT_SOURCE_DIR "@CMAKE_CURRENT_SOURCE_DIR@")
+set(DATA_DIR ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
+set(BIN_DIR ${DATA_DIR}/bin)
+set(Qt6_ROOT_DIR "@Qt6_ROOT_DIR@")
+set(ENV{LD_LIBRARY_PATH} "${BIN_DIR}:${Qt6_ROOT_DIR}/../lib/")
+execute_process(COMMAND ${LINUXDEPLOYQT} ${BIN_DIR}/chat -qmldir=${CMAKE_CURRENT_SOURCE_DIR} -bundle-non-qt-libs -qmake=${Qt6_ROOT_DIR}/bin/qmake -verbose=2)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-32.png"
+ DESTINATION ${DATA_DIR})
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-48.png"
+ DESTINATION ${DATA_DIR})
diff --git a/gpt4all-chat/cmake/deploy-qt-mac.cmake.in b/gpt4all-chat/cmake/deploy-qt-mac.cmake.in
new file mode 100644
index 00000000..f6ceb718
--- /dev/null
+++ b/gpt4all-chat/cmake/deploy-qt-mac.cmake.in
@@ -0,0 +1,16 @@
+set(MACDEPLOYQT "@MACDEPLOYQT@")
+set(COMPONENT_NAME_MAIN "@COMPONENT_NAME_MAIN@")
+set(CMAKE_CURRENT_SOURCE_DIR "@CMAKE_CURRENT_SOURCE_DIR@")
+execute_process(COMMAND ${MACDEPLOYQT} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app -qmldir=${CMAKE_CURRENT_SOURCE_DIR} -verbose=2)
+file(COPY ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/lib/libllama.dylib
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app/Contents/Frameworks)
+file(COPY ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/lib/libllmodel.dylib
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app/Contents/Frameworks)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/favicon.icns"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app/Contents/Resources)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-32.png"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-48.png"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/favicon.icns"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
diff --git a/gpt4all-chat/cmake/deploy-qt-windows.cmake.in b/gpt4all-chat/cmake/deploy-qt-windows.cmake.in
new file mode 100644
index 00000000..80493951
--- /dev/null
+++ b/gpt4all-chat/cmake/deploy-qt-windows.cmake.in
@@ -0,0 +1,14 @@
+set(WINDEPLOYQT "@WINDEPLOYQT@")
+set(COMPONENT_NAME_MAIN "@COMPONENT_NAME_MAIN@")
+set(CMAKE_CURRENT_SOURCE_DIR "@CMAKE_CURRENT_SOURCE_DIR@")
+execute_process(COMMAND ${WINDEPLOYQT} --qmldir ${CMAKE_CURRENT_SOURCE_DIR} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
+file(COPY ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/lib/libllama.dll
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
+file(COPY ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/lib/libllmodel.dll
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-32.png"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/logo-48.png"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
+file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/favicon.ico"
+ DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
diff --git a/gpt4all-chat/cmake/installerscript.qs b/gpt4all-chat/cmake/installerscript.qs
new file mode 100644
index 00000000..841e1e52
--- /dev/null
+++ b/gpt4all-chat/cmake/installerscript.qs
@@ -0,0 +1,68 @@
+function Component() {
+}
+
+var targetDirectory;
+Component.prototype.beginInstallation = function() {
+ targetDirectory = installer.value("TargetDir");
+};
+
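+// Creates the platform launch entries: desktop and start-menu shortcuts on Windows,
+// an app-bundle symlink on macOS, and a .desktop entry on Linux.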
+Component.prototype.createOperations = function()
+{
+ try {
+ // call the base create operations function
+ component.createOperations();
+ if (systemInfo.productType === "windows") {
+ try {
+ var userProfile = installer.environmentVariable("USERPROFILE");
+ installer.setValue("UserProfile", userProfile);
+ component.addOperation("CreateShortcut",
+ targetDirectory + "/bin/chat.exe",
+ "@UserProfile@/Desktop/GPT4All.lnk",
+ "workingDirectory=" + targetDirectory + "/bin",
+ "iconPath=" + targetDirectory + "/favicon.ico",
+ "iconId=0", "description=Open GPT4All");
+ } catch (e) {
+ print("ERROR: creating desktop shortcut" + e);
+ }
+ component.addOperation("CreateShortcut",
+ targetDirectory + "/bin/chat.exe",
+ "@StartMenuDir@/GPT4All.lnk",
+ "workingDirectory=" + targetDirectory + "/bin",
+ "iconPath=" + targetDirectory + "/favicon.ico",
+ "iconId=0", "description=Open GPT4All");
+ } else if (systemInfo.productType === "osx") {
+ var gpt4allAppPath = targetDirectory + "/bin/gpt4all.app";
+ var symlinkPath = targetDirectory + "/../GPT4All.app";
+ // Remove the symlink if it already exists
+ component.addOperation("Execute", "rm", "-f", symlinkPath);
+ // Create the symlink
+ component.addOperation("Execute", "ln", "-s", gpt4allAppPath, symlinkPath);
+ } else { // linux
+ var homeDir = installer.environmentVariable("HOME");
+ if (!installer.fileExists(homeDir + "/Desktop/GPT4All.desktop")) {
+ component.addOperation("CreateDesktopEntry",
+ homeDir + "/Desktop/GPT4All.desktop",
+ "Type=Application\nTerminal=false\nExec=\"" + targetDirectory +
+ "/bin/chat\"\nName=GPT4All\nIcon=" + targetDirectory +
+ "/logo-48.png\nName[en_US]=GPT4All");
+ }
+ }
+ } catch (e) {
+ print("ERROR: running post installscript.qs" + e);
+ }
+}
+
+Component.prototype.createOperationsForArchive = function(archive)
+{
+ component.createOperationsForArchive(archive);
+
+ if (systemInfo.productType === "osx") {
+ var uninstallTargetDirectory = installer.value("TargetDir");
+ var symlinkPath = uninstallTargetDirectory + "/../GPT4All.app";
+
+ // Remove the symlink during uninstallation
+ if (installer.isUninstaller()) {
+ component.addOperation("Execute", "rm", "-f", symlinkPath, "UNDOEXECUTE");
+ }
+ }
+}
diff --git a/gpt4all-chat/cmake/sign_dmg.py b/gpt4all-chat/cmake/sign_dmg.py
new file mode 100644
index 00000000..c448db36
--- /dev/null
+++ b/gpt4all-chat/cmake/sign_dmg.py
@@ -0,0 +1,81 @@
+import os
+import subprocess
+import tempfile
+import shutil
+import click
+from typing import Optional
+
+# Requires click
+# pip install click
+
+# Example usage
+# python sign_dmg.py --input-dmg /path/to/your/input.dmg --output-dmg /path/to/your/output.dmg --signing-identity "Developer ID Application: YOUR_NAME (TEAM_ID)"
+
+# NOTE: This script assumes that you have the necessary Developer ID Application certificate in your
+# Keychain Access and that the codesign and hdiutil command-line tools are available on your system.
+
+@click.command()
+@click.option('--input-dmg', required=True, help='Path to the input DMG file.')
+@click.option('--output-dmg', required=True, help='Path to the output signed DMG file.')
+@click.option('--sha1-hash', help='SHA-1 hash of the Developer ID Application certificate')
+@click.option('--signing-identity', default=None, help='Common name of the Developer ID Application certificate')
+def sign_dmg(input_dmg: str, output_dmg: str, signing_identity: Optional[str] = None, sha1_hash: Optional[str] = None) -> None:
+ if not signing_identity and not sha1_hash:
+ print("Error: Either --signing-identity or --sha1-hash must be provided.")
+ exit(1)
+
+ # Mount the input DMG
+ mount_point = tempfile.mkdtemp()
+ subprocess.run(['hdiutil', 'attach', input_dmg, '-mountpoint', mount_point])
+
+ # Copy the contents of the DMG to a temporary folder
+ temp_dir = tempfile.mkdtemp()
+ shutil.copytree(mount_point, os.path.join(temp_dir, 'contents'))
+ subprocess.run(['hdiutil', 'detach', mount_point])
+
+ # Find the .app bundle in the temporary folder
+ app_bundle = None
+ for item in os.listdir(os.path.join(temp_dir, 'contents')):
+ if item.endswith('.app'):
+ app_bundle = os.path.join(temp_dir, 'contents', item)
+ break
+
+ if not app_bundle:
+ print('No .app bundle found in the DMG.')
+ exit(1)
+
+ # Sign the .app bundle
+ try:
+ subprocess.run([
+ 'codesign',
+ '--deep',
+ '--force',
+ '--verbose',
+ '--options', 'runtime',
+ '--timestamp',
+ '--sign', sha1_hash or signing_identity,
+ app_bundle
+ ], check=True)
+ except subprocess.CalledProcessError as e:
+ print(f"Error during codesign: {e}")
+ # Clean up temporary directories
+ shutil.rmtree(temp_dir)
+ shutil.rmtree(mount_point)
+ exit(1)
+
+ # Create a new DMG containing the signed .app bundle
+ subprocess.run([
+ 'hdiutil', 'create',
+ '-volname', os.path.splitext(os.path.basename(input_dmg))[0],
+ '-srcfolder', os.path.join(temp_dir, 'contents'),
+ '-ov',
+ '-format', 'UDZO',
+ output_dmg
+ ])
+
+ # Clean up temporary directories
+ shutil.rmtree(temp_dir)
+ shutil.rmtree(mount_point)
+
+if __name__ == '__main__':
+ sign_dmg()
diff --git a/gpt4all-chat/download.cpp b/gpt4all-chat/download.cpp
new file mode 100644
index 00000000..736c8fa1
--- /dev/null
+++ b/gpt4all-chat/download.cpp
@@ -0,0 +1,600 @@
+#include "download.h"
+#include "network.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+class MyDownload: public Download { };
+Q_GLOBAL_STATIC(MyDownload, downloadInstance)
+Download *Download::globalInstance()
+{
+ return downloadInstance();
+}
+
+Download::Download()
+ : QObject(nullptr)
+ , m_hashAndSave(new HashAndSaveFile)
+{
+ connect(this, &Download::requestHashAndSave, m_hashAndSave,
+ &HashAndSaveFile::hashAndSave, Qt::QueuedConnection);
+ connect(m_hashAndSave, &HashAndSaveFile::hashAndSaveFinished, this,
+ &Download::handleHashAndSaveFinished, Qt::QueuedConnection);
+ connect(&m_networkManager, &QNetworkAccessManager::sslErrors, this,
+ &Download::handleSslErrors);
+ connect(this, &Download::downloadLocalModelsPathChanged, this, &Download::updateModelList);
+ updateModelList();
+ updateReleaseNotes();
+ QSettings settings;
+ settings.sync();
+ m_downloadLocalModelsPath = settings.value("modelPath",
+ defaultLocalModelsPath()).toString();
+ m_startTime = QDateTime::currentDateTime();
+}
+
+bool operator==(const ModelInfo& lhs, const ModelInfo& rhs) {
+ return lhs.filename == rhs.filename && lhs.md5sum == rhs.md5sum;
+}
+
+bool operator==(const ReleaseInfo& lhs, const ReleaseInfo& rhs) {
+ return lhs.version == rhs.version;
+}
+
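+// Compares dot-separated version strings numerically; returns true if a is newer than b.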
+bool compareVersions(const QString &a, const QString &b) {
+ QStringList aParts = a.split('.');
+ QStringList bParts = b.split('.');
+
+ for (int i = 0; i < std::min(aParts.size(), bParts.size()); ++i) {
+ int aInt = aParts[i].toInt();
+ int bInt = bParts[i].toInt();
+
+ if (aInt > bInt) {
+ return true;
+ } else if (aInt < bInt) {
+ return false;
+ }
+ }
+
+ return aParts.size() > bParts.size();
+}
+
+QList<ModelInfo> Download::modelList() const
+{
+ // We make sure the default model is listed first
+ QList<ModelInfo> values = m_modelMap.values();
+ ModelInfo defaultInfo;
+ ModelInfo bestGPTJInfo;
+ ModelInfo bestLlamaInfo;
+ ModelInfo bestMPTInfo;
+ QList<ModelInfo> filtered;
+ for (ModelInfo v : values) {
+ if (v.isDefault)
+ defaultInfo = v;
+ if (v.bestGPTJ)
+ bestGPTJInfo = v;
+ if (v.bestLlama)
+ bestLlamaInfo = v;
+ if (v.bestMPT)
+ bestMPTInfo = v;
+ filtered.append(v);
+ }
+
+ Q_ASSERT(defaultInfo == bestGPTJInfo || defaultInfo == bestLlamaInfo || defaultInfo == bestMPTInfo);
+
+ if (bestLlamaInfo.bestLlama) {
+ filtered.removeAll(bestLlamaInfo);
+ filtered.prepend(bestLlamaInfo);
+ }
+
+ if (bestGPTJInfo.bestGPTJ) {
+ filtered.removeAll(bestGPTJInfo);
+ filtered.prepend(bestGPTJInfo);
+ }
+
+ if (bestMPTInfo.bestMPT) {
+ filtered.removeAll(bestMPTInfo);
+ filtered.prepend(bestMPTInfo);
+ }
+
+ return filtered;
+}
+
+ReleaseInfo Download::releaseInfo() const
+{
+ const QString currentVersion = QCoreApplication::applicationVersion();
+ if (m_releaseMap.contains(currentVersion))
+ return m_releaseMap.value(currentVersion);
+ return ReleaseInfo();
+}
+
+bool Download::hasNewerRelease() const
+{
+ const QString currentVersion = QCoreApplication::applicationVersion();
+ QList<QString> versions = m_releaseMap.keys();
+ std::sort(versions.begin(), versions.end(), compareVersions);
+ if (versions.isEmpty())
+ return false;
+ return compareVersions(versions.first(), currentVersion);
+}
+
+QString Download::downloadLocalModelsPath() const {
+ return m_downloadLocalModelsPath;
+}
+
+void Download::setDownloadLocalModelsPath(const QString &modelPath) {
+ QString filePath = (modelPath.startsWith("file://") ?
+ QUrl(modelPath).toLocalFile() : modelPath);
+ QString canonical = QFileInfo(filePath).canonicalFilePath() + "/";
+ if (m_downloadLocalModelsPath != canonical) {
+ m_downloadLocalModelsPath = canonical;
+ emit downloadLocalModelsPathChanged();
+ }
+}
+
+bool Download::isFirstStart() const
+{
+ QSettings settings;
+ settings.sync();
+ QString lastVersionStarted = settings.value("download/lastVersionStarted").toString();
+ bool first = lastVersionStarted != QCoreApplication::applicationVersion();
+ settings.setValue("download/lastVersionStarted", QCoreApplication::applicationVersion());
+ settings.sync();
+ return first;
+}
+
+QString Download::incompleteDownloadPath(const QString &modelFile) {
+ QString downloadPath = downloadLocalModelsPath() + "incomplete-" +
+ modelFile;
+ return downloadPath;
+}
+
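+// Returns the per-user application data directory, creating it if necessary and probing it with a
+// scratch file to confirm it is writable.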
+QString Download::defaultLocalModelsPath() const
+{
+ QString localPath = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation)
+ + "/";
+ QString testWritePath = localPath + QString("test_write.txt");
+ QString canonicalLocalPath = QFileInfo(localPath).canonicalFilePath() + "/";
+ QDir localDir(localPath);
+ if (!localDir.exists()) {
+ if (!localDir.mkpath(localPath)) {
+ qWarning() << "ERROR: Local download directory can't be created:" << canonicalLocalPath;
+ return canonicalLocalPath;
+ }
+ }
+
+ if (QFileInfo::exists(testWritePath))
+ return canonicalLocalPath;
+
+ QFile testWriteFile(testWritePath);
+ if (testWriteFile.open(QIODeviceBase::ReadWrite)) {
+ testWriteFile.close();
+ return canonicalLocalPath;
+ }
+
+ qWarning() << "ERROR: Local download path appears not writeable:" << canonicalLocalPath;
+ return canonicalLocalPath;
+}
+
+void Download::updateModelList()
+{
+ QUrl jsonUrl("http://gpt4all.io/models/models.json");
+ QNetworkRequest request(jsonUrl);
+ QSslConfiguration conf = request.sslConfiguration();
+ conf.setPeerVerifyMode(QSslSocket::VerifyNone);
+ request.setSslConfiguration(conf);
+ QNetworkReply *jsonReply = m_networkManager.get(request);
+ connect(jsonReply, &QNetworkReply::finished, this, &Download::handleModelsJsonDownloadFinished);
+}
+
+void Download::updateReleaseNotes()
+{
+ QUrl jsonUrl("http://gpt4all.io/meta/release.json");
+ QNetworkRequest request(jsonUrl);
+ QSslConfiguration conf = request.sslConfiguration();
+ conf.setPeerVerifyMode(QSslSocket::VerifyNone);
+ request.setSslConfiguration(conf);
+ QNetworkReply *jsonReply = m_networkManager.get(request);
+ connect(jsonReply, &QNetworkReply::finished, this, &Download::handleReleaseJsonDownloadFinished);
+}
+
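+// Starts (or resumes) a model download: any partial file is appended to via an HTTP Range request,
+// rewinding 1MB first if the partial data predates this app session.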
+void Download::downloadModel(const QString &modelFile)
+{
+ QFile *tempFile = new QFile(incompleteDownloadPath(modelFile));
+ QDateTime modTime = tempFile->fileTime(QFile::FileModificationTime);
+ bool success = tempFile->open(QIODevice::WriteOnly | QIODevice::Append);
+ qWarning() << "Opening temp file for writing:" << tempFile->fileName();
+ if (!success) {
+ qWarning() << "ERROR: Could not open temp file:"
+ << tempFile->fileName() << modelFile;
+ return;
+ }
+ size_t incomplete_size = tempFile->size();
+ if (incomplete_size > 0) {
+ if (modTime < m_startTime) {
+ qWarning() << "File last modified before app started, rewinding by 1MB";
+ if (incomplete_size >= 1024 * 1024) {
+ incomplete_size -= 1024 * 1024;
+ } else {
+ incomplete_size = 0;
+ }
+ }
+ tempFile->seek(incomplete_size);
+ }
+
+ Network::globalInstance()->sendDownloadStarted(modelFile);
+ QNetworkRequest request("http://gpt4all.io/models/" + modelFile);
+ request.setRawHeader("range", QString("bytes=%1-").arg(incomplete_size).toUtf8());
+ QSslConfiguration conf = request.sslConfiguration();
+ conf.setPeerVerifyMode(QSslSocket::VerifyNone);
+ request.setSslConfiguration(conf);
+ QNetworkReply *modelReply = m_networkManager.get(request);
+ connect(modelReply, &QNetworkReply::downloadProgress, this, &Download::handleDownloadProgress);
+ connect(modelReply, &QNetworkReply::finished, this, &Download::handleModelDownloadFinished);
+ connect(modelReply, &QNetworkReply::readyRead, this, &Download::handleReadyRead);
+ m_activeDownloads.insert(modelReply, tempFile);
+}
+
+void Download::cancelDownload(const QString &modelFile)
+{
+ for (int i = 0; i < m_activeDownloads.size(); ++i) {
+ QNetworkReply *modelReply = m_activeDownloads.keys().at(i);
+ QUrl url = modelReply->request().url();
+ if (url.toString().endsWith(modelFile)) {
+ Network::globalInstance()->sendDownloadCanceled(modelFile);
+
+ // Disconnect the signals
+ disconnect(modelReply, &QNetworkReply::downloadProgress, this, &Download::handleDownloadProgress);
+ disconnect(modelReply, &QNetworkReply::finished, this, &Download::handleModelDownloadFinished);
+
+ modelReply->abort(); // Abort the download
+ modelReply->deleteLater(); // Schedule the reply for deletion
+
+ QFile *tempFile = m_activeDownloads.value(modelReply);
+ tempFile->deleteLater();
+ m_activeDownloads.remove(modelReply);
+
+ // Emit downloadFinished signal for cleanup
+ emit downloadFinished(modelFile);
+ break;
+ }
+ }
+}
+
+void Download::handleSslErrors(QNetworkReply *reply, const QList<QSslError> &errors)
+{
+ QUrl url = reply->request().url();
+ for (auto e : errors)
+ qWarning() << "ERROR: Received ssl error:" << e.errorString() << "for" << url;
+}
+
+void Download::handleModelsJsonDownloadFinished()
+{
+#if 0
+ QByteArray jsonData = QString(""
+ "["
+ " {"
+ " \"md5sum\": \"61d48a82cb188cceb14ebb8082bfec37\","
+ " \"filename\": \"ggml-gpt4all-j-v1.1-breezy.bin\","
+ " \"filesize\": \"3785248281\""
+ " },"
+ " {"
+ " \"md5sum\": \"879344aaa9d62fdccbda0be7a09e7976\","
+ " \"filename\": \"ggml-gpt4all-j-v1.2-jazzy.bin\","
+ " \"filesize\": \"3785248281\","
+ " \"isDefault\": \"true\""
+ " },"
+ " {"
+ " \"md5sum\": \"5b5a3f9b858d33b29b52b89692415595\","
+ " \"filesize\": \"3785248281\","
+ " \"filename\": \"ggml-gpt4all-j.bin\""
+ " }"
+ "]"
+ ).toUtf8();
+ printf("%s\n", jsonData.toStdString().c_str());
+ fflush(stdout);
+#else
+ QNetworkReply *jsonReply = qobject_cast<QNetworkReply *>(sender());
+ if (!jsonReply)
+ return;
+
+ QByteArray jsonData = jsonReply->readAll();
+ jsonReply->deleteLater();
+#endif
+ parseModelsJsonFile(jsonData);
+}
+
+void Download::parseModelsJsonFile(const QByteArray &jsonData)
+{
+ QJsonParseError err;
+ QJsonDocument document = QJsonDocument::fromJson(jsonData, &err);
+ if (err.error != QJsonParseError::NoError) {
+ qDebug() << "ERROR: Couldn't parse: " << jsonData << err.errorString();
+ return;
+ }
+
+ QString defaultModel;
+ QJsonArray jsonArray = document.array();
+ const QString currentVersion = QCoreApplication::applicationVersion();
+
+ m_modelMap.clear();
+ for (const QJsonValue &value : jsonArray) {
+ QJsonObject obj = value.toObject();
+
+ QString modelFilename = obj["filename"].toString();
+ QString modelFilesize = obj["filesize"].toString();
+ QString requires = obj["requires"].toString();
+ QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
+ bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
+ bool bestGPTJ = obj.contains("bestGPTJ") && obj["bestGPTJ"] == QString("true");
+ bool bestLlama = obj.contains("bestLlama") && obj["bestLlama"] == QString("true");
+ bool bestMPT = obj.contains("bestMPT") && obj["bestMPT"] == QString("true");
+ QString description = obj["description"].toString();
+
+ if (!requires.isEmpty()
+ && requires != currentVersion
+ && compareVersions(requires, currentVersion)) {
+ continue;
+ }
+
+ if (isDefault)
+ defaultModel = modelFilename;
+ quint64 sz = modelFilesize.toULongLong();
+ if (sz < 1024) {
+ modelFilesize = QString("%1 bytes").arg(sz);
+ } else if (sz < 1024 * 1024) {
+ modelFilesize = QString("%1 KB").arg(qreal(sz) / 1024, 0, 'g', 3);
+ } else if (sz < 1024 * 1024 * 1024) {
+ modelFilesize = QString("%1 MB").arg(qreal(sz) / (1024 * 1024), 0, 'g', 3);
+ } else {
+ modelFilesize = QString("%1 GB").arg(qreal(sz) / (1024 * 1024 * 1024), 0, 'g', 3);
+ }
+
+ QString filePath = downloadLocalModelsPath() + modelFilename;
+ QFileInfo info(filePath);
+ ModelInfo modelInfo;
+ modelInfo.filename = modelFilename;
+ modelInfo.filesize = modelFilesize;
+ modelInfo.md5sum = modelMd5sum;
+ modelInfo.installed = info.exists();
+ modelInfo.isDefault = isDefault;
+ modelInfo.bestGPTJ = bestGPTJ;
+ modelInfo.bestLlama = bestLlama;
+ modelInfo.bestMPT = bestMPT;
+ modelInfo.description = description;
+ modelInfo.requires = requires;
+ m_modelMap.insert(modelInfo.filename, modelInfo);
+ }
+
+ // remove ggml- prefix and .bin suffix
+ Q_ASSERT(defaultModel.startsWith("ggml-"));
+ defaultModel = defaultModel.remove(0, 5);
+ Q_ASSERT(defaultModel.endsWith(".bin"));
+ defaultModel.chop(4);
+
+ QSettings settings;
+ settings.sync();
+ settings.setValue("defaultModel", defaultModel);
+ settings.sync();
+ emit modelListChanged();
+}
+
+void Download::handleReleaseJsonDownloadFinished()
+{
+ QNetworkReply *jsonReply = qobject_cast<QNetworkReply *>(sender());
+ if (!jsonReply)
+ return;
+
+ QByteArray jsonData = jsonReply->readAll();
+ jsonReply->deleteLater();
+ parseReleaseJsonFile(jsonData);
+}
+
+void Download::parseReleaseJsonFile(const QByteArray &jsonData)
+{
+ QJsonParseError err;
+ QJsonDocument document = QJsonDocument::fromJson(jsonData, &err);
+ if (err.error != QJsonParseError::NoError) {
+ qDebug() << "ERROR: Couldn't parse: " << jsonData << err.errorString();
+ return;
+ }
+
+ QJsonArray jsonArray = document.array();
+
+ m_releaseMap.clear();
+ for (const QJsonValue &value : jsonArray) {
+ QJsonObject obj = value.toObject();
+
+ QString version = obj["version"].toString();
+ QString notes = obj["notes"].toString();
+ QString contributors = obj["contributors"].toString();
+ ReleaseInfo releaseInfo;
+ releaseInfo.version = version;
+ releaseInfo.notes = notes;
+ releaseInfo.contributors = contributors;
+ m_releaseMap.insert(version, releaseInfo);
+ }
+
+ emit hasNewerReleaseChanged();
+ emit releaseInfoChanged();
+}
+
+void Download::handleErrorOccurred(QNetworkReply::NetworkError code)
+{
+ QNetworkReply *modelReply = qobject_cast<QNetworkReply *>(sender());
+ if (!modelReply)
+ return;
+
+ QString modelFilename = modelReply->url().fileName();
+ qWarning() << "ERROR: Network error occurred attempting to download"
+ << modelFilename
+ << "code:" << code
+ << "errorString" << modelReply->errorString();
+ Network::globalInstance()->sendDownloadError(modelFilename, (int)code, modelReply->errorString());
+ cancelDownload(modelFilename);
+}
+
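+// When resuming, the reply only reports the remaining bytes, so recover the full size from the
+// Content-Range header before emitting progress.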
+void Download::handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal)
+{
+ QNetworkReply *modelReply = qobject_cast<QNetworkReply *>(sender());
+ if (!modelReply)
+ return;
+ QFile *tempFile = m_activeDownloads.value(modelReply);
+ if (!tempFile)
+ return;
+ QString contentRange = modelReply->rawHeader("content-range");
+ if (contentRange.contains("/")) {
+ QString contentTotalSize = contentRange.split("/").last();
+ bytesTotal = contentTotalSize.toLongLong();
+ }
+
+ QString modelFilename = modelReply->url().fileName();
+ emit downloadProgress(tempFile->pos(), bytesTotal, modelFilename);
+}
+
+HashAndSaveFile::HashAndSaveFile()
+ : QObject(nullptr)
+{
+ moveToThread(&m_hashAndSaveThread);
+ m_hashAndSaveThread.setObjectName("hashandsave thread");
+ m_hashAndSaveThread.start();
+}
+
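+// Runs on the worker thread: verifies the MD5 of the downloaded file, then tries an atomic rename
+// into place, falling back to a copy when the destination is on another filesystem.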
+void HashAndSaveFile::hashAndSave(const QString &expectedHash, const QString &saveFilePath,
+ QFile *tempFile, QNetworkReply *modelReply)
+{
+ Q_ASSERT(!tempFile->isOpen());
+ QString modelFilename = modelReply->url().fileName();
+
+ // Reopen the tempFile for hashing
+ if (!tempFile->open(QIODevice::ReadOnly)) {
+ qWarning() << "ERROR: Could not open temp file for hashing:"
+ << tempFile->fileName() << modelFilename;
+ emit hashAndSaveFinished(false, tempFile, modelReply);
+ return;
+ }
+
+ QCryptographicHash hash(QCryptographicHash::Md5);
+ while(!tempFile->atEnd())
+ hash.addData(tempFile->read(16384));
+ if (hash.result().toHex() != expectedHash) {
+ tempFile->close();
+ qWarning() << "ERROR: Download error MD5SUM did not match:"
+ << hash.result().toHex()
+ << "!=" << expectedHash << "for" << modelFilename;
+ tempFile->remove();
+ emit hashAndSaveFinished(false, tempFile, modelReply);
+ return;
+ }
+
+ // The file save needs the tempFile closed
+ tempFile->close();
+
+ // Attempt to *move* the verified tempfile into place - this should be atomic
+ // but will only work if the destination is on the same filesystem
+ if (tempFile->rename(saveFilePath)) {
+ emit hashAndSaveFinished(true, tempFile, modelReply);
+ return;
+ }
+
+ // Reopen the tempFile for copying
+ if (!tempFile->open(QIODevice::ReadOnly)) {
+ qWarning() << "ERROR: Could not open temp file at finish:"
+ << tempFile->fileName() << modelFilename;
+ emit hashAndSaveFinished(false, tempFile, modelReply);
+ return;
+ }
+
+ // Save the model file to disk
+ QFile file(saveFilePath);
+ if (file.open(QIODevice::WriteOnly)) {
+ QByteArray buffer;
+ while (!tempFile->atEnd()) {
+ buffer = tempFile->read(16384);
+ file.write(buffer);
+ }
+ file.close();
+ tempFile->close();
+ emit hashAndSaveFinished(true, tempFile, modelReply);
+ } else {
+ QFile::FileError error = file.error();
+ qWarning() << "ERROR: Could not save model to location:"
+ << saveFilePath
+ << "failed with code" << error;
+ tempFile->close();
+ emit hashAndSaveFinished(false, tempFile, modelReply);
+ return;
+ }
+}
+
+void Download::handleModelDownloadFinished()
+{
+ QNetworkReply *modelReply = qobject_cast<QNetworkReply *>(sender());
+ if (!modelReply)
+ return;
+
+ QString modelFilename = modelReply->url().fileName();
+ QFile *tempFile = m_activeDownloads.value(modelReply);
+ m_activeDownloads.remove(modelReply);
+
+ if (modelReply->error()) {
+ qWarning() << "ERROR: downloading:" << modelReply->errorString();
+ modelReply->deleteLater();
+ tempFile->deleteLater();
+ emit downloadFinished(modelFilename);
+ return;
+ }
+
+ // The hash and save needs the tempFile closed
+ tempFile->close();
+
+ // Notify that we are calculating hash
+ ModelInfo info = m_modelMap.value(modelFilename);
+ info.calcHash = true;
+ m_modelMap.insert(modelFilename, info);
+ emit modelListChanged();
+
+ const QString saveFilePath = downloadLocalModelsPath() + modelFilename;
+ emit requestHashAndSave(info.md5sum, saveFilePath, tempFile, modelReply);
+}
+
+void Download::handleHashAndSaveFinished(bool success,
+ QFile *tempFile, QNetworkReply *modelReply)
+{
+ // The hash and save should send back with tempfile closed
+ Q_ASSERT(!tempFile->isOpen());
+ QString modelFilename = modelReply->url().fileName();
+ Network::globalInstance()->sendDownloadFinished(modelFilename, success);
+
+ ModelInfo info = m_modelMap.value(modelFilename);
+ info.calcHash = false;
+ info.installed = success;
+ m_modelMap.insert(modelFilename, info);
+ emit modelListChanged();
+
+ modelReply->deleteLater();
+ tempFile->deleteLater();
+ emit downloadFinished(modelFilename);
+}
+
+void Download::handleReadyRead()
+{
+ QNetworkReply *modelReply = qobject_cast<QNetworkReply *>(sender());
+ if (!modelReply)
+ return;
+
+ QString modelFilename = modelReply->url().fileName();
+ QFile *tempFile = m_activeDownloads.value(modelReply);
+ QByteArray buffer;
+ while (!modelReply->atEnd()) {
+ buffer = modelReply->read(16384);
+ tempFile->write(buffer);
+ }
+ tempFile->flush();
+}
diff --git a/gpt4all-chat/download.h b/gpt4all-chat/download.h
new file mode 100644
index 00000000..638bae43
--- /dev/null
+++ b/gpt4all-chat/download.h
@@ -0,0 +1,136 @@
+#ifndef DOWNLOAD_H
+#define DOWNLOAD_H
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+struct ModelInfo {
+ Q_GADGET
+ Q_PROPERTY(QString filename MEMBER filename)
+ Q_PROPERTY(QString filesize MEMBER filesize)
+ Q_PROPERTY(QByteArray md5sum MEMBER md5sum)
+ Q_PROPERTY(bool calcHash MEMBER calcHash)
+ Q_PROPERTY(bool installed MEMBER installed)
+ Q_PROPERTY(bool isDefault MEMBER isDefault)
+ Q_PROPERTY(bool bestGPTJ MEMBER bestGPTJ)
+ Q_PROPERTY(bool bestLlama MEMBER bestLlama)
+ Q_PROPERTY(bool bestMPT MEMBER bestMPT)
+ Q_PROPERTY(QString description MEMBER description)
+ Q_PROPERTY(QString requires MEMBER requires)
+
+public:
+ QString filename;
+ QString filesize;
+ QByteArray md5sum;
+ bool calcHash = false;
+ bool installed = false;
+ bool isDefault = false;
+ bool bestGPTJ = false;
+ bool bestLlama = false;
+ bool bestMPT = false;
+ QString description;
+ QString requires;
+};
+Q_DECLARE_METATYPE(ModelInfo)
+
+struct ReleaseInfo {
+ Q_GADGET
+ Q_PROPERTY(QString version MEMBER version)
+ Q_PROPERTY(QString notes MEMBER notes)
+ Q_PROPERTY(QString contributors MEMBER contributors)
+
+public:
+ QString version;
+ QString notes;
+ QString contributors;
+};
+
+class HashAndSaveFile : public QObject
+{
+ Q_OBJECT
+public:
+ HashAndSaveFile();
+
+public Q_SLOTS:
+ void hashAndSave(const QString &hash, const QString &saveFilePath,
+ QFile *tempFile, QNetworkReply *modelReply);
+
+Q_SIGNALS:
+ void hashAndSaveFinished(bool success,
+ QFile *tempFile, QNetworkReply *modelReply);
+
+private:
+ QThread m_hashAndSaveThread;
+};
+
+class Download : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(QList<ModelInfo> modelList READ modelList NOTIFY modelListChanged)
+ Q_PROPERTY(bool hasNewerRelease READ hasNewerRelease NOTIFY hasNewerReleaseChanged)
+ Q_PROPERTY(ReleaseInfo releaseInfo READ releaseInfo NOTIFY releaseInfoChanged)
+ Q_PROPERTY(QString downloadLocalModelsPath READ downloadLocalModelsPath
+ WRITE setDownloadLocalModelsPath
+ NOTIFY downloadLocalModelsPathChanged)
+
+public:
+ static Download *globalInstance();
+
+ QList<ModelInfo> modelList() const;
+ ReleaseInfo releaseInfo() const;
+ bool hasNewerRelease() const;
+ Q_INVOKABLE void updateModelList();
+ Q_INVOKABLE void updateReleaseNotes();
+ Q_INVOKABLE void downloadModel(const QString &modelFile);
+ Q_INVOKABLE void cancelDownload(const QString &modelFile);
+ Q_INVOKABLE QString defaultLocalModelsPath() const;
+ Q_INVOKABLE QString downloadLocalModelsPath() const;
+ Q_INVOKABLE void setDownloadLocalModelsPath(const QString &modelPath);
+ Q_INVOKABLE bool isFirstStart() const;
+
+private Q_SLOTS:
+ void handleSslErrors(QNetworkReply *reply, const QList<QSslError> &errors);
+ void handleModelsJsonDownloadFinished();
+ void handleReleaseJsonDownloadFinished();
+ void handleErrorOccurred(QNetworkReply::NetworkError code);
+ void handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
+ void handleModelDownloadFinished();
+ void handleHashAndSaveFinished(bool success,
+ QFile *tempFile, QNetworkReply *modelReply);
+ void handleReadyRead();
+
+Q_SIGNALS:
+ void downloadProgress(qint64 bytesReceived, qint64 bytesTotal, const QString &modelFile);
+ void downloadFinished(const QString &modelFile);
+ void modelListChanged();
+ void releaseInfoChanged();
+ void hasNewerReleaseChanged();
+ void downloadLocalModelsPathChanged();
+ void requestHashAndSave(const QString &hash, const QString &saveFilePath,
+ QFile *tempFile, QNetworkReply *modelReply);
+
+private:
+ void parseModelsJsonFile(const QByteArray &jsonData);
+ void parseReleaseJsonFile(const QByteArray &jsonData);
+ QString incompleteDownloadPath(const QString &modelFile);
+
+ HashAndSaveFile *m_hashAndSave;
+ QMap<QString, ModelInfo> m_modelMap;
+ QMap<QString, ReleaseInfo> m_releaseMap;
+ QNetworkAccessManager m_networkManager;
+ QMap<QNetworkReply*, QFile*> m_activeDownloads;
+ QString m_downloadLocalModelsPath;
+ QDateTime m_startTime;
+
+private:
+ explicit Download();
+ ~Download() {}
+ friend class MyDownload;
+};
+
+#endif // DOWNLOAD_H
diff --git a/gpt4all-chat/icons/copy.svg b/gpt4all-chat/icons/copy.svg
new file mode 100644
index 00000000..5ab45b5b
--- /dev/null
+++ b/gpt4all-chat/icons/copy.svg
@@ -0,0 +1,9 @@
+
diff --git a/gpt4all-chat/icons/edit.svg b/gpt4all-chat/icons/edit.svg
new file mode 100644
index 00000000..9820173b
--- /dev/null
+++ b/gpt4all-chat/icons/edit.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/gpt4all-chat/icons/favicon.icns b/gpt4all-chat/icons/favicon.icns
new file mode 100644
index 00000000..38638c7a
Binary files /dev/null and b/gpt4all-chat/icons/favicon.icns differ
diff --git a/gpt4all-chat/icons/favicon.ico b/gpt4all-chat/icons/favicon.ico
new file mode 100644
index 00000000..d7c27c4b
Binary files /dev/null and b/gpt4all-chat/icons/favicon.ico differ
diff --git a/gpt4all-chat/icons/logo-1024.png b/gpt4all-chat/icons/logo-1024.png
new file mode 100644
index 00000000..2fb7cbdc
Binary files /dev/null and b/gpt4all-chat/icons/logo-1024.png differ
diff --git a/gpt4all-chat/icons/logo-128.png b/gpt4all-chat/icons/logo-128.png
new file mode 100644
index 00000000..81c52374
Binary files /dev/null and b/gpt4all-chat/icons/logo-128.png differ
diff --git a/gpt4all-chat/icons/logo-16.png b/gpt4all-chat/icons/logo-16.png
new file mode 100644
index 00000000..344ac9b1
Binary files /dev/null and b/gpt4all-chat/icons/logo-16.png differ
diff --git a/gpt4all-chat/icons/logo-256.png b/gpt4all-chat/icons/logo-256.png
new file mode 100644
index 00000000..291f3003
Binary files /dev/null and b/gpt4all-chat/icons/logo-256.png differ
diff --git a/gpt4all-chat/icons/logo-32.png b/gpt4all-chat/icons/logo-32.png
new file mode 100644
index 00000000..06628744
Binary files /dev/null and b/gpt4all-chat/icons/logo-32.png differ
diff --git a/gpt4all-chat/icons/logo-48.png b/gpt4all-chat/icons/logo-48.png
new file mode 100644
index 00000000..4d53f9f7
Binary files /dev/null and b/gpt4all-chat/icons/logo-48.png differ
diff --git a/gpt4all-chat/icons/logo-512.png b/gpt4all-chat/icons/logo-512.png
new file mode 100644
index 00000000..4f70fbb4
Binary files /dev/null and b/gpt4all-chat/icons/logo-512.png differ
diff --git a/gpt4all-chat/icons/logo-64.png b/gpt4all-chat/icons/logo-64.png
new file mode 100644
index 00000000..fbffe619
Binary files /dev/null and b/gpt4all-chat/icons/logo-64.png differ
diff --git a/gpt4all-chat/icons/logo.svg b/gpt4all-chat/icons/logo.svg
new file mode 100644
index 00000000..e7084ec0
--- /dev/null
+++ b/gpt4all-chat/icons/logo.svg
@@ -0,0 +1,14 @@
+
diff --git a/gpt4all-chat/icons/network.svg b/gpt4all-chat/icons/network.svg
new file mode 100644
index 00000000..266f13d6
--- /dev/null
+++ b/gpt4all-chat/icons/network.svg
@@ -0,0 +1 @@
+
diff --git a/gpt4all-chat/icons/regenerate.svg b/gpt4all-chat/icons/regenerate.svg
new file mode 100644
index 00000000..016e6a52
--- /dev/null
+++ b/gpt4all-chat/icons/regenerate.svg
@@ -0,0 +1 @@
+
diff --git a/gpt4all-chat/icons/send_message.svg b/gpt4all-chat/icons/send_message.svg
new file mode 100644
index 00000000..d8650b66
--- /dev/null
+++ b/gpt4all-chat/icons/send_message.svg
@@ -0,0 +1 @@
+
diff --git a/gpt4all-chat/icons/settings.svg b/gpt4all-chat/icons/settings.svg
new file mode 100644
index 00000000..7542ea62
--- /dev/null
+++ b/gpt4all-chat/icons/settings.svg
@@ -0,0 +1,46 @@
+
+
diff --git a/gpt4all-chat/icons/stop_generating.svg b/gpt4all-chat/icons/stop_generating.svg
new file mode 100644
index 00000000..c627ac0e
--- /dev/null
+++ b/gpt4all-chat/icons/stop_generating.svg
@@ -0,0 +1 @@
+
diff --git a/gpt4all-chat/icons/thumbs_down.svg b/gpt4all-chat/icons/thumbs_down.svg
new file mode 100644
index 00000000..b01a82d3
--- /dev/null
+++ b/gpt4all-chat/icons/thumbs_down.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/gpt4all-chat/icons/thumbs_up.svg b/gpt4all-chat/icons/thumbs_up.svg
new file mode 100644
index 00000000..cd5efcd2
--- /dev/null
+++ b/gpt4all-chat/icons/thumbs_up.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/gpt4all-chat/icons/trash.svg b/gpt4all-chat/icons/trash.svg
new file mode 100644
index 00000000..b7c1a141
--- /dev/null
+++ b/gpt4all-chat/icons/trash.svg
@@ -0,0 +1,5 @@
+
+
diff --git a/gpt4all-chat/llm.cpp b/gpt4all-chat/llm.cpp
new file mode 100644
index 00000000..e94c461b
--- /dev/null
+++ b/gpt4all-chat/llm.cpp
@@ -0,0 +1,79 @@
+#include "llm.h"
+#include "config.h"
+#include "download.h"
+#include "network.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+class MyLLM: public LLM { };
+Q_GLOBAL_STATIC(MyLLM, llmInstance)
+LLM *LLM::globalInstance()
+{
+ return llmInstance();
+}
+
+LLM::LLM()
+ : QObject{nullptr}
+ , m_chatListModel(new ChatListModel(this))
+ , m_threadCount(std::min(4, (int32_t) std::thread::hardware_concurrency()))
+ , m_compatHardware(true)
+{
+ connect(QCoreApplication::instance(), &QCoreApplication::aboutToQuit,
+ this, &LLM::aboutToQuit);
+
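+ // For builds that enable AVX2/FMA, check at runtime that the CPU actually supports AVX, AVX2 and
+ // FMA so the UI can warn about incompatible hardware.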
+#if defined(__x86_64__) || defined(__i386__)
+ if (QString(GPT4ALL_AVX_ONLY) == "OFF") {
+ const bool avx(__builtin_cpu_supports("avx"));
+ const bool avx2(__builtin_cpu_supports("avx2"));
+ const bool fma(__builtin_cpu_supports("fma"));
+ m_compatHardware = avx && avx2 && fma;
+ emit compatHardwareChanged();
+ }
+#endif
+}
+
+bool LLM::checkForUpdates() const
+{
+ Network::globalInstance()->sendCheckForUpdates();
+
+#if defined(Q_OS_LINUX)
+ QString tool("maintenancetool");
+#elif defined(Q_OS_WINDOWS)
+ QString tool("maintenancetool.exe");
+#elif defined(Q_OS_DARWIN)
+ QString tool("../../../maintenancetool.app/Contents/MacOS/maintenancetool");
+#endif
+
+ QString fileName = QCoreApplication::applicationDirPath()
+ + "/../" + tool;
+ if (!QFileInfo::exists(fileName)) {
+ qDebug() << "Couldn't find tool at" << fileName << "so cannot check for updates!";
+ return false;
+ }
+
+ return QProcess::startDetached(fileName);
+}
+
+int32_t LLM::threadCount() const
+{
+ return m_threadCount;
+}
+
+void LLM::setThreadCount(int32_t n_threads)
+{
+ if (n_threads <= 0)
+ n_threads = std::min(4, (int32_t) std::thread::hardware_concurrency());
+ m_threadCount = n_threads;
+ emit threadCountChanged();
+}
+
+void LLM::aboutToQuit()
+{
+ m_chatListModel->saveChats();
+}
diff --git a/gpt4all-chat/llm.h b/gpt4all-chat/llm.h
new file mode 100644
index 00000000..ac12981d
--- /dev/null
+++ b/gpt4all-chat/llm.h
@@ -0,0 +1,44 @@
+#ifndef LLM_H
+#define LLM_H
+
+#include
+
+#include "chatlistmodel.h"
+
+class LLM : public QObject
+{
+ Q_OBJECT
+ Q_PROPERTY(ChatListModel *chatListModel READ chatListModel NOTIFY chatListModelChanged)
+ Q_PROPERTY(int32_t threadCount READ threadCount WRITE setThreadCount NOTIFY threadCountChanged)
+ Q_PROPERTY(bool compatHardware READ compatHardware NOTIFY compatHardwareChanged)
+
+public:
+ static LLM *globalInstance();
+
+ ChatListModel *chatListModel() const { return m_chatListModel; }
+ int32_t threadCount() const;
+ void setThreadCount(int32_t n_threads);
+ bool compatHardware() const { return m_compatHardware; }
+
+ Q_INVOKABLE bool checkForUpdates() const;
+
+Q_SIGNALS:
+ void chatListModelChanged();
+ void threadCountChanged();
+ void compatHardwareChanged();
+
+private Q_SLOTS:
+ void aboutToQuit();
+
+private:
+ ChatListModel *m_chatListModel;
+ int32_t m_threadCount;
+ bool m_compatHardware;
+
+private:
+ explicit LLM();
+ ~LLM() {}
+ friend class MyLLM;
+};
+
+#endif // LLM_H
diff --git a/gpt4all-chat/llmodel/CMakeLists.txt b/gpt4all-chat/llmodel/CMakeLists.txt
new file mode 100644
index 00000000..704faccc
--- /dev/null
+++ b/gpt4all-chat/llmodel/CMakeLists.txt
@@ -0,0 +1,47 @@
+cmake_minimum_required(VERSION 3.16)
+
+if(APPLE)
+ option(BUILD_UNIVERSAL "Build a Universal binary on macOS" ON)
+ if(BUILD_UNIVERSAL)
+ # Build a Universal binary on macOS
+ # This requires that the found Qt library is compiled as Universal binaries.
+ set(CMAKE_OSX_ARCHITECTURES "arm64;x86_64" CACHE STRING "" FORCE)
+ else()
+ # Build for the host architecture on macOS
+ set(CMAKE_OSX_ARCHITECTURES "${CMAKE_HOST_SYSTEM_PROCESSOR}" CACHE STRING "" FORCE)
+ endif()
+endif()
+
+# Include the binary directory for the generated header file
+include_directories("${CMAKE_CURRENT_BINARY_DIR}")
+
+project(llmodel VERSION ${APP_VERSION} LANGUAGES CXX C)
+
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+set(LLAMA_BUILD_EXAMPLES ON CACHE BOOL "llama: build examples" FORCE)
+set(BUILD_SHARED_LIBS ON FORCE)
+
+set(CMAKE_VERBOSE_MAKEFILE ON)
+if (GPT4ALL_AVX_ONLY)
+ set(LLAMA_AVX2 OFF CACHE BOOL "llama: enable AVX2" FORCE)
+ set(LLAMA_F16C OFF CACHE BOOL "llama: enable F16C" FORCE)
+ set(LLAMA_FMA OFF CACHE BOOL "llama: enable FMA" FORCE)
+endif()
+
+add_subdirectory(llama.cpp)
+
+add_library(llmodel
+ gptj.h gptj.cpp
+ llamamodel.h llamamodel.cpp
+ llama.cpp/examples/common.cpp
+ llmodel.h llmodel_c.h llmodel_c.cpp
+ mpt.h mpt.cpp
+ utils.h utils.cpp
+)
+
+target_link_libraries(llmodel
+ PRIVATE llama)
+
+set(COMPONENT_NAME_MAIN ${PROJECT_NAME})
+set(CMAKE_INSTALL_PREFIX ${CMAKE_BINARY_DIR}/install)
diff --git a/gpt4all-chat/llmodel/gptj.cpp b/gpt4all-chat/llmodel/gptj.cpp
new file mode 100644
index 00000000..837fd1f8
--- /dev/null
+++ b/gpt4all-chat/llmodel/gptj.cpp
@@ -0,0 +1,1102 @@
+#include "gptj.h"
+#include "llama.cpp/ggml.h"
+
+#include "utils.h"
+
+#include
+#include
+#include
+#include
+#include
+#include