[{"data":1,"prerenderedAt":366},["ShallowReactive",2],{"$fgukOamtKU1RtUiMFsqdObttmqPPQz0uc7bl_gj_LyX0":3,"$fiV-5n_5aw-0fuMLPWVBTOQnqp72b9aNLqAH5Gr_Q7iM":245,"article-9":365},{"code":4,"msg":5,"data":6},0,"",{"category":7,"tag":11,"popular":19,"latest":86,"banner":126,"list":151,"cache":244},[8,9,10],"Agent","OpenAI","LLM",[8,12,13,14,9,10,15,16,17,18],"Google","Nvidia","Claude","DeepSeek","OCR","Chat","Generator",[20,29,37,45,54,62,70,79],{"id":21,"publish_date":22,"is_original":23,"collection":5,"cover_url":24,"cover_url_1_1":25,"title":26,"summary":27,"author":28},411,"2023-09-10",1,"article_res/cover/451ef50c225a8dc61c4336506794d13b.jpeg","article_res/cover/3ba9dc7a72f87d40b20fc2d225289ee3.jpeg","Idealism","Reality is created by the mind, we can change our reality by changing our mind. - Plato","Renee's Entrepreneurial Journey",{"id":30,"publish_date":31,"is_original":23,"collection":32,"cover_url":33,"cover_url_1_1":34,"title":35,"summary":36,"author":28},108,"2024-12-07","#LLM #AGI #AI Agent","article_res/cover/0039044422e4ec9f61c18e8ee1693bb0.jpeg","article_res/cover/4220971b108a91d21407d87bb02fbaa6.jpeg","Freysa.ai: The World's First Adversarial AI Agent Game","说服 Freysa 把钱包里的钱都拿出来",{"id":38,"publish_date":39,"is_original":23,"collection":40,"cover_url":41,"cover_url_1_1":42,"title":43,"summary":44,"author":28},12,"2025-03-09","#Oxford #Reasoning #LLM #Tool Use","article_res/cover/d448e9b3617a0b5302e1bd10c438bca9.jpeg","article_res/cover/864a468f9cc4c9317efadb3811909888.jpeg","Agentic Reasoning Framework - Significantly enhance the reasoning ability of LLMs through the integration of external tools using agents","Agentic Reasoning: Reasoning LLMs with Tools for Deep Research",{"id":46,"publish_date":47,"is_original":4,"collection":48,"cover_url":49,"cover_url_1_1":50,"title":51,"summary":52,"author":53},480,"2023-04-14","#Stable Diffusion","article_res/cover/0bdbe7cb1de4a78e54536e5d9afa7ec9.jpeg","article_res/cover/b3d6ffec0608dcfaf18c5a69906d1490.jpeg","【AIGC Learning】Generate Prompts Using Word Graphs - Stable Diffusion Web UI Series 13","AI will become a powerful tool in education, transforming the way we learn and deliver instruction.  \n- Reid Hoffman","--",{"id":55,"publish_date":56,"is_original":4,"collection":57,"cover_url":58,"cover_url_1_1":59,"title":60,"summary":61,"author":28},413,"2023-09-08","#Neuroscience","article_res/cover/74f8302d78a23d9430f22171eae136b6.jpeg","article_res/cover/87ca08af81bb304746be5261160964c0.jpeg","Can machines be conscious?","Do we have an ethical obligation to not turn off conscious machines? Would turning them off be murder? No. 
I don't lose any sleep over unplugging a conscious machine.\n- Jeff Hawkins, \"A Thousand Brains\"",{"id":63,"publish_date":64,"is_original":23,"collection":65,"cover_url":66,"cover_url_1_1":67,"title":68,"summary":69,"author":28},178,"2024-09-09","#Entrepreneurship","article_res/cover/a7224f025b55d1820408085faef63079.jpeg","article_res/cover/11a9995b096cbf64465ef01b8673b154.jpeg","37signals company","This damn sense of relaxation",{"id":71,"publish_date":72,"is_original":4,"collection":73,"cover_url":74,"cover_url_1_1":75,"title":76,"summary":77,"author":78},460,"2023-05-12","#Google","article_res/cover/b970687b12faa52da976f91248c2aa7b.jpeg","article_res/cover/d1e71b52cfd2c63bc6e71f3e85ff135c.jpeg","Learn what BRC-20 and Ordinals are using Google Bard","Ordinals - a new protocol that allows users to store arbitrary data on the Bitcoin blockchain","Google Bard mainly writes",{"id":80,"publish_date":81,"is_original":23,"collection":5,"cover_url":82,"cover_url_1_1":83,"title":84,"summary":85,"author":28},309,"2024-03-26","article_res/cover/9877f95894ee88532d0e6012c23a2df3.jpeg","article_res/cover/20092164ddc109ce6ae56b1984246751.jpeg","Learning the Cancun Upgrade with lepton and perplexity","Building a quick conversation-based search demo with Lepton AI.",[87,95,103,111,119],{"id":88,"publish_date":89,"is_original":23,"collection":90,"cover_url":91,"cover_url_1_1":92,"title":93,"summary":94,"author":28},627,"2025-03-20","#AI Avatar #AI Video Generation","article_res/cover/d95481358f73924989f8c4ee9c75d1c8.jpeg","article_res/cover/b74bc0fab01f8b6a6aa87696c0c3ed8b.jpeg","DisPose: Generating Animated Videos by Driving Video with Reference Images","DisPose is a controllable human image animation method that enhances video generation.",{"id":96,"publish_date":97,"is_original":23,"collection":98,"cover_url":99,"cover_url_1_1":100,"title":101,"summary":102,"author":28},626,"2025-03-21","#Deep Dive into LLMs #LLM #RL #Andrej Karpathy #AlphaGo","article_res/cover/446553a5c8f8f2f07d97b20eaee84e56.jpeg","article_res/cover/e6c2823409c9b34624064b9acbaca6f1.jpeg","AlphaGo and the Power of Reinforcement Learning - Andrej Karpathy's Deep Dive on LLMs (Part 9)","Simply learning from humans will never surpass human capabilities.",{"id":104,"publish_date":105,"is_original":23,"collection":106,"cover_url":107,"cover_url_1_1":108,"title":109,"summary":110,"author":28},625,"2025-03-22","#Deep Dive into LLMs #LLM #RL #RLHF #Andrej Karpathy","article_res/cover/8da81d38b1e5cf558a164710fd8a5389.jpeg","article_res/cover/96f028d76c362a99a0dd56389e8f7a9b.jpeg","Reinforcement Learning from Human Feedback (RLHF) - Andrej Karpathy's Deep Dive on LLMs (Part 10)","Fine-Tuning Language Models from Human Preferences",{"id":112,"publish_date":113,"is_original":23,"collection":114,"cover_url":115,"cover_url_1_1":116,"title":117,"summary":118,"author":28},624,"2025-03-23","#Deep Dive into LLMs #LLM #Andrej Karpathy #AI Agent #MMM","article_res/cover/a5e7c3d48bb09109684d6513287c661d.jpeg","article_res/cover/d3f22b7c0ab8d82fd2da457a299e0773.jpeg","The Future of Large Language Models - Andrej Karpathy's In-Depth Explanation of LLM (Part 11)","preview of things to come",{"id":120,"publish_date":113,"is_original":23,"collection":121,"cover_url":122,"cover_url_1_1":123,"title":124,"summary":125,"author":28},623,"#Google #Voe #AI Video Generation","article_res/cover/c44062fea0f336c2b96b3928292392c2.jpeg","article_res/cover/a041041c69092ad3db191c5bf3ff981b.jpeg","Trial of Google's video generation model VOE2","Our state-of-the-art video 
generation model",[127,135,143],{"id":128,"publish_date":129,"is_original":23,"collection":130,"cover_url":131,"cover_url_1_1":132,"title":133,"summary":134,"author":28},300,"2024-04-16","#AI in Science #AGI","article_res/cover/6bf01e793e0f33e848572412eebdf9b0.jpeg","article_res/cover/91a5ee21dafecb914fabeb9430d46ec1.jpeg","Would Einstein lose his job - AI and Quantum Computing: A Glimpse into the Near Future","So Einstein's job is still safe.",{"id":136,"publish_date":137,"is_original":23,"collection":138,"cover_url":139,"cover_url_1_1":140,"title":141,"summary":142,"author":28},101,"2024-12-14","#Nvidia #AI 3D Generator","article_res/cover/693e07c85980c5c0c8fde3f037733f23.jpeg","article_res/cover/9ea8edff2d5d303ff3fffff3f6f9c3d9.jpeg","NVIDIA's open-source 3D project LLaMA-Mesh","LLaMA-Mesh: Unifying 3D Mesh Generation with Language Models",{"id":144,"publish_date":145,"is_original":23,"collection":146,"cover_url":147,"cover_url_1_1":148,"title":149,"summary":150,"author":28},131,"2024-11-10","#OpenAI","article_res/cover/87f8ed353ce39f31960e7cdfaf075a35.jpeg","article_res/cover/f597a63935f5cd32e484b4aadd6019e8.jpeg","ChatGPT has launched the Search function","Get fast, timely answers with links to relevant web sources.",{"big":152,"small":214},[153,181],{"title":154,"list":155},"AGENT",[156,157,165,173],{"id":112,"publish_date":113,"is_original":23,"collection":114,"cover_url":115,"cover_url_1_1":116,"title":117,"summary":118,"author":28},{"id":158,"publish_date":159,"is_original":23,"collection":160,"cover_url":161,"cover_url_1_1":162,"title":163,"summary":164,"author":28},622,"2025-03-24","#OWL #AI Agent #MAS #MCP #CUA","article_res/cover/cb50ca7f2bf4d1ed50202d7406e1c19a.jpeg","article_res/cover/4aa7aa3badfacf3cc84121334f1050dd.jpeg","OWL: Multi-agent collaboration","OWL: Optimized Workforce Learning for General Multi-Agent Assistance in Real-World Task Automation",{"id":166,"publish_date":167,"is_original":23,"collection":168,"cover_url":169,"cover_url_1_1":170,"title":171,"summary":172,"author":28},620,"2025-03-26","#LLM #Google #Gemini #AI Agent","article_res/cover/53751a6dbbe990b1eb0b63f3b062aed4.jpeg","article_res/cover/031344981f0a212ff82d1f3a64aa5756.jpeg","Gemini 2.5 Pro, claimed to be far ahead of the competition, has been released with great fanfare: comprehensively surpassing other LLMs and topping the global rankings","Gemini 2.5: Our most intelligent AI model",{"id":174,"publish_date":175,"is_original":23,"collection":176,"cover_url":177,"cover_url_1_1":178,"title":179,"summary":180,"author":28},616,"2025-03-29","#MAS #AI Agent #AI Coder #MetaGPT #MGX","article_res/cover/9dcd702ad2035902e5e77967c34a1f1e.jpeg","article_res/cover/0a97fc4a922753c8f46ff38792020df8.jpeg","MGX - An automated website-building platform composed of multiple AI Agents","Your 24/7 AI Team | Dream, Chat, Create.",{"title":182,"list":183},"OPENAI",[184,191,199,206],{"id":185,"publish_date":167,"is_original":23,"collection":186,"cover_url":187,"cover_url_1_1":188,"title":189,"summary":190,"author":28},619,"#OpenAI #AI Image Generator #4o #MMM #AR Transformer","article_res/cover/2faffc97fcecf3151552cb0fd3206d89.jpeg","article_res/cover/1133cb4948af44cee2e7fbe79efb69e5.jpeg","The native image function of GPT-4o is officially launched","Introducing 4o Image Generation",{"id":192,"publish_date":193,"is_original":4,"collection":194,"cover_url":195,"cover_url_1_1":196,"title":197,"summary":198,"author":28},434,"2023-07-15","#Anthropic #OpenAI #Google #AI Code Generator 
#Claude","article_res/cover/e1b6f600a2b9f262a4392684e5f2ce25.jpeg","article_res/cover/6e1772e83f78f9a351ab23d3e414adee.jpeg","Latest Updates on Google Bard /Anthropic Claude2 / ChatGPT Code Interpreter","We want our models to use their programming skills to provide more natural interfaces to the basic functions of our computers.  \n - OpenAI",{"id":200,"publish_date":201,"is_original":4,"collection":146,"cover_url":202,"cover_url_1_1":203,"title":204,"summary":205,"author":28},417,"2023-08-24","article_res/cover/bccf897d50a88b18364e35f7466387e0.jpeg","article_res/cover/2f871085c1073717c1703ae86e18056f.jpeg","The GPT-3.5 Turbo fine-tuning (fine-tuning function) has been released～","Developers can now bring their own data to customize GPT-3.5 Turbo for their use cases.",{"id":207,"publish_date":208,"is_original":4,"collection":209,"cover_url":210,"cover_url_1_1":211,"title":212,"summary":213,"author":28},407,"2023-09-22","#OpenAI #AI Image Generator","article_res/cover/c59005e903d35cfc32346e2756e2728a.jpeg","article_res/cover/ba011d265e6d84b5c8cb6fd6b757b6cc.jpeg","Dall-E 3","DALL·E 3 understands significantly more nuance and detail, allowing you to easily translate your ideas into images.",[215,221,241],{"title":10,"list":216},[217,218,219,220],{"id":96,"publish_date":97,"is_original":23,"collection":98,"cover_url":99,"cover_url_1_1":100,"title":101,"summary":102,"author":28},{"id":104,"publish_date":105,"is_original":23,"collection":106,"cover_url":107,"cover_url_1_1":108,"title":109,"summary":110,"author":28},{"id":112,"publish_date":113,"is_original":23,"collection":114,"cover_url":115,"cover_url_1_1":116,"title":117,"summary":118,"author":28},{"id":166,"publish_date":167,"is_original":23,"collection":168,"cover_url":169,"cover_url_1_1":170,"title":171,"summary":172,"author":28},{"title":222,"list":223},"GOOGLE",[224,225,226,234],{"id":120,"publish_date":113,"is_original":23,"collection":121,"cover_url":122,"cover_url_1_1":123,"title":124,"summary":125,"author":28},{"id":166,"publish_date":167,"is_original":23,"collection":168,"cover_url":169,"cover_url_1_1":170,"title":171,"summary":172,"author":28},{"id":227,"publish_date":228,"is_original":23,"collection":229,"cover_url":230,"cover_url_1_1":231,"title":232,"summary":233,"author":28},615,"2025-03-30","#AI Researcher #AI Science #HKU #Google #AI Agent","article_res/cover/21fadf906067714bb0db31ae13a77c15.jpeg","article_res/cover/2697999a72bd26b22e85f0e92936d3ed.jpeg","AI-Researcher: LLM-driven全自动 scientific research assistant","AI-Researcher: Fully-Automated Scientific Discovery with LLM Agents  \nOpen-Sourced Alternative to Google AI Co-Scientist",{"id":235,"publish_date":236,"is_original":23,"collection":73,"cover_url":237,"cover_url_1_1":238,"title":239,"summary":240,"author":28},463,"2023-05-09","article_res/cover/89800f207723acdb55fc53bf999ebdc9.jpeg","article_res/cover/5764f369b4accd8f83e94aa4c077a175.jpeg","The Smallville sandbox world - A town with 25 virtual residents","Believable proxies of human behavior can empower interactive apps: Immersive environment, Rehearsal space, Prototyping tool",{"title":242,"list":243},"NVIDIA",[],true,{"code":4,"msg":5,"data":246},{"id":247,"publish_date":248,"is_original":23,"collection":249,"articles_id":250,"cover_url":251,"cover_url_1_1":252,"title":253,"summary":254,"author":28,"content":255,"popular":256,"list":320,"category":363,"tag":364},9,"2025-03-12","#Mistral #OCR #le 
Chat","lgdAuZR_eIjuEKuwnH9vvA","article_res/cover/301cdb65caf72bf97a07bae7506126be.jpeg","article_res/cover/9a111631ee196a9fab79ebe6bf35ed1d.jpeg","French AI company Mistral has launched Mistral OCR.","Introducing the world’s best document understanding API.","\u003Cdiv class=\"rich_media_content js_underline_content\n                       autoTypeSetting24psection\n            \" id=\"js_content\">\u003Cp style='box-sizing: border-box;margin: 0px;cursor: pointer;color: rgb(0, 0, 0);font-size: 16px;line-height: 1.8em;letter-spacing: normal;text-align: left;text-indent: 0px;padding: 8px 0px;font-family: Optima, \"Microsoft YaHei\", PingFangSC-regular, serif;font-style: normal;font-variant-ligatures: normal;font-variant-caps: normal;font-weight: 400;orphans: 2;text-transform: none;widows: 2;word-spacing: 0px;-webkit-text-stroke-width: 0px;white-space: normal;background-color: rgb(255, 255, 255);text-decoration-thickness: initial;text-decoration-style: initial;text-decoration-color: initial;'>\u003Cspan leaf=\"\">Last week, French AI company Mistral launched Mistral OCR.\u003C/span>\u003C/p>\u003Cp style='box-sizing: border-box;margin: 0px;cursor: pointer;color: rgb(0, 0, 0);font-size: 16px;line-height: 1.8em;letter-spacing: normal;text-align: left;text-indent: 0px;padding: 8px 0px;font-family: Optima, \"Microsoft YaHei\", PingFangSC-regular, serif;font-style: normal;font-variant-ligatures: normal;font-variant-caps: normal;font-weight: 400;orphans: 2;text-transform: none;widows: 2;word-spacing: 0px;-webkit-text-stroke-width: 0px;white-space: normal;background-color: rgb(255, 255, 255);text-decoration-thickness: initial;text-decoration-style: initial;text-decoration-color: initial;'>\u003Cspan leaf=\"\">Mistral OCR is a document understanding optical character recognition API of an entirely new standard. Unlike traditional OCR tools, Mistral OCR can identify and understand various elements within documents with unprecedented accuracy, including images, text, tables, and complex content such as mathematical equations. 
It accepts images and PDF files as input and outputs organized text and image content.

[Video demo](./assets/17423769603850.625962280882244.mp4)

Mistral OCR is particularly well suited for integration with Retrieval-Augmented Generation (RAG) systems that need to process multimodal, complex documents such as slide decks or content-rich PDFs.

Mistral has made Mistral OCR the default document understanding model for the millions of users of the Le Chat platform. The newly launched API (`mistral-ocr-latest`) is priced at roughly 1000 pages per dollar, and batch inference roughly doubles that (about 2000 pages per dollar).

![](./assets/17423769605630.6991246618646512.png)

![](./assets/17423769607460.09311490061446293.png)

![](./assets/17423769605700.07457590187849195.png)

![](./assets/17423769608430.5121824697078954.png)

![](./assets/17423769623880.7746578171734559.png)
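As a concrete illustration, here is a minimal sketch of what a call to the new API might look like with Mistral's Python SDK. The model id `mistral-ocr-latest` comes from the announcement; the method and field names are assumptions to verify against Mistral's current documentation, and the document URL is hypothetical.

```python
# Minimal sketch of an OCR call with the "mistralai" Python SDK.
# Assumptions: the SDK exposes an `ocr.process` method and the response holds
# one entry per page with a `markdown` field; check the official docs.
import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

response = client.ocr.process(
    model="mistral-ocr-latest",
    document={
        "type": "document_url",
        "document_url": "https://example.com/paper.pdf",  # hypothetical document
    },
)

# The OCR output is organized, Markdown-like text, returned page by page.
for page in response.pages:
    print(page.markdown)

# Pricing arithmetic from the post: ~1000 pages per dollar is roughly
# $0.001 per page, or about $0.0005 per page with batch inference.
```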
### Mistral OCR Highlights

- **Outstanding complex document understanding.** Mistral OCR handles documents with interwoven images, mathematical expressions, tables, and advanced typesetting (such as LaTeX), which makes it especially suitable for scientific papers rich in charts, formulas, and figures.
- **Native support for multilingual and multimodal content.** It accurately parses thousands of fonts and languages from every continent, suiting both global organizations and enterprises serving local markets.
- **Top-tier benchmark performance.** In rigorous benchmarking, Mistral OCR consistently outperforms other leading OCR models, excelling in overall performance, mathematical content recognition, multilingual support, scanned-document recognition, and table recognition.
- **Fastest in its class.** The model is lightweight and processes documents far faster than comparable products; a single node can handle up to 2000 pages per minute, making it well suited to high-throughput scenarios.
- **Documents as prompts, structured output.** Documents themselves serve as prompts, letting users extract document information in structured form such as JSON and build more advanced automation workflows (see the sketch after this list).
- **Optional self-hosted deployment for sensitive or confidential data.** For institutions with strict data-privacy requirements, Mistral OCR offers a selective self-deployment option that keeps sensitive or confidential information secure.
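To make the documents-as-prompts idea concrete, here is a hedged sketch that turns the OCR'd Markdown into a structured JSON record by asking a chat model for JSON-only output. This is one plausible way to build such a workflow, not necessarily the exact interface Mistral ships; the chat model name and the extraction schema are illustrative.

```python
# Sketch: extract structured JSON from OCR'd Markdown via a chat model.
# The chat model name and the field schema are illustrative assumptions.
import json
import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

ocr_markdown = "..."  # e.g. "\n\n".join(page.markdown for page in response.pages)

chat_response = client.chat.complete(
    model="mistral-small-latest",  # any chat-capable model should work here
    messages=[{
        "role": "user",
        "content": (
            "From the document below, extract the title, the authors and the "
            "publication date. Answer with a single JSON object with the keys "
            '"title", "authors" and "date".\n\n' + ocr_markdown
        ),
    }],
    response_format={"type": "json_object"},  # request JSON-only output
)

record = json.loads(chat_response.choices[0].message.content)
print(record)
```

Structured records like this are what downstream automation (databases, ticketing systems, RAG indexes) can consume directly.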
leaf=\"\">\u003Cbr>\u003C/span>\u003Cspan leaf=\"\">For institutions with high data privacy requirements, Mistral OCR offers a selective self-deployment option to ensure the security of sensitive or confidential information.\u003C/span>\u003C/p>\u003C/section>\u003C/li>\u003C/ul>\u003Ch3 style='box-sizing: border-box;margin: 30px 0px 15px;color: rgba(0, 0, 0, 0.85);font-weight: 500;cursor: pointer;padding: 0px;display: block;font-family: Optima, \"Microsoft YaHei\", PingFangSC-regular, serif;font-style: normal;font-variant-ligatures: normal;font-variant-caps: normal;letter-spacing: normal;orphans: 2;text-align: left;text-indent: 0px;text-transform: none;widows: 2;word-spacing: 0px;-webkit-text-stroke-width: 0px;white-space: normal;background-color: rgb(255, 255, 255);text-decoration-thickness: initial;text-decoration-style: initial;text-decoration-color: initial;'>\u003Cspan style=\"box-sizing: border-box;cursor: pointer;font-size: 20px;color: rgb(0, 0, 0);line-height: 1.5em;letter-spacing: 0em;text-align: left;font-weight: bold;display: block;\">\u003Cspan leaf=\"\">Use cases\u003C/span>\u003C/span>\u003C/h3>\u003Cul style='box-sizing: border-box;margin: 8px 0px;cursor: pointer;list-style-type: disc;padding: 0px 0px 0px 25px;color: rgb(0, 0, 0);font-family: Optima, \"Microsoft YaHei\", PingFangSC-regular, serif;font-size: 16px;font-style: normal;font-variant-ligatures: normal;font-variant-caps: normal;font-weight: 400;letter-spacing: normal;orphans: 2;text-align: left;text-indent: 0px;text-transform: none;widows: 2;word-spacing: 0px;-webkit-text-stroke-width: 0px;white-space: normal;background-color: rgb(255, 255, 255);text-decoration-thickness: initial;text-decoration-style: initial;text-decoration-color: initial;' class=\"list-paddingleft-1\">\u003Cli style=\"box-sizing: border-box;cursor: pointer;\">\u003Csection style=\"box-sizing: border-box;cursor: pointer;margin-top: 5px;margin-bottom: 5px;color: rgb(1, 1, 1);font-size: 16px;line-height: 1.8em;letter-spacing: 0em;text-align: left;font-weight: normal;\">\u003Cstrong style=\"box-sizing: border-box;font-weight: bold;cursor: pointer;color: rgb(0, 0, 0);background: none 0% 0% / auto no-repeat scroll padding-box border-box rgba(0, 0, 0, 0);width: auto;height: auto;margin: 0px;padding: 0px;border-style: none;border-width: 3px;border-color: rgba(0, 0, 0, 0.4);border-radius: 0px;\">\u003Cspan leaf=\"\">Digitization of scientific research\u003C/span>\u003C/strong>\u003Cspan leaf=\"\">: Multiple research institutions use Mistral OCR to convert scientific papers and journals into AI-readable formats, enhancing the efficiency of scientific research collaboration.\u003C/span>\u003C/section>\u003C/li>\u003Cli style=\"box-sizing: border-box;cursor: pointer;\">\u003Csection style=\"box-sizing: border-box;cursor: pointer;margin-top: 5px;margin-bottom: 5px;color: rgb(1, 1, 1);font-size: 16px;line-height: 1.8em;letter-spacing: 0em;text-align: left;font-weight: normal;\">\u003Cstrong style=\"box-sizing: border-box;font-weight: bold;cursor: pointer;color: rgb(0, 0, 0);background: none 0% 0% / auto no-repeat scroll padding-box border-box rgba(0, 0, 0, 0);width: auto;height: auto;margin: 0px;padding: 0px;border-style: none;border-width: 3px;border-color: rgba(0, 0, 0, 0.4);border-radius: 0px;\">\u003Cspan leaf=\"\">Heritage and Cultural Relics Protection\u003C/span>\u003C/strong>\u003Cspan leaf=\"\">: Cultural protection organizations and non-profit institutions use Mistral OCR to digitize historical documents, expanding their 
### Outstanding performance

In rigorous benchmark testing, Mistral OCR consistently outperforms other leading OCR models. For a fair comparison, mainstream products such as Google Document AI, Microsoft Azure OCR, Gemini, and GPT-4o were included. The test data covered mathematical formulas, multilingual content, scanned documents, and complex tables, in the various PDF and image formats commonly found on the web. In this evaluation, Mistral OCR led in overall performance, math recognition, multilingual support, scanned-document recognition, and table recognition.

![](./assets/17423769615800.5192448240294059.png)

### Native multilingual support

Since its inception, Mistral has been committed to serving global users by continuously optimizing and expanding the multilingual capabilities of its models.
The newly launched Mistral OCR takes this further, efficiently parsing, understanding, and accurately transcribing thousands of scripts, fonts, and languages from every continent.

![](./assets/17423769615780.30899558766780055.png)

This all-round language support not only helps global enterprises handle documents in many languages, but also provides an efficient solution for niche companies focused on local markets.

![](./assets/17423769616000.5060751784934496.png)
Intelligence","bec250298642b1685bf54fdfa7d0967b","article_res/cover/c53380fe34c81365ceb29c4392dbbf27.jpeg","article_res/cover/232a90109b8fbabb17fe5f7d23b338b2.jpeg",{"id":275,"title_md5":276,"publish_date":277,"author_md5":278,"is_original":4,"collection":279,"summary_md5":280,"cover_url":281,"cover_url_1_1":282},589,"50c40e81d66157f7dea2e41b12d5a145","2022-03-28","311a46cfdaa3afda544e9285644f70d7","#History","80393e12a1d91c41a73d2da08fe73002","article_res/cover/0542a11a661c0beab5c7f6f4c69fa430.jpeg","article_res/cover/a4173a2e5858eb51b7899361274f65d0.jpeg",{"id":284,"title_md5":285,"publish_date":286,"author_md5":261,"is_original":4,"collection":5,"summary_md5":287,"cover_url":288,"cover_url_1_1":289},359,"d312e13dacca8310353c1e384964a527","2023-12-27","8ff4b45263441aa0d94cf3a3a7a766de","article_res/cover/7d3e130faa46c143f26aa0c8505f0e3f.jpeg","article_res/cover/13b4fa8f31ded4fd7608c03aa6439f3c.jpeg",{"id":291,"title_md5":292,"publish_date":293,"author_md5":261,"is_original":23,"collection":294,"summary_md5":295,"cover_url":296,"cover_url_1_1":297},306,"5f19ad660109a98c9ca5535026ca01a0","2024-03-29","#AI Image Generator #Microsoft #LoRA","ec689c8e35f54bac86a6ff600c7962c7","article_res/cover/9e1700f6770672bbbcb97068b210e60e.jpeg","article_res/cover/472440aa68b9e44596e20e30df662484.jpeg",{"id":299,"title_md5":300,"publish_date":301,"author_md5":261,"is_original":23,"collection":302,"summary_md5":303,"cover_url":304,"cover_url_1_1":305},141,"e79a6baa2512b1620f36753623e34dc1","2024-10-28","#AI Video Generator","9e92fd7f65b5e95fdfd84816ba328287","article_res/cover/b61ffaa8fe3bda9386441383659cc1ee.jpeg","article_res/cover/dd4c01118caf3107ba8ed34ebe27a8c1.jpeg",{"id":307,"title_md5":308,"publish_date":309,"author_md5":261,"is_original":4,"collection":5,"summary_md5":310,"cover_url":311,"cover_url_1_1":312},374,"e199d037d9e0a146e5477da445c9b54b","2023-12-08","b6e7a71d163b8f71722708afdc2c943a","article_res/cover/f7c8858a8dbff747cd3955dcb59901ee.jpeg","article_res/cover/0cde3e4d28e238b5ff65eb8ea2ff6d4c.jpeg",{"id":314,"title_md5":315,"publish_date":316,"author_md5":261,"is_original":23,"collection":65,"summary_md5":317,"cover_url":318,"cover_url_1_1":319},180,"20a07cafdf9c3de70a291a867667df82","2024-09-06","9cc6b01a69f2de18e61d909603bd1cb4","article_res/cover/ce95c7068a7e2fba26c9090ada7750c4.jpeg","article_res/cover/a61e390482a0d4c927e46660137ab17d.jpeg",{"related":321,"small":348},[322,330,338,339,340],{"id":323,"publish_date":324,"is_original":23,"collection":325,"cover_url":326,"cover_url_1_1":327,"title":328,"summary":329,"author":28},38,"2025-02-15","#AI Audio Generator #Sony","article_res/cover/9162fca94aab5d1117b99c23c206dc87.jpeg","article_res/cover/c5731cc285b5bd55087f7243fc8bad46.jpeg","Sony's MMAudio model — generating sound effects for videos","MMAudio generates synchronized audio given video and/or text inputs.",{"id":331,"publish_date":332,"is_original":23,"collection":333,"cover_url":334,"cover_url_1_1":335,"title":336,"summary":337,"author":28},69,"2025-01-16","#OpenAI #LLM","article_res/cover/0ca2ea75ea4712a795992dc4232a9b0d.jpeg","article_res/cover/1c3c4a37beeeb80a3c288c1a168ad2e7.jpeg","ChatGPT launches a new feature: Scheduled Tasks, ushering in an automated future","Scheduled tasks in 
ChatGPT",{"id":120,"publish_date":113,"is_original":23,"collection":121,"cover_url":122,"cover_url_1_1":123,"title":124,"summary":125,"author":28},{"id":166,"publish_date":167,"is_original":23,"collection":168,"cover_url":169,"cover_url_1_1":170,"title":171,"summary":172,"author":28},{"id":341,"publish_date":342,"is_original":4,"collection":5,"cover_url":343,"cover_url_1_1":344,"title":345,"summary":346,"author":347},563,"2022-04-23","article_res/cover/3da28616745d4ce670a5cf6cd6b0ec1d.jpeg","article_res/cover/7679a257f47df7d71f36093076587e60.jpeg","GameFi Dashboard organization","This week, I translated an article introducing GameFi, and today I'll share some reports on GameFi. GameFi is known as a game with a financial core under its gaming appearance...","Learning and Sharing",[349,355,361],{"title":10,"list":350},[351,352,353,354],{"id":96,"publish_date":97,"is_original":23,"collection":98,"cover_url":99,"cover_url_1_1":100,"title":101,"summary":102,"author":28},{"id":104,"publish_date":105,"is_original":23,"collection":106,"cover_url":107,"cover_url_1_1":108,"title":109,"summary":110,"author":28},{"id":112,"publish_date":113,"is_original":23,"collection":114,"cover_url":115,"cover_url_1_1":116,"title":117,"summary":118,"author":28},{"id":166,"publish_date":167,"is_original":23,"collection":168,"cover_url":169,"cover_url_1_1":170,"title":171,"summary":172,"author":28},{"title":222,"list":356},[357,358,359,360],{"id":120,"publish_date":113,"is_original":23,"collection":121,"cover_url":122,"cover_url_1_1":123,"title":124,"summary":125,"author":28},{"id":166,"publish_date":167,"is_original":23,"collection":168,"cover_url":169,"cover_url_1_1":170,"title":171,"summary":172,"author":28},{"id":227,"publish_date":228,"is_original":23,"collection":229,"cover_url":230,"cover_url_1_1":231,"title":232,"summary":233,"author":28},{"id":235,"publish_date":236,"is_original":23,"collection":73,"cover_url":237,"cover_url_1_1":238,"title":239,"summary":240,"author":28},{"title":242,"list":362},[],[8,9,10],[8,12,13,14,9,10,15,16,17,18],["Reactive",245],1754646410651]